galaxy-commits
Threads by month
- ----- 2025 -----
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions

29 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/36591da93d40/
changeset: 36591da93d40
user: dannon
date: 2012-08-29 16:20:49
summary: Missed .hgignore change, reverted.
affected #: 1 file
diff -r 07045f4895170a154c955f48291f2b5e6feb94c2 -r 36591da93d40a924a6276aefa49edcf8a675437c .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -78,7 +78,3 @@
*.rej
*~
-syntax: regexp
-^database$
-syntax: regexp
-^scripts/api/spp_submodule\.ga$
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
10 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/849e1713f613/
changeset: 849e1713f613
user: rpark37
date: 2012-02-01 22:21:38
summary: Updated scripts for API workflow enhancements and changing wor=
kflow parameters programatically
affected #: 6 files
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 849e1713f613a1932595b82=
fcd0e65a19bf5e366 .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -78,3 +78,7 @@
*.rej
*~
=20
+syntax: regexp
+^database$
+syntax: regexp
+^scripts/api/spp_submodule\.ga$
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 849e1713f613a1932595b82=
fcd0e65a19bf5e366 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -84,6 +84,20 @@
=20
However, we will import them if installed_repository_file is speci=
fied
"""
+
+ # ----------------------------------------------------------------=
--------------- # =20
+ ### RPARK: dictionary containing which workflows to change and edi=
t ###
+ param_map =3D {};
+ if (payload.has_key('parameters') ):
+ #if (payload['parameters']):
+ param_map =3D payload['parameters'];
+ print("PARAMETER MAP:");
+ print(param_map);
+ # ----------------------------------------------------------------=
--------------- # =20
+ =20
+
+ =20
+ =20
if 'workflow_id' not in payload:
# create new
if 'installed_repository_file' in payload:
@@ -168,6 +182,30 @@
# are not persisted so we need to do it every time)
step.module.add_dummy_datasets( connections=3Dstep.input_c=
onnections )
step.state =3D step.module.state
+ =20
+ ####################################################
+ ####################################################
+ #print("CHECKING WORKFLOW STEPS:")
+ #print(step.tool_id);
+ #print(step.state.inputs);
+ #print("upgard messages");
+ #print(step.state);
+ #print("\n");
+ # RPARK: IF TOOL_NAME IN PARAMETER MAP #
+ if step.tool_id in param_map:
+ #print("-------------------------FOUND IN PARAMETER DI=
CTIONARY")
+ #print(param_map[step.tool_id]);
+ change_param =3D param_map[step.tool_id]['param'];
+ change_value =3D param_map[step.tool_id]['value'];
+ #step.state.inputs['refGenomeSource']['index'] =3D "cr=
apolo";
+ #print(step.state.inputs[change_param]);
+ step.state.inputs[change_param] =3D change_value;
+ #print(step.state.inputs[change_param]);
+ #print(param_map[step.tool_id][change_value]);
+ #print("----------------------------------------------=
----")
+ ####################################################
+ ####################################################
+ =20
if step.tool_errors:
trans.response.status =3D 400
return "Workflow cannot be run because of validation e=
rrors in some steps: %s" % step_errors
@@ -220,3 +258,343 @@
trans.sa_session.flush()
return rval
=20
+ # --------------------------------------------------------------------=
-------------------------- #
+ # --------------------------------------------------------------------=
-------------------------- #
+ # ---- RPARK EDITS ---- #
+ # --------------------------------------------------------------------=
-------------------------- #
+ # --------------------------------------------------------------------=
-------------------------- #
+ @web.expose_api
+ @web.json
+ def workflow_dict( self, trans, workflow_id, **kwd ):
+ """
+ GET /api/workflows/{encoded_workflow_id}/download
+ Returns a selected workflow as a json dictionary.=20
+ """
+ print "workflow controller: workflow dict called"
+ print workflow_id
+ =20
+ try:
+ stored_workflow =3D trans.sa_session.query(self.app.model.Stor=
edWorkflow).get(trans.security.decode_id(workflow_id))
+ except Exception,e:
+ return ("Workflow with ID=3D'%s' can not be found\n Exception:=
%s") % (workflow_id, str( e ))
+ =20
+ # check to see if user has permissions to selected workflow=20
+ if stored_workflow.user !=3D trans.user and not trans.user_is_admi=
n():
+ if trans.sa_session.query(trans.app.model.StoredWorkflowUserSh=
areAssociation).filter_by(user=3Dtrans.user, stored_workflow=3Dstored_workf=
low).count() =3D=3D 0:
+ trans.response.status =3D 400
+ return("Workflow is not owned by or shared with current us=
er")
+ =20
+ return self._workflow_to_dict( trans, stored_workflow )
+ =20
+ @web.expose_api
+ def delete( self, trans, id, **kwd ): =20
+ """
+ DELETE /api/workflows/{encoded_workflow_id}
+ Deletes a specified workflow
+ Author: rpark
+ =20
+ copied from galaxy.web.controllers.workflows.py (delete)
+ """
+ workflow_id =3D id;
+ =20
+ try:
+ stored_workflow =3D trans.sa_session.query(self.app.model.Stor=
edWorkflow).get(trans.security.decode_id(workflow_id))
+ except Exception,e:
+ return ("Workflow with ID=3D'%s' can not be found\n Exception:=
%s") % (workflow_id, str( e ))
+ =20
+ # check to see if user has permissions to selected workflow=20
+ if stored_workflow.user !=3D trans.user and not trans.user_is_admi=
n():
+ if trans.sa_session.query(trans.app.model.StoredWorkflowUserSh=
areAssociation).filter_by(user=3Dtrans.user, stored_workflow=3Dstored_workf=
low).count() =3D=3D 0:
+ trans.response.status =3D 400
+ return("Workflow is not owned by or shared with current us=
er")
+
+ #Mark a workflow as deleted
+ stored_workflow.deleted =3D True
+ trans.sa_session.flush()
+ =20
+ # Python Debugger
+ #import pdb; pdb.set_trace()
+ =20
+ # TODO: Unsure of response message to let api know that a workflow=
was successfully deleted
+ #return 'OK'
+ return ( "Workflow '%s' successfully deleted" % stored_workflow.na=
me )
+ =20
+ @web.expose_api
+ def import_new_workflow(self, trans, payload, **kwd):
+ """
+ POST /api/workflows
+ Importing dynamic workflows from the api. Return newly generated w=
orkflow id.
+ Author: rpark=20
+ =20
+ # currently assumes payload['workflow'] is a json representation o=
f a workflow to be inserted into the database
+ """
+ =20
+ #import pdb; pdb.set_trace()
+ =20
+ data =3D payload['workflow'];
+ workflow, missing_tool_tups =3D self._workflow_from_dict( trans, d=
ata, source=3D"API" )
+ =20
+ # galaxy workflow newly created id =20
+ workflow_id =3D workflow.id;
+ # api encoded, id=20
+ encoded_id =3D trans.security.encode_id(workflow_id);
+ =20
+ =20
+ =20
+ # return list
+ rval=3D [];
+ =20
+ item =3D workflow.get_api_value(value_mapper=3D{'id':trans.securit=
y.encode_id})
+ item['url'] =3D url_for('workflow', id=3Dencoded_id)
+ =20
+ rval.append(item); =20
+ =20
+ return rval;
+ =20
+
+ def _workflow_from_dict( self, trans, data, source=3DNone ):
+ """
+ RPARK: copied from galaxy.web.controllers.workflows.py
+ Creates a workflow from a dict. Created workflow is stored in the =
database and returned.
+ """
+ # Put parameters in workflow mode
+ trans.workflow_building_mode =3D True
+ # Create new workflow from incoming dict
+ workflow =3D model.Workflow()
+ # If there's a source, put it in the workflow name.
+ if source:
+ name =3D "%s (imported from %s)" % ( data['name'], source )
+ else:
+ name =3D data['name']
+ workflow.name =3D name
+ # Assume no errors until we find a step that has some
+ workflow.has_errors =3D False
+ # Create each step
+ steps =3D []
+ # The editor will provide ids for each step that we don't need to =
save,
+ # but do need to use to make connections
+ steps_by_external_id =3D {}
+ # Keep track of tools required by the workflow that are not availa=
ble in
+ # the local Galaxy instance. Each tuple in the list of missing_to=
ol_tups
+ # will be ( tool_id, tool_name, tool_version ).
+ missing_tool_tups =3D []
+ # First pass to build step objects and populate basic values
+ for key, step_dict in data[ 'steps' ].iteritems():
+ # Create the model class for the step
+ step =3D model.WorkflowStep()
+ steps.append( step )
+ steps_by_external_id[ step_dict['id' ] ] =3D step
+ # FIXME: Position should be handled inside module
+ step.position =3D step_dict['position']
+ module =3D module_factory.from_dict( trans, step_dict, secure=
=3DFalse )
+ if module.type =3D=3D 'tool' and module.tool is None:
+ # A required tool is not available in the local Galaxy ins=
tance.
+ missing_tool_tup =3D ( step_dict[ 'tool_id' ], step_dict[ =
'name' ], step_dict[ 'tool_version' ] )
+ if missing_tool_tup not in missing_tool_tups:
+ missing_tool_tups.append( missing_tool_tup )
+ module.save_to_step( step )
+ if step.tool_errors:
+ workflow.has_errors =3D True
+ # Stick this in the step temporarily
+ step.temp_input_connections =3D step_dict['input_connections']
+ =20
+ # Save step annotation.
+ annotation =3D step_dict[ 'annotation' ]
+ if annotation:
+ annotation =3D sanitize_html( annotation, 'utf-8', 'text/h=
tml' )
+ # ------------------------------------------ #
+ # RPARK REMOVING: user annotation b/c of API
+ #self.add_item_annotation( trans.sa_session, trans.get_use=
r(), step, annotation )
+ # ------------------------------------------ #
+ =20
+ # Unpack and add post-job actions.
+ post_job_actions =3D step_dict.get( 'post_job_actions', {} )
+ for name, pja_dict in post_job_actions.items():
+ pja =3D PostJobAction( pja_dict[ 'action_type' ],=20
+ step, pja_dict[ 'output_name' ],=20
+ pja_dict[ 'action_arguments' ] )
+ # Second pass to deal with connections between steps
+ for step in steps:
+ # Input connections
+ for input_name, conn_dict in step.temp_input_connections.iteri=
tems():
+ if conn_dict:
+ conn =3D model.WorkflowStepConnection()
+ conn.input_step =3D step
+ conn.input_name =3D input_name
+ conn.output_name =3D conn_dict['output_name']
+ conn.output_step =3D steps_by_external_id[ conn_dict['=
id'] ]
+ del step.temp_input_connections
+ # Order the steps if possible
+ attach_ordered_steps( workflow, steps )
+ # Connect up
+ stored =3D model.StoredWorkflow()
+ stored.name =3D workflow.name
+ workflow.stored_workflow =3D stored
+ stored.latest_workflow =3D workflow
+ stored.user =3D trans.user
+ # Persist
+ trans.sa_session.add( stored )
+ trans.sa_session.flush()
+ return stored, missing_tool_tups
+ =20
+ def _workflow_to_dict( self, trans, stored ):
+ """
+ RPARK: copied from galaxy.web.controllers.workflows.py
+ Converts a workflow to a dict of attributes suitable for exporting.
+ """
+ workflow =3D stored.latest_workflow
+ =20
+ ### ----------------------------------- ###
+ ## RPARK EDIT ##
+ workflow_annotation =3D self.get_item_annotation_obj( trans.sa_ses=
sion, trans.user, stored )
+ annotation_str =3D ""
+ if workflow_annotation:
+ annotation_str =3D workflow_annotation.annotation
+ ### ----------------------------------- ###
+ =20
+ =20
+ # Pack workflow data into a dictionary and return
+ data =3D {}
+ data['a_galaxy_workflow'] =3D 'true' # Placeholder for identifying=
galaxy workflow
+ data['format-version'] =3D "0.1"
+ data['name'] =3D workflow.name
+ ### ----------------------------------- ###
+ ## RPARK EDIT ##
+ data['annotation'] =3D annotation_str
+ ### ----------------------------------- ###
+ =20
+ data['steps'] =3D {}
+ # For each step, rebuild the form and encode the state
+ for step in workflow.steps:
+ # Load from database representation
+ module =3D module_factory.from_workflow_step( trans, step )
+ =20
+ ### ----------------------------------- ###
+ ## RPARK EDIT ##
+ # Get user annotation.
+ step_annotation =3D self.get_item_annotation_obj(trans.sa_sess=
ion, trans.user, step )
+ annotation_str =3D ""
+ if step_annotation:
+ annotation_str =3D step_annotation.annotation =20
+ ### ----------------------------------- ###
+ =20
+ # Step info
+ step_dict =3D {
+ 'id': step.order_index,
+ 'type': module.type,
+ 'tool_id': module.get_tool_id(),
+ 'tool_version' : step.tool_version,
+ 'name': module.get_name(),
+ 'tool_state': module.get_state( secure=3DFalse ),
+ 'tool_errors': module.get_errors(),
+ ## 'data_inputs': module.get_data_inputs(),
+ ## 'data_outputs': module.get_data_outputs(),
+ =20
+ ### ----------------------------------- ###
+ ## RPARK EDIT ##
+ 'annotation' : annotation_str
+ ### ----------------------------------- ###
+ =20
+ }
+ # Add post-job actions to step dict.
+ if module.type =3D=3D 'tool':
+ pja_dict =3D {}
+ for pja in step.post_job_actions:
+ pja_dict[pja.action_type+pja.output_name] =3D dict( ac=
tion_type =3D pja.action_type,=20
+ outp=
ut_name =3D pja.output_name,
+ acti=
on_arguments =3D pja.action_arguments )
+ step_dict[ 'post_job_actions' ] =3D pja_dict
+ # Data inputs
+ step_dict['inputs'] =3D []
+ if module.type =3D=3D "data_input":
+ # Get input dataset name; default to 'Input Dataset'
+ name =3D module.state.get( 'name', 'Input Dataset')
+ step_dict['inputs'].append( { "name" : name, "description"=
: annotation_str } )
+ else:
+ # Step is a tool and may have runtime inputs.
+ for name, val in module.state.inputs.items():
+ input_type =3D type( val )
+ if input_type =3D=3D RuntimeValue:
+ step_dict['inputs'].append( { "name" : name, "desc=
ription" : "runtime parameter for tool %s" % module.get_name() } )
+ elif input_type =3D=3D dict:
+ # Input type is described by a dict, e.g. indexed =
parameters.
+ for partname, partval in val.items():
+ if type( partval ) =3D=3D RuntimeValue:
+ step_dict['inputs'].append( { "name" : nam=
e, "description" : "runtime parameter for tool %s" % module.get_name() } )
+ # User outputs
+ step_dict['user_outputs'] =3D []
+ """
+ module_outputs =3D module.get_data_outputs()
+ step_outputs =3D trans.sa_session.query( WorkflowOutput ).filt=
er( step=3Dstep )
+ for output in step_outputs:
+ name =3D output.output_name
+ annotation =3D ""
+ for module_output in module_outputs:
+ if module_output.get( 'name', None ) =3D=3D name:
+ output_type =3D module_output.get( 'extension', ''=
)
+ break
+ data['outputs'][name] =3D { 'name' : name, 'annotation' : =
annotation, 'type' : output_type }
+ """
+
+ # All step outputs
+ step_dict['outputs'] =3D []
+ if type( module ) is ToolModule:
+ for output in module.get_data_outputs():
+ step_dict['outputs'].append( { 'name' : output['name']=
, 'type' : output['extensions'][0] } )
+ # Connections
+ input_connections =3D step.input_connections
+ if step.type is None or step.type =3D=3D 'tool':
+ # Determine full (prefixed) names of valid input datasets
+ data_input_names =3D {}
+ def callback( input, value, prefixed_name, prefixed_label =
):
+ if isinstance( input, DataToolParameter ):
+ data_input_names[ prefixed_name ] =3D True
+ visit_input_values( module.tool.inputs, module.state.input=
s, callback )
+ # Filter
+ # FIXME: this removes connection without displaying a mess=
age currently!
+ input_connections =3D [ conn for conn in input_connections=
if conn.input_name in data_input_names ]
+ # Encode input connections as dictionary
+ input_conn_dict =3D {}
+ for conn in input_connections:
+ input_conn_dict[ conn.input_name ] =3D \
+ dict( id=3Dconn.output_step.order_index, output_name=
=3Dconn.output_name )
+ step_dict['input_connections'] =3D input_conn_dict
+ # Position
+ step_dict['position'] =3D step.position
+ # Add to return value
+ data['steps'][step.order_index] =3D step_dict
+ return data
+ =20
+ def get_item_annotation_obj( self, db_session, user, item ):
+ """=20
+ RPARK: copied from galaxy.model.item_attr.py
+ Returns a user's annotation object for an item. """
+ # Get annotation association class.
+ annotation_assoc_class =3D self._get_annotation_assoc_class( item )
+ if not annotation_assoc_class:
+ return None
+ =20
+ # Get annotation association object.
+ annotation_assoc =3D db_session.query( annotation_assoc_class ).fi=
lter_by( user=3Duser )
+ =20
+ # TODO: use filtering like that in _get_item_id_filter_str()
+ if item.__class__ =3D=3D galaxy.model.History:
+ annotation_assoc =3D annotation_assoc.filter_by( history=3Dite=
m )
+ elif item.__class__ =3D=3D galaxy.model.HistoryDatasetAssociation:
+ annotation_assoc =3D annotation_assoc.filter_by( hda=3Ditem )
+ elif item.__class__ =3D=3D galaxy.model.StoredWorkflow:
+ annotation_assoc =3D annotation_assoc.filter_by( stored_workfl=
ow=3Ditem )
+ elif item.__class__ =3D=3D galaxy.model.WorkflowStep:
+ annotation_assoc =3D annotation_assoc.filter_by( workflow_step=
=3Ditem )
+ elif item.__class__ =3D=3D galaxy.model.Page:
+ annotation_assoc =3D annotation_assoc.filter_by( page=3Ditem )
+ elif item.__class__ =3D=3D galaxy.model.Visualization:
+ annotation_assoc =3D annotation_assoc.filter_by( visualization=
=3Ditem )
+ return annotation_assoc.first()
+ =20
+ def _get_annotation_assoc_class( self, item ):
+ """=20
+ RPARK: copied from galaxy.model.item_attr.py
+ Returns an item's item-annotation association class. """
+ class_name =3D '%sAnnotationAssociation' % item.__class__.__name__
+ return getattr( galaxy.model, class_name, None )
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 849e1713f613a1932595b82=
fcd0e65a19bf5e366 lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -151,6 +151,20 @@
webapp.api_mapper.resource_with_deleted( 'history', 'histories', path_=
prefix=3D'/api' )
#webapp.api_mapper.connect( 'run_workflow', '/api/workflow/{workflow_i=
d}/library/{library_id}', controller=3D'workflows', action=3D'run', workflo=
w_id=3DNone, library_id=3DNone, conditions=3Ddict(method=3D["GET"]) )
=20
+ # ---------------------------------------------- #
+ # ---------------------------------------------- #
+ # RPARK EDIT=20
+ =20
+ # How to extend API: url_mapping=20
+ # "POST /api/workflows/import" =3D> ``workflows.import_workflow(=
)``.
+ # Defines a named route "import_workflow".
+ webapp.api_mapper.connect("import_workflow", "/api/workflows/uploa=
d", controller=3D"workflows", action=3D"import_new_workflow", conditions=3D=
dict(method=3D["POST"]))
+ webapp.api_mapper.connect("workflow_dict", '/api/workflows/downloa=
d/{workflow_id}', controller=3D'workflows', action=3D'workflow_dict', condi=
tions=3Ddict(method=3D['GET']))
+ =20
+ #import pdb; pdb.set_trace() =20
+ # ---------------------------------------------- #
+ # ---------------------------------------------- #
+ =20
webapp.finalize_config()
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 849e1713f613a1932595b82=
fcd0e65a19bf5e366 scripts/api/workflow_delete_workflow_rpark.py
--- /dev/null
+++ b/scripts/api/workflow_delete_workflow_rpark.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+"""
+# Author: RPARK
+API script for deleting workflows=20
+"""
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import delete
+
+try:
+ assert sys.argv[2]
+except IndexError:
+ print 'usage: %s key url [purge (true/false)] ' % os.path.basename( sy=
s.argv[0] )
+ sys.exit( 1 )
+try:
+ data =3D {}
+ data[ 'purge' ] =3D sys.argv[3]
+except IndexError:
+ pass
+
+delete( sys.argv[1], sys.argv[2], data )
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 849e1713f613a1932595b82=
fcd0e65a19bf5e366 scripts/api/workflow_execute_rpark.py
--- /dev/null
+++ b/scripts/api/workflow_execute_rpark.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+"""
+Execute workflows from the command line.
+Example calls:
+python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'Test API History' '38=3Dldda=3D0qr350234d2d192f'
+python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f'
+"""
+
+"""
+python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f' 'param=3Dt=
ool=3Dname=3Dvalue'=20
+
+'param=3Dtool=3Dname=3Dvalue'
+
+Example=20
+python workflow_execute_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://l=
ocalhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d386=
79e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=
=3Dbwa'=20
+
+python workflow_execute_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://l=
ocalhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d386=
79e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=
=3Darachne' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue'
+
+python workflow_execute_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://l=
ocalhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d386=
79e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=
=3Dbowtie' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue' 'param=3Dpeakca=
lling_spp=3Dwindow_size=3D1000'=20
+
+"""
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+
+def main():
+ try:
+ print("workflow_execute:py:");
+ data =3D {}
+ data['workflow_id'] =3D sys.argv[3]
+ data['history'] =3D sys.argv[4]
+ data['ds_map'] =3D {}
+
+ #########################################################
+ ### MY EDITS ############################################
+ ### Trying to pass in parameter for my own dictionary ###
+ data['parameters'] =3D {};
+
+ # DBTODO If only one input is given, don't require a step
+ # mapping, just use it for everything?
+ for v in sys.argv[5:]:
+ print("Multiple arguments ");
+ print(v);
+
+ try:
+ step, src, ds_id =3D v.split('=3D');
+ data['ds_map'][step] =3D {'src':src, 'id':ds_id};
+
+ except ValueError:
+ print("VALUE ERROR:");
+ wtype, wtool, wparam, wvalue =3D v.split('=3D');
+ data['parameters'][wtool] =3D {'param':wparam, 'value':wva=
lue}
+
+
+ #########################################################
+ ### MY EDITS ############################################
+ ### Trying to pass in parameter for my own dictionary ###
+ #data['parameters']['bowtie'] =3D {'param':'stepSize', 'value':100}
+ #data['parameters']['sam_to_bam'] =3D {'param':'genome', 'value':'=
hg18'}
+
+ except IndexError:
+ print 'usage: %s key url workflow_id history step=3Dsrc=3Ddataset_=
id' % os.path.basename(sys.argv[0])
+ sys.exit(1)
+ submit( sys.argv[1], sys.argv[2], data )
+
+if __name__ =3D=3D '__main__':
+ main()
+
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 849e1713f613a1932595b82=
fcd0e65a19bf5e366 scripts/api/workflow_import_from_file_rpark.py
--- /dev/null
+++ b/scripts/api/workflow_import_from_file_rpark.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+"""
+
+python rpark_import_workflow_from_file.py 35a24ae2643785ff3d046c98ea362c7f=
http://localhost:8080/api/workflows/import 'spp_submodule.ga'
+python rpark_import_workflow_from_file.py 35a24ae2643785ff3d046c98ea362c7f=
http://localhost:8080/api/workflows/import 'spp_submodule.ga'
+"""
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+### Rpark edit ###
+import simplejson
+
+def openWorkflow(in_file): =20
+ with open(in_file) as f:
+ temp_data =3D simplejson.load(f)
+ return temp_data;
+
+
+
+try:
+ assert sys.argv[2]
+except IndexError:
+ print 'usage: %s key url [name] ' % os.path.basename( sys.argv[0] )
+ sys.exit( 1 )
+try:
+ #data =3D {}
+ #data[ 'name' ] =3D sys.argv[3]
+ data =3D {};
+ workflow_dict =3D openWorkflow(sys.argv[3]);
+ data ['workflow'] =3D workflow_dict;
+ =20
+ =20
+except IndexError:
+ pass
+
+submit( sys.argv[1], sys.argv[2], data )
https://bitbucket.org/galaxy/galaxy-central/changeset/67e4caf2a34f/
changeset: 67e4caf2a34f
user: rpark37
date: 2012-02-01 22:28:00
summary: Updated import statements in workflows api controller
affected #: 1 file
diff -r 849e1713f613a1932595b82fcd0e65a19bf5e366 -r 67e4caf2a34f4dc6ae37697=
04696b18db98a3540 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -11,6 +11,21 @@
from galaxy.workflow.modules import module_factory
from galaxy.jobs.actions.post import ActionBox
=20
+# ------------------------------------------------------------------------=
---------------------- #
+# ------------------------------------------------------------------------=
---------------------- #
+# ---- RPARK EDITS ---- #
+import pkg_resources
+pkg_resources.require( "simplejson" )
+from galaxy import model
+from galaxy.web.controllers.workflow import attach_ordered_steps
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.workflow.modules import *
+from galaxy.model.item_attrs import *
+
+# ------------------------------------------------------------------------=
---------------------- #
+# ------------------------------------------------------------------------=
---------------------- #=20
+
+
log =3D logging.getLogger(__name__)
=20
class WorkflowsAPIController(BaseAPIController):
https://bitbucket.org/galaxy/galaxy-central/changeset/7738795047c6/
changeset: 7738795047c6
user: rpark37
date: 2012-02-01 23:22:50
summary: updated workflow_dict function for returning a selected workfl=
ow as a json object via API
affected #: 1 file
diff -r 67e4caf2a34f4dc6ae3769704696b18db98a3540 -r 7738795047c6a2af15d6b8a=
58173212b95c9f528 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -285,8 +285,6 @@
GET /api/workflows/{encoded_workflow_id}/download
Returns a selected workflow as a json dictionary.=20
"""
- print "workflow controller: workflow dict called"
- print workflow_id
=20
try:
stored_workflow =3D trans.sa_session.query(self.app.model.Stor=
edWorkflow).get(trans.security.decode_id(workflow_id))
https://bitbucket.org/galaxy/galaxy-central/changeset/a81a7e9ef257/
changeset: a81a7e9ef257
user: rpark37
date: 2012-02-08 23:23:58
summary: Updated import new workflow function
affected #: 1 file
diff -r 7738795047c6a2af15d6b8a58173212b95c9f528 -r a81a7e9ef257d26ee0fdcdc=
d124f6205a4692653 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -279,7 +279,7 @@
# --------------------------------------------------------------------=
-------------------------- #
# --------------------------------------------------------------------=
-------------------------- #
@web.expose_api
- @web.json
+ #(a)web.json
def workflow_dict( self, trans, workflow_id, **kwd ):
"""
GET /api/workflows/{encoded_workflow_id}/download
@@ -297,8 +297,9 @@
trans.response.status =3D 400
return("Workflow is not owned by or shared with current us=
er")
=20
- return self._workflow_to_dict( trans, stored_workflow )
- =20
+ ret_dict =3D self._workflow_to_dict( trans, stored_workflow );
+ return ret_dict
+ =20
@web.expose_api
def delete( self, trans, id, **kwd ): =20
"""
@@ -352,8 +353,6 @@
# api encoded, id=20
encoded_id =3D trans.security.encode_id(workflow_id);
=20
- =20
- =20
# return list
rval=3D [];
=20
@@ -362,8 +361,7 @@
=20
rval.append(item); =20
=20
- return rval;
- =20
+ return item;
=20
def _workflow_from_dict( self, trans, data, source=3DNone ):
"""
https://bitbucket.org/galaxy/galaxy-central/changeset/edcfb659dc4f/
changeset: edcfb659dc4f
user: rpark37
date: 2012-03-16 22:23:15
summary: Updated workflow API
affected #: 1 file
diff -r a81a7e9ef257d26ee0fdcdcd124f6205a4692653 -r edcfb659dc4fc03e4385d63=
e36e5df380a20c898 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -336,7 +336,7 @@
@web.expose_api
def import_new_workflow(self, trans, payload, **kwd):
"""
- POST /api/workflows
+ POST /api/workflows/upload
Importing dynamic workflows from the api. Return newly generated w=
orkflow id.
Author: rpark=20
=20
@@ -344,7 +344,7 @@
"""
=20
#import pdb; pdb.set_trace()
- =20
+ =20
data =3D payload['workflow'];
workflow, missing_tool_tups =3D self._workflow_from_dict( trans, d=
ata, source=3D"API" )
=20
https://bitbucket.org/galaxy/galaxy-central/changeset/0f3e6e68d8f5/
changeset: 0f3e6e68d8f5
user: rpark37
date: 2012-04-11 05:00:41
summary: Updated notes on how to run api/workflow_execute_parameters.py
affected #: 2 files
diff -r edcfb659dc4fc03e4385d63e36e5df380a20c898 -r 0f3e6e68d8f53ffdaee2f37=
1cc3d21a0d6a3b3aa scripts/api/workflow_execute_parameters.py
--- /dev/null
+++ b/scripts/api/workflow_execute_parameters.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+"""
+Execute workflows from the command line.
+Example calls:
+python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'Test API History' '38=3Dldda=3D0qr350234d2d192f'
+python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f'
+"""
+
+"""
+python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f' 'param=3Dt=
ool=3Dname=3Dvalue'=20
+
+'param=3Dtool=3Dname=3Dvalue'
+
+Example=20
+python workflow_execute_parameters.py 35a24ae2643785ff3d046c98ea362c7f htt=
p://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da79=
9d38679e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Dalig=
ner=3Dbwa'=20
+
+python workflow_execute_parameters.py 35a24ae2643785ff3d046c98ea362c7f htt=
p://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da79=
9d38679e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Dalig=
ner=3Darachne' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue'
+
+python workflow_execute_parameters.py 35a24ae2643785ff3d046c98ea362c7f htt=
p://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da79=
9d38679e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Dalig=
ner=3Dbowtie' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue' 'param=3Dpea=
kcalling_spp=3Dwindow_size=3D1000'=20
+
+"""
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+
+def main():
+ try:
+ print("workflow_execute:py:");
+ data =3D {}
+ data['workflow_id'] =3D sys.argv[3]
+ data['history'] =3D sys.argv[4]
+ data['ds_map'] =3D {}
+
+ #########################################################
+ ### MY EDITS ############################################
+ ### Trying to pass in parameter for my own dictionary ###
+ data['parameters'] =3D {};
+
+ # DBTODO If only one input is given, don't require a step
+ # mapping, just use it for everything?
+ for v in sys.argv[5:]:
+ print("Multiple arguments ");
+ print(v);
+
+ try:
+ step, src, ds_id =3D v.split('=3D');
+ data['ds_map'][step] =3D {'src':src, 'id':ds_id};
+
+ except ValueError:
+ print("VALUE ERROR:");
+ wtype, wtool, wparam, wvalue =3D v.split('=3D');
+ data['parameters'][wtool] =3D {'param':wparam, 'value':wva=
lue}
+
+
+ #########################################################
+ ### MY EDITS ############################################
+ ### Trying to pass in parameter for my own dictionary ###
+ #data['parameters']['bowtie'] =3D {'param':'stepSize', 'value':100}
+ #data['parameters']['sam_to_bam'] =3D {'param':'genome', 'value':'=
hg18'}
+
+ except IndexError:
+ print 'usage: %s key url workflow_id history step=3Dsrc=3Ddataset_=
id' % os.path.basename(sys.argv[0])
+ sys.exit(1)
+ submit( sys.argv[1], sys.argv[2], data )
+
+if __name__ =3D=3D '__main__':
+ main()
+
diff -r edcfb659dc4fc03e4385d63e36e5df380a20c898 -r 0f3e6e68d8f53ffdaee2f37=
1cc3d21a0d6a3b3aa scripts/api/workflow_execute_rpark.py
--- a/scripts/api/workflow_execute_rpark.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python
-"""
-Execute workflows from the command line.
-Example calls:
-python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'Test API History' '38=3Dldda=3D0qr350234d2d192f'
-python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f'
-"""
-
-"""
-python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f' 'param=3Dt=
ool=3Dname=3Dvalue'=20
-
-'param=3Dtool=3Dname=3Dvalue'
-
-Example=20
-python workflow_execute_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://l=
ocalhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d386=
79e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=
=3Dbwa'=20
-
-python workflow_execute_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://l=
ocalhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d386=
79e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=
=3Darachne' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue'
-
-python workflow_execute_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://l=
ocalhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d386=
79e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=
=3Dbowtie' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue' 'param=3Dpeakca=
lling_spp=3Dwindow_size=3D1000'=20
-
-"""
-
-import os, sys
-sys.path.insert( 0, os.path.dirname( __file__ ) )
-from common import submit
-
-
-def main():
- try:
- print("workflow_execute:py:");
- data =3D {}
- data['workflow_id'] =3D sys.argv[3]
- data['history'] =3D sys.argv[4]
- data['ds_map'] =3D {}
-
- #########################################################
- ### MY EDITS ############################################
- ### Trying to pass in parameter for my own dictionary ###
- data['parameters'] =3D {};
-
- # DBTODO If only one input is given, don't require a step
- # mapping, just use it for everything?
- for v in sys.argv[5:]:
- print("Multiple arguments ");
- print(v);
-
- try:
- step, src, ds_id =3D v.split('=3D');
- data['ds_map'][step] =3D {'src':src, 'id':ds_id};
-
- except ValueError:
- print("VALUE ERROR:");
- wtype, wtool, wparam, wvalue =3D v.split('=3D');
- data['parameters'][wtool] =3D {'param':wparam, 'value':wva=
lue}
-
-
- #########################################################
- ### MY EDITS ############################################
- ### Trying to pass in parameter for my own dictionary ###
- #data['parameters']['bowtie'] =3D {'param':'stepSize', 'value':100}
- #data['parameters']['sam_to_bam'] =3D {'param':'genome', 'value':'=
hg18'}
-
- except IndexError:
- print 'usage: %s key url workflow_id history step=3Dsrc=3Ddataset_=
id' % os.path.basename(sys.argv[0])
- sys.exit(1)
- submit( sys.argv[1], sys.argv[2], data )
-
-if __name__ =3D=3D '__main__':
- main()
-
https://bitbucket.org/galaxy/galaxy-central/changeset/414b1b3fb029/
changeset: 414b1b3fb029
user: rpark37
date: 2012-04-11 05:05:23
summary: Cleaned up code for api/workflows.py
affected #: 1 file
diff -r 0f3e6e68d8f53ffdaee2f371cc3d21a0d6a3b3aa -r 414b1b3fb029a1acdd925b5=
bef41d2a38415c695 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -104,10 +104,7 @@
### RPARK: dictionary containing which workflows to change and edi=
t ###
param_map =3D {};
if (payload.has_key('parameters') ):
- #if (payload['parameters']):
param_map =3D payload['parameters'];
- print("PARAMETER MAP:");
- print(param_map);
# ----------------------------------------------------------------=
--------------- # =20
=20
=20
@@ -200,24 +197,11 @@
=20
####################################################
####################################################
- #print("CHECKING WORKFLOW STEPS:")
- #print(step.tool_id);
- #print(step.state.inputs);
- #print("upgard messages");
- #print(step.state);
- #print("\n");
# RPARK: IF TOOL_NAME IN PARAMETER MAP #
if step.tool_id in param_map:
- #print("-------------------------FOUND IN PARAMETER DI=
CTIONARY")
- #print(param_map[step.tool_id]);
change_param =3D param_map[step.tool_id]['param'];
change_value =3D param_map[step.tool_id]['value'];
- #step.state.inputs['refGenomeSource']['index'] =3D "cr=
apolo";
- #print(step.state.inputs[change_param]);
step.state.inputs[change_param] =3D change_value;
- #print(step.state.inputs[change_param]);
- #print(param_map[step.tool_id][change_value]);
- #print("----------------------------------------------=
----")
####################################################
####################################################
=20
https://bitbucket.org/galaxy/galaxy-central/changeset/8c33567aeca5/
changeset: 8c33567aeca5
user: rpark37
date: 2012-08-03 08:09:05
summary: Cleaned up codebase for attempting adding api additions to the=
galaxy main branch
affected #: 3 files
diff -r 414b1b3fb029a1acdd925b5bef41d2a38415c695 -r 8c33567aeca58c4e7306d3b=
eef590b82cff90be2 scripts/api/workflow_delete.py
--- /dev/null
+++ b/scripts/api/workflow_delete.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+"""
+# ---------------------------------------------- #
+# PARKLAB, Author: RPARK
+API example script for deleting workflows=20
+# ---------------------------------------------- #
+
+Example calls:
+python workflow_delete.py <api_key><galaxy_url>/api/workflows/<workflow id=
> True
+"""
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import delete
+
+try:
+ assert sys.argv[2]
+except IndexError:
+ print 'usage: %s key url [purge (true/false)] ' % os.path.basename( sy=
s.argv[0] )
+ sys.exit( 1 )
+try:
+ data =3D {}
+ data[ 'purge' ] =3D sys.argv[3]
+except IndexError:
+ pass
+
+delete( sys.argv[1], sys.argv[2], data )
diff -r 414b1b3fb029a1acdd925b5bef41d2a38415c695 -r 8c33567aeca58c4e7306d3b=
eef590b82cff90be2 scripts/api/workflow_delete_workflow_rpark.py
--- a/scripts/api/workflow_delete_workflow_rpark.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-"""
-# Author: RPARK
-API script for deleting workflows=20
-"""
-
-import os, sys
-sys.path.insert( 0, os.path.dirname( __file__ ) )
-from common import delete
-
-try:
- assert sys.argv[2]
-except IndexError:
- print 'usage: %s key url [purge (true/false)] ' % os.path.basename( sy=
s.argv[0] )
- sys.exit( 1 )
-try:
- data =3D {}
- data[ 'purge' ] =3D sys.argv[3]
-except IndexError:
- pass
-
-delete( sys.argv[1], sys.argv[2], data )
diff -r 414b1b3fb029a1acdd925b5bef41d2a38415c695 -r 8c33567aeca58c4e7306d3b=
eef590b82cff90be2 scripts/api/workflow_execute_parameters.py
--- a/scripts/api/workflow_execute_parameters.py
+++ b/scripts/api/workflow_execute_parameters.py
@@ -1,23 +1,13 @@
#!/usr/bin/env python
"""
+# ---------------------------------------------- #
+# PARKLAB, Author: RPARK
+# ---------------------------------------------- #
+
Execute workflows from the command line.
Example calls:
-python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'Test API History' '38=3Dldda=3D0qr350234d2d192f'
-python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f'
-"""
-
-"""
-python workflow_execute.py <api_key><galaxy_url>/api/workflows f2db41e1fa3=
31b3e 'hist_id=3Da912e9e5d84530d4' '38=3Dhda=3D03501d7626bd192f' 'param=3Dt=
ool=3Dname=3Dvalue'=20
-
-'param=3Dtool=3Dname=3Dvalue'
-
-Example=20
-python workflow_execute_parameters.py 35a24ae2643785ff3d046c98ea362c7f htt=
p://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da79=
9d38679e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Dalig=
ner=3Dbwa'=20
-
-python workflow_execute_parameters.py 35a24ae2643785ff3d046c98ea362c7f htt=
p://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da79=
9d38679e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Dalig=
ner=3Darachne' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue'
-
-python workflow_execute_parameters.py 35a24ae2643785ff3d046c98ea362c7f htt=
p://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da79=
9d38679e985db' '70=3Dld=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Dalig=
ner=3Dbowtie' 'param=3Dbowtie_wrapper=3DsuppressHeader=3DTrue' 'param=3Dpea=
kcalling_spp=3Dwindow_size=3D1000'=20
-
+python workflow_execute.py <api_key><galaxy_url>/api/workflows <workflow_i=
d> 'hist_id=3D<history_id>' '38=3Dhda=3D<file_id>' 'param=3Dtool=3Dname=3Dv=
alue'=20
+python workflow_execute_parameters.py <api_key> http://localhost:8080/api/=
workflows 1cd8e2f6b131e891 'Test API' '69=3Dld=3Da799d38679e985db' '70=3Dld=
=3D33b43b4e7093c91f' 'param=3Dpeakcalling_spp=3Daligner=3Dbowtie' 'param=3D=
bowtie_wrapper=3DsuppressHeader=3DTrue' 'param=3Dpeakcalling_spp=3Dwindow_s=
ize=3D1000'=20
"""
=20
import os, sys
@@ -34,7 +24,6 @@
data['ds_map'] =3D {}
=20
#########################################################
- ### MY EDITS ############################################
### Trying to pass in parameter for my own dictionary ###
data['parameters'] =3D {};
=20
@@ -51,14 +40,10 @@
except ValueError:
print("VALUE ERROR:");
wtype, wtool, wparam, wvalue =3D v.split('=3D');
- data['parameters'][wtool] =3D {'param':wparam, 'value':wva=
lue}
-
-
- #########################################################
- ### MY EDITS ############################################
- ### Trying to pass in parameter for my own dictionary ###
- #data['parameters']['bowtie'] =3D {'param':'stepSize', 'value':100}
- #data['parameters']['sam_to_bam'] =3D {'param':'genome', 'value':'=
hg18'}
+ try:
+ data['parameters'][wtool] =3D {'param':wparam, 'value'=
:wvalue}
+ except ValueError:
+ print("TOOL ID ERROR:");
=20
except IndexError:
print 'usage: %s key url workflow_id history step=3Dsrc=3Ddataset_=
id' % os.path.basename(sys.argv[0])
https://bitbucket.org/galaxy/galaxy-central/changeset/6a644558eed7/
changeset: 6a644558eed7
user: dannon
date: 2012-08-29 16:17:53
summary: Merge of rpark's workflow API changes to support parameter exe=
cution and workflow creation.
TODO: Refactor copied workflow methods back out.
affected #: 2 files
diff -r 8c33567aeca58c4e7306d3beef590b82cff90be2 -r 6a644558eed7aba74a74060=
ccf0aa4ec211809c2 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -10,25 +10,12 @@
from galaxy.web.base.controller import BaseAPIController, url_for
from galaxy.workflow.modules import module_factory
from galaxy.jobs.actions.post import ActionBox
-
-# ------------------------------------------------------------------------=
---------------------- #
-# ------------------------------------------------------------------------=
---------------------- #
-# ---- RPARK EDITS ---- #
-import pkg_resources
-pkg_resources.require( "simplejson" )
-from galaxy import model
+from galaxy.model.item_attrs import UsesAnnotations
from galaxy.web.controllers.workflow import attach_ordered_steps
-from galaxy.util.sanitize_html import sanitize_html
-from galaxy.workflow.modules import *
-from galaxy.model.item_attrs import *
-
-# ------------------------------------------------------------------------=
---------------------- #
-# ------------------------------------------------------------------------=
---------------------- #=20
-
=20
log =3D logging.getLogger(__name__)
=20
-class WorkflowsAPIController(BaseAPIController):
+class WorkflowsAPIController(BaseAPIController, UsesAnnotations):
@web.expose_api
def index(self, trans, **kwd):
"""
@@ -100,16 +87,16 @@
However, we will import them if installed_repository_file is speci=
fied
"""
=20
- # ----------------------------------------------------------------=
--------------- # =20
+ # ----------------------------------------------------------------=
--------------- #
### RPARK: dictionary containing which workflows to change and edi=
t ###
param_map =3D {};
if (payload.has_key('parameters') ):
param_map =3D payload['parameters'];
- # ----------------------------------------------------------------=
--------------- # =20
- =20
+ # ----------------------------------------------------------------=
--------------- #
=20
- =20
- =20
+
+
+
if 'workflow_id' not in payload:
# create new
if 'installed_repository_file' in payload:
@@ -194,7 +181,7 @@
# are not persisted so we need to do it every time)
step.module.add_dummy_datasets( connections=3Dstep.input_c=
onnections )
step.state =3D step.module.state
- =20
+
####################################################
####################################################
# RPARK: IF TOOL_NAME IN PARAMETER MAP #
@@ -204,7 +191,7 @@
step.state.inputs[change_param] =3D change_value;
####################################################
####################################################
- =20
+
if step.tool_errors:
trans.response.status =3D 400
return "Workflow cannot be run because of validation e=
rrors in some steps: %s" % step_errors
@@ -267,40 +254,40 @@
def workflow_dict( self, trans, workflow_id, **kwd ):
"""
GET /api/workflows/{encoded_workflow_id}/download
- Returns a selected workflow as a json dictionary.=20
+ Returns a selected workflow as a json dictionary.
"""
- =20
+
try:
stored_workflow =3D trans.sa_session.query(self.app.model.Stor=
edWorkflow).get(trans.security.decode_id(workflow_id))
except Exception,e:
return ("Workflow with ID=3D'%s' can not be found\n Exception:=
%s") % (workflow_id, str( e ))
- =20
- # check to see if user has permissions to selected workflow=20
+
+ # check to see if user has permissions to selected workflow
if stored_workflow.user !=3D trans.user and not trans.user_is_admi=
n():
if trans.sa_session.query(trans.app.model.StoredWorkflowUserSh=
areAssociation).filter_by(user=3Dtrans.user, stored_workflow=3Dstored_workf=
low).count() =3D=3D 0:
trans.response.status =3D 400
return("Workflow is not owned by or shared with current us=
er")
- =20
+
ret_dict =3D self._workflow_to_dict( trans, stored_workflow );
return ret_dict
- =20
+
@web.expose_api
- def delete( self, trans, id, **kwd ): =20
+ def delete( self, trans, id, **kwd ):
"""
DELETE /api/workflows/{encoded_workflow_id}
Deletes a specified workflow
Author: rpark
- =20
+
copied from galaxy.web.controllers.workflows.py (delete)
"""
workflow_id =3D id;
- =20
+
try:
stored_workflow =3D trans.sa_session.query(self.app.model.Stor=
edWorkflow).get(trans.security.decode_id(workflow_id))
except Exception,e:
return ("Workflow with ID=3D'%s' can not be found\n Exception:=
%s") % (workflow_id, str( e ))
- =20
- # check to see if user has permissions to selected workflow=20
+
+ # check to see if user has permissions to selected workflow
if stored_workflow.user !=3D trans.user and not trans.user_is_admi=
n():
if trans.sa_session.query(trans.app.model.StoredWorkflowUserSh=
areAssociation).filter_by(user=3Dtrans.user, stored_workflow=3Dstored_workf=
low).count() =3D=3D 0:
trans.response.status =3D 400
@@ -309,42 +296,37 @@
#Mark a workflow as deleted
stored_workflow.deleted =3D True
trans.sa_session.flush()
- =20
- # Python Debugger
- #import pdb; pdb.set_trace()
- =20
+
# TODO: Unsure of response message to let api know that a workflow=
was successfully deleted
#return 'OK'
return ( "Workflow '%s' successfully deleted" % stored_workflow.na=
me )
- =20
+
@web.expose_api
def import_new_workflow(self, trans, payload, **kwd):
"""
POST /api/workflows/upload
Importing dynamic workflows from the api. Return newly generated w=
orkflow id.
- Author: rpark=20
- =20
+ Author: rpark
+
# currently assumes payload['workflow'] is a json representation o=
f a workflow to be inserted into the database
"""
- =20
- #import pdb; pdb.set_trace()
- =20
+
data =3D payload['workflow'];
workflow, missing_tool_tups =3D self._workflow_from_dict( trans, d=
ata, source=3D"API" )
- =20
- # galaxy workflow newly created id =20
+
+ # galaxy workflow newly created id
workflow_id =3D workflow.id;
- # api encoded, id=20
+ # api encoded, id
encoded_id =3D trans.security.encode_id(workflow_id);
- =20
+
# return list
rval=3D [];
- =20
+
item =3D workflow.get_api_value(value_mapper=3D{'id':trans.securit=
y.encode_id})
item['url'] =3D url_for('workflow', id=3Dencoded_id)
- =20
- rval.append(item); =20
- =20
+
+ rval.append(item);
+
return item;
=20
def _workflow_from_dict( self, trans, data, source=3DNone ):
@@ -392,21 +374,19 @@
workflow.has_errors =3D True
# Stick this in the step temporarily
step.temp_input_connections =3D step_dict['input_connections']
- =20
# Save step annotation.
annotation =3D step_dict[ 'annotation' ]
- if annotation:
- annotation =3D sanitize_html( annotation, 'utf-8', 'text/h=
tml' )
+ #if annotation:
+ #annotation =3D sanitize_html( annotation, 'utf-8', 'text/=
html' )
# ------------------------------------------ #
# RPARK REMOVING: user annotation b/c of API
#self.add_item_annotation( trans.sa_session, trans.get_use=
r(), step, annotation )
# ------------------------------------------ #
- =20
# Unpack and add post-job actions.
post_job_actions =3D step_dict.get( 'post_job_actions', {} )
for name, pja_dict in post_job_actions.items():
- pja =3D PostJobAction( pja_dict[ 'action_type' ],=20
- step, pja_dict[ 'output_name' ],=20
+ pja =3D PostJobAction( pja_dict[ 'action_type' ],
+ step, pja_dict[ 'output_name' ],
pja_dict[ 'action_arguments' ] )
# Second pass to deal with connections between steps
for step in steps:
@@ -431,14 +411,14 @@
trans.sa_session.add( stored )
trans.sa_session.flush()
return stored, missing_tool_tups
- =20
+
def _workflow_to_dict( self, trans, stored ):
"""
RPARK: copied from galaxy.web.controllers.workflows.py
Converts a workflow to a dict of attributes suitable for exporting.
"""
workflow =3D stored.latest_workflow
- =20
+
### ----------------------------------- ###
## RPARK EDIT ##
workflow_annotation =3D self.get_item_annotation_obj( trans.sa_ses=
sion, trans.user, stored )
@@ -446,8 +426,8 @@
if workflow_annotation:
annotation_str =3D workflow_annotation.annotation
### ----------------------------------- ###
- =20
- =20
+
+
# Pack workflow data into a dictionary and return
data =3D {}
data['a_galaxy_workflow'] =3D 'true' # Placeholder for identifying=
galaxy workflow
@@ -457,22 +437,22 @@
## RPARK EDIT ##
data['annotation'] =3D annotation_str
### ----------------------------------- ###
- =20
+
data['steps'] =3D {}
# For each step, rebuild the form and encode the state
for step in workflow.steps:
# Load from database representation
module =3D module_factory.from_workflow_step( trans, step )
- =20
+
### ----------------------------------- ###
## RPARK EDIT ##
# Get user annotation.
step_annotation =3D self.get_item_annotation_obj(trans.sa_sess=
ion, trans.user, step )
annotation_str =3D ""
if step_annotation:
- annotation_str =3D step_annotation.annotation =20
+ annotation_str =3D step_annotation.annotation
### ----------------------------------- ###
- =20
+
# Step info
step_dict =3D {
'id': step.order_index,
@@ -484,18 +464,18 @@
'tool_errors': module.get_errors(),
## 'data_inputs': module.get_data_inputs(),
## 'data_outputs': module.get_data_outputs(),
- =20
+
### ----------------------------------- ###
## RPARK EDIT ##
'annotation' : annotation_str
### ----------------------------------- ###
- =20
+
}
# Add post-job actions to step dict.
if module.type =3D=3D 'tool':
pja_dict =3D {}
for pja in step.post_job_actions:
- pja_dict[pja.action_type+pja.output_name] =3D dict( ac=
tion_type =3D pja.action_type,=20
+ pja_dict[pja.action_type+pja.output_name] =3D dict( ac=
tion_type =3D pja.action_type,
outp=
ut_name =3D pja.output_name,
acti=
on_arguments =3D pja.action_arguments )
step_dict[ 'post_job_actions' ] =3D pja_dict
@@ -559,37 +539,4 @@
# Add to return value
data['steps'][step.order_index] =3D step_dict
return data
- =20
- def get_item_annotation_obj( self, db_session, user, item ):
- """=20
- RPARK: copied from galaxy.model.item_attr.py
- Returns a user's annotation object for an item. """
- # Get annotation association class.
- annotation_assoc_class =3D self._get_annotation_assoc_class( item )
- if not annotation_assoc_class:
- return None
- =20
- # Get annotation association object.
- annotation_assoc =3D db_session.query( annotation_assoc_class ).fi=
lter_by( user=3Duser )
- =20
- # TODO: use filtering like that in _get_item_id_filter_str()
- if item.__class__ =3D=3D galaxy.model.History:
- annotation_assoc =3D annotation_assoc.filter_by( history=3Dite=
m )
- elif item.__class__ =3D=3D galaxy.model.HistoryDatasetAssociation:
- annotation_assoc =3D annotation_assoc.filter_by( hda=3Ditem )
- elif item.__class__ =3D=3D galaxy.model.StoredWorkflow:
- annotation_assoc =3D annotation_assoc.filter_by( stored_workfl=
ow=3Ditem )
- elif item.__class__ =3D=3D galaxy.model.WorkflowStep:
- annotation_assoc =3D annotation_assoc.filter_by( workflow_step=
=3Ditem )
- elif item.__class__ =3D=3D galaxy.model.Page:
- annotation_assoc =3D annotation_assoc.filter_by( page=3Ditem )
- elif item.__class__ =3D=3D galaxy.model.Visualization:
- annotation_assoc =3D annotation_assoc.filter_by( visualization=
=3Ditem )
- return annotation_assoc.first()
- =20
- def _get_annotation_assoc_class( self, item ):
- """=20
- RPARK: copied from galaxy.model.item_attr.py
- Returns an item's item-annotation association class. """
- class_name =3D '%sAnnotationAssociation' % item.__class__.__name__
- return getattr( galaxy.model, class_name, None )
+
diff -r 8c33567aeca58c4e7306d3beef590b82cff90be2 -r 6a644558eed7aba74a74060=
ccf0aa4ec211809c2 lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -151,20 +151,11 @@
webapp.api_mapper.resource_with_deleted( 'history', 'histories', path_=
prefix=3D'/api' )
#webapp.api_mapper.connect( 'run_workflow', '/api/workflow/{workflow_i=
d}/library/{library_id}', controller=3D'workflows', action=3D'run', workflo=
w_id=3DNone, library_id=3DNone, conditions=3Ddict(method=3D["GET"]) )
=20
- # ---------------------------------------------- #
- # ---------------------------------------------- #
- # RPARK EDIT=20
- =20
- # How to extend API: url_mapping=20
- # "POST /api/workflows/import" =3D> ``workflows.import_workflow(=
)``.
- # Defines a named route "import_workflow".
- webapp.api_mapper.connect("import_workflow", "/api/workflows/uploa=
d", controller=3D"workflows", action=3D"import_new_workflow", conditions=3D=
dict(method=3D["POST"]))
- webapp.api_mapper.connect("workflow_dict", '/api/workflows/downloa=
d/{workflow_id}', controller=3D'workflows', action=3D'workflow_dict', condi=
tions=3Ddict(method=3D['GET']))
- =20
- #import pdb; pdb.set_trace() =20
- # ---------------------------------------------- #
- # ---------------------------------------------- #
- =20
+ # "POST /api/workflows/import" =3D> ``workflows.import_workflow()``.
+ # Defines a named route "import_workflow".
+ webapp.api_mapper.connect("import_workflow", "/api/workflows/upload", =
controller=3D"workflows", action=3D"import_new_workflow", conditions=3Ddict=
(method=3D["POST"]))
+ webapp.api_mapper.connect("workflow_dict", '/api/workflows/download/{w=
orkflow_id}', controller=3D'workflows', action=3D'workflow_dict', condition=
s=3Ddict(method=3D['GET']))
+
webapp.finalize_config()
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
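For anyone exercising these two new routes from outside Galaxy, here is a minimal, hypothetical sketch. It assumes a local Galaxy at GALAXY_URL, a valid API key, an illustrative workflow id, and that import_new_workflow() accepts a JSON payload with a 'workflow' key; the payload shape is an assumption, so check the scripts/api clients for the canonical usage:

    import json
    import urllib2

    GALAXY_URL = 'http://localhost:8080'   # assumption: a local Galaxy instance
    API_KEY = 'your-api-key'               # assumption: a real API key

    # POST /api/workflows/upload -> workflows.import_new_workflow()
    payload = json.dumps( { 'workflow': json.load( open( 'my_workflow.ga' ) ) } )
    request = urllib2.Request( '%s/api/workflows/upload?key=%s' % ( GALAXY_URL, API_KEY ),
                               payload, { 'Content-Type': 'application/json' } )
    print urllib2.urlopen( request ).read()

    # GET /api/workflows/download/{workflow_id} -> workflows.workflow_dict()
    print urllib2.urlopen( '%s/api/workflows/download/f2db41e1fa331b3e?key=%s'
                           % ( GALAXY_URL, API_KEY ) ).read()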
https://bitbucket.org/galaxy/galaxy-central/changeset/07045f489517/
changeset: 07045f489517
user: dannon
date: 2012-08-29 16:18:23
summary: Merge.
affected #: 36 files
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -719,7 +719,49 @@
     pass
 
 class Newick( Text ):
-    pass
+    """New Hampshire/Newick Format"""
+    file_ext = "nhx"
+
+    MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True )
+
+    def __init__(self, **kwd):
+        """Initialize foobar datatype"""
+        Text.__init__(self, **kwd)
+
+    def init_meta( self, dataset, copy_from=None ):
+        Text.init_meta( self, dataset, copy_from=copy_from )
+
+
+    def sniff( self, filename ):
+        """ Returning false as the newick format is too general and cannot be sniffed."""
+        return False
+
+
+class Nexus( Text ):
+    """Nexus format as used By Paup, Mr Bayes, etc"""
+    file_ext = "nex"
+
+    MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True )
+
+    def __init__(self, **kwd):
+        """Initialize foobar datatype"""
+        Text.__init__(self, **kwd)
+
+    def init_meta( self, dataset, copy_from=None ):
+        Text.init_meta( self, dataset, copy_from=copy_from )
+
+
+    def sniff( self, filename ):
+        """All Nexus Files Simply puts a '#NEXUS' in its first line"""
+        f = open(filename, "r")
+        firstline = f.readline().upper()
+        f.close()
+
+        if "#NEXUS" in firstline:
+            return True
+        else:
+            return False
+
 
 # ------------- Utility methods --------------
 
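The Nexus sniffer added above reduces to a single check on the first line of the file; a standalone sketch of the same rule (the file name is illustrative and not part of the changeset):

    def looks_like_nexus( filename ):
        # Mirrors Nexus.sniff(): a Nexus file declares itself with '#NEXUS' on its first line.
        f = open( filename, 'r' )
        firstline = f.readline().upper()
        f.close()
        return '#NEXUS' in firstline

    print looks_like_nexus( 'trees.nex' )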
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py
+++ b/lib/galaxy/datatypes/sniff.py
@@ -6,6 +6,7 @@
 from galaxy import util
 from galaxy.datatypes.checkers import *
 from galaxy.datatypes.binary import unsniffable_binary_formats
+from encodings import search_function as encodings_search_function
 
 log = logging.getLogger(__name__)
 
@@ -15,7 +16,7 @@
     full_path = os.path.join(path, 'test', fname)
     return full_path
 
-def stream_to_open_named_file( stream, fd, filename ):
+def stream_to_open_named_file( stream, fd, filename, source_encoding=None, source_error='strict', target_encoding=None, target_error='strict' ):
     """Writes a stream to the provided file descriptor, returns the file's name and bool( is_multi_byte ). Closes file descriptor"""
     #signature and behavor is somewhat odd, due to backwards compatibility, but this can/should be done better
     CHUNK_SIZE = 1048576
@@ -23,6 +24,10 @@
     is_compressed = False
     is_binary = False
     is_multi_byte = False
+    if not target_encoding or not encodings_search_function( target_encoding ):
+        target_encoding = util.DEFAULT_ENCODING #utf-8
+    if not source_encoding:
+        source_encoding = util.DEFAULT_ENCODING #sys.getdefaultencoding() would mimic old behavior (defaults to ascii)
     while 1:
         chunk = stream.read( CHUNK_SIZE )
         if not chunk:
@@ -42,13 +47,12 @@
                 chars = chunk[:100]
                 is_multi_byte = util.is_multi_byte( chars )
                 if not is_multi_byte:
-                    for char in chars:
-                        if ord( char ) > 128:
-                            is_binary = True
-                            break
+                    is_binary = util.is_binary( chunk )
                 data_checked = True
             if not is_compressed and not is_binary:
-                os.write( fd, chunk.encode( "utf-8" ) )
+                if not isinstance( chunk, unicode ):
+                    chunk = chunk.decode( source_encoding, source_error )
+                os.write( fd, chunk.encode( target_encoding, target_error ) )
             else:
                 # Compressed files must be encoded after they are uncompressed in the upload utility,
                 # while binary files should not be encoded at all.
@@ -56,10 +60,10 @@
     os.close( fd )
     return filename, is_multi_byte
 
-def stream_to_file( stream, suffix='', prefix='', dir=None, text=False ):
+def stream_to_file( stream, suffix='', prefix='', dir=None, text=False, **kwd ):
     """Writes a stream to a temporary file, returns the temporary file's name"""
     fd, temp_name = tempfile.mkstemp( suffix=suffix, prefix=prefix, dir=dir, text=text )
-    return stream_to_open_named_file( stream, fd, temp_name )
+    return stream_to_open_named_file( stream, fd, temp_name, **kwd )
 
 def check_newlines( fname, bytes_to_read=52428800 ):
     """
@@ -305,14 +309,9 @@
     else:
         for hdr in headers:
             for char in hdr:
-                if len( char ) > 1:
-                    for c in char:
-                        if ord( c ) > 128:
-                            is_binary = True
-                            break
-                elif ord( char ) > 128:
-                    is_binary = True
-                    break
+                #old behavior had 'char' possibly having length > 1,
+                #need to determine when/if this occurs
+                is_binary = util.is_binary( char )
             if is_binary:
                 break
     if is_binary:
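To show how the new keyword arguments flow from stream_to_file() into stream_to_open_named_file(), a small sketch (it assumes galaxy.datatypes.sniff is importable; the latin-1 input is only for demonstration):

    from StringIO import StringIO
    from galaxy.datatypes import sniff

    # A latin-1 byte stream gets decoded with source_encoding and re-encoded
    # to the default target encoding (utf-8) on its way into the temp file.
    data = u'caf\xe9\n'.encode( 'latin-1' )
    filename, is_multi_byte = sniff.stream_to_file( StringIO( data ),
                                                    prefix='encoding_example_',
                                                    source_encoding='latin-1' )
    print filename, is_multi_byte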
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/datatypes/xml.py
--- a/lib/galaxy/datatypes/xml.py
+++ b/lib/galaxy/datatypes/xml.py
@@ -76,3 +76,24 @@
             dataset.blurb = 'file purged from disk'
     def sniff( self, filename ):
         return False
+
+class Phyloxml( GenericXml ):
+    """Format for defining phyloxml data http://www.phyloxml.org/"""
+    file_ext = "phyloxml"
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'Phyloxml data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        """"Checking for keyword - 'phyloxml' always in lowercase in the first few lines"""
+        f = open(filename, "r")
+        firstlines = "".join(f.readlines(5))
+        f.close()
+        if "phyloxml" in firstlines:
+            return True
+        return False
\ No newline at end of file
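Note that f.readlines(5) above uses readlines' sizehint argument (an approximate byte count, not a line count), so the effective rule is simply that the lowercase keyword 'phyloxml' appears near the top of the file. A standalone sketch of the same check (the file name is illustrative):

    def looks_like_phyloxml( filename ):
        # Same rule as Phyloxml.sniff() above.
        f = open( filename, 'r' )
        firstlines = ''.join( f.readlines( 5 ) )
        f.close()
        return 'phyloxml' in firstlines

    print looks_like_phyloxml( 'example.phyloxml' )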
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -471,7 +471,7 @@
             job.user.total_disk_usage += bytes
 
         # fix permissions
-        for path in [ dp.real_path for dp in self.get_output_fnames() ]:
+        for path in [ dp.real_path for dp in self.get_mutable_output_fnames() ]:
             util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
         self.sa_session.flush()
         log.debug( 'job %d ended' % self.job_id )
@@ -679,6 +679,11 @@
             self.compute_outputs()
         return self.output_paths
 
+    def get_mutable_output_fnames( self ):
+        if self.output_paths is None:
+            self.compute_outputs()
+        return filter( lambda dsp: dsp.mutable, self.output_paths )
+
     def get_output_hdas_and_fnames( self ):
         if self.output_hdas_and_paths is None:
             self.compute_outputs()
@@ -686,10 +691,11 @@
 
     def compute_outputs( self ) :
         class DatasetPath( object ):
-            def __init__( self, dataset_id, real_path, false_path = None ):
+            def __init__( self, dataset_id, real_path, false_path = None, mutable = True ):
                 self.dataset_id = dataset_id
                 self.real_path = real_path
                 self.false_path = false_path
+                self.mutable = mutable
             def __str__( self ):
                 if self.false_path is None:
                     return self.real_path
@@ -706,13 +712,13 @@
             self.output_hdas_and_paths = {}
             for name, hda in [ ( da.name, da.dataset ) for da in job.output_datasets + job.output_library_datasets ]:
                 false_path = os.path.abspath( os.path.join( self.working_directory, "galaxy_dataset_%d.dat" % hda.dataset.id ) )
-                dsp = DatasetPath( hda.dataset.id, hda.dataset.file_name, false_path )
+                dsp = DatasetPath( hda.dataset.id, hda.dataset.file_name, false_path, mutable = hda.dataset.external_filename is None )
                 self.output_paths.append( dsp )
                 self.output_hdas_and_paths[name] = hda, dsp
             if special:
                 false_path = os.path.abspath( os.path.join( self.working_directory, "galaxy_dataset_%d.dat" % special.dataset.id ) )
         else:
-            results = [ ( da.name, da.dataset, DatasetPath( da.dataset.dataset.id, da.dataset.file_name ) ) for da in job.output_datasets + job.output_library_datasets ]
+            results = [ ( da.name, da.dataset, DatasetPath( da.dataset.dataset.id, da.dataset.file_name, mutable = da.dataset.dataset.external_filename is None ) ) for da in job.output_datasets + job.output_library_datasets ]
             self.output_paths = [t[2] for t in results]
             self.output_hdas_and_paths = dict([(t[0], t[1:]) for t in results])
             if special:
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/jobs/deferred/genome_transfer.py
--- a/lib/galaxy/jobs/deferred/genome_transfer.py
+++ b/lib/galaxy/jobs/deferred/genome_transfer.py
@@ -115,15 +115,16 @@
                 files = tar.getmembers()
                 for filename in files:
                     z = tar.extractfile(filename)
-                    try:
-                        chunk = z.read( CHUNK_SIZE )
-                    except IOError:
-                        os.close( fd )
-                        log.error( 'Problem decompressing compressed data' )
-                        exit()
-                    if not chunk:
-                        break
-                    os.write( fd, chunk )
+                    while 1:
+                        try:
+                            chunk = z.read( CHUNK_SIZE )
+                        except IOError:
+                            os.close( fd )
+                            log.error( 'Problem decompressing compressed data' )
+                            exit()
+                        if not chunk:
+                            break
+                        os.write( fd, chunk )
                 os.write( fd, '\n' )
                 os.close( fd )
             tar.close()
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/model/item_attrs.py
--- a/lib/galaxy/model/item_attrs.py
+++ b/lib/galaxy/model/item_attrs.py
@@ -95,7 +95,7 @@
         """ Returns a user's annotation string for an item. """
         annotation_obj = self.get_item_annotation_obj( db_session, user, item )
         if annotation_obj:
-            return annotation_obj.annotation
+            return galaxy.util.unicodify( annotation_obj.annotation )
         return None
 
     def get_item_annotation_obj( self, db_session, user, item ):
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -187,7 +187,9 @@
                         section.elems[ section_key ] = workflow
                         log.debug( "Loaded workflow: %s %s" % ( workflow_id, workflow.name ) )
                 elif section_key.startswith( 'label_' ):
-                    section.elems[ section_key ] = section_val
+                    if section_val:
+                        section.elems[ section_key ] = section_val
+                        log.debug( "Loaded label: %s" % ( section_val.text ) )
             self.tool_panel[ key ] = section
     def load_integrated_tool_panel_keys( self ):
         """
@@ -215,12 +217,12 @@
                         section.elems[ key ] = None
                     elif section_elem.tag == 'label':
                         key = 'label_%s' % section_elem.get( 'id' )
-                        section.elems[ key ] = ToolSectionLabel( section_elem )
+                        section.elems[ key ] = None
                 key = 'section_%s' % elem.get( 'id' )
                 self.integrated_tool_panel[ key ] = section
             elif elem.tag == 'label':
                 key = 'label_%s' % elem.get( 'id' )
-                self.integrated_tool_panel[ key ] = ToolSectionLabel( elem )
+                self.integrated_tool_panel[ key ] = None
     def write_integrated_tool_panel_config_file( self ):
         """
         Write the current in-memory version of the integrated_tool_panel.xml file to disk.  Since Galaxy administrators
@@ -254,10 +256,11 @@
                     if section_item:
                         os.write( fd, '        <workflow id="%s" />\n' % section_item.id )
                 elif section_key.startswith( 'label_' ):
-                    label_id = section_item.id or ''
-                    label_text = section_item.text or ''
-                    label_version = section_item.version or ''
-                    os.write( fd, '        <label id="%s" text="%s" version="%s" />\n' % ( label_id, label_text, label_version ) )
+                    if section_item:
+                        label_id = section_item.id or ''
+                        label_text = section_item.text or ''
+                        label_version = section_item.version or ''
+                        os.write( fd, '        <label id="%s" text="%s" version="%s" />\n' % ( label_id, label_text, label_version ) )
             os.write( fd, '    </section>\n' )
         os.write( fd, '</toolbox>\n' )
         os.close( fd )
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/tools/genome_index/index_genome.py
--- a/lib/galaxy/tools/genome_index/index_genome.py
+++ b/lib/galaxy/tools/genome_index/index_genome.py
@@ -54,6 +54,7 @@
                 self._log( self.locations )
                 self._log( 'Indexer %s completed successfully.' % indexer )
                 self._flush_files()
+                exit(0)
 
     def _check_link( self ):
         self._log( 'Checking symlink to %s' % self.fafile )
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -34,6 +34,9 @@
 
 gzip_magic = '\037\213'
 bz2_magic = 'BZh'
+DEFAULT_ENCODING = 'utf-8'
+NULL_CHAR = '\000'
+BINARY_CHARS = [ NULL_CHAR ]
 
 from inflection import Inflector, English
 inflector = Inflector(English)
@@ -57,6 +60,32 @@
             return True
     return False
 
+def is_binary( value, binary_chars=None ):
+    """
+    File is binary if it contains a null-byte by default (e.g. behavior of grep, etc.).
+    This may fail for utf-16 files, but so would ASCII encoding.
+    >>> is_binary( string.printable )
+    False
+    >>> is_binary( '\\xce\\x94' )
+    False
+    >>> is_binary( '\\000' )
+    True
+    """
+    if binary_chars is None:
+        binary_chars = BINARY_CHARS
+    for binary_char in binary_chars:
+        if binary_char in value:
+            return True
+    return False
+
+def get_charset_from_http_headers( headers, default=None ):
+    rval = headers.get('content-type', None )
+    if rval and 'charset=' in rval:
+        rval = rval.split('charset=')[-1].split(';')[0].strip()
+        if rval:
+            return rval
+    return default
+
 def synchronized(func):
     """This wrapper will serialize access to 'func' to a single thread. Use it as a decorator."""
     def caller(*params, **kparams):
@@ -333,6 +362,17 @@
     else:
         return amount[0:sfs] + '0'*(len(amount) - sfs)
 
+def unicodify( value, encoding=DEFAULT_ENCODING, error='replace', default=None ):
+    """
+    Returns a unicode string or None
+    """
+    if isinstance( value, unicode ):
+        return value
+    try:
+        return unicode( value, encoding, error )
+    except:
+        return default
+
 def object_to_string( obj ):
     return binascii.hexlify( pickle.dumps( obj, 2 ) )
 
@@ -502,7 +542,7 @@
 
 def recursively_stringify_dictionary_keys( d ):
     if isinstance(d, dict):
-        return dict([(k.encode('utf-8'), recursively_stringify_dictionary_keys(v)) for k,v in d.iteritems()])
+        return dict([(k.encode( DEFAULT_ENCODING ), recursively_stringify_dictionary_keys(v)) for k,v in d.iteritems()])
     elif isinstance(d, list):
         return [recursively_stringify_dictionary_keys(x) for x in d]
     else:
@@ -622,7 +662,7 @@
     Sends an email.
     """
     to = listify( to )
-    msg = MIMEText( body )
+    msg = MIMEText( body.encode( 'ascii', 'replace' ) )
    msg[ 'To' ] = ', '.join( to )
    msg[ 'From' ] = frm
    msg[ 'Subject' ] = subject
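A few throwaway calls against the helpers added above (a sketch only; it assumes galaxy.util is importable and the header dict is illustrative):

    from galaxy import util

    print util.is_binary( 'plain text\n' )                      # False
    print util.is_binary( 'contains a null byte \000' )         # True
    print util.unicodify( '\xce\x94' ) == u'\u0394'             # True: utf-8 bytes decoded
    print util.unicodify( None, default=u'' ) == u''            # True: falls back to the default
    print util.get_charset_from_http_headers(
        { 'content-type': 'text/html; charset=ISO-8859-1' } )   # ISO-8859-1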
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -454,7 +454,7 @@
 def generate_clone_url( trans, repository ):
     """Generate the URL for cloning a repository."""
     tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
-    return '%s/repos/%s/%s' % ( tool_shed_url, repository.owner, repository.name )
+    return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
 def generate_datatypes_metadata( datatypes_config, metadata_dict ):
     """Update the received metadata_dict with information from the parsed datatypes_config."""
     tree = ElementTree.parse( datatypes_config )
@@ -993,7 +993,7 @@
             break
     return converter_path, display_path
 def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ):
-    url = '%s/repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % ( tool_shed_url, name, owner, changeset_revision )
+    url = url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % ( name, owner, changeset_revision ) )
     response = urllib2.urlopen( url )
     ctx_rev = response.read()
     response.close()
@@ -1221,8 +1221,8 @@
 def get_update_to_changeset_revision_and_ctx_rev( trans, repository ):
     """Return the changeset revision hash to which the repository can be updated."""
     tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
-    url = '%s/repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \
-        ( tool_shed_url, repository.name, repository.owner, repository.installed_changeset_revision )
+    url = url_join( tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \
+        ( repository.name, repository.owner, repository.installed_changeset_revision ) )
     try:
         response = urllib2.urlopen( url )
         encoded_update_dict = response.read()
@@ -1645,3 +1645,8 @@
     tool_shed_repository.status = status
     sa_session.add( tool_shed_repository )
     sa_session.flush()
+def url_join( *args ):
+    parts = []
+    for arg in args:
+        parts.append( arg.strip( '/' ) )
+    return '/'.join( parts )
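The new url_join() strips leading and trailing slashes from every piece before joining, which is why the call sites above no longer need to care whether tool_shed_url ends in a slash. A quick standalone illustration (the URL and names are made up):

    def url_join( *args ):
        # Copy of the helper added above, shown on its own for clarity.
        parts = []
        for arg in args:
            parts.append( arg.strip( '/' ) )
        return '/'.join( parts )

    print url_join( 'http://toolshed.g2.bx.psu.edu/', '/repos/', 'owner', 'name' )
    # http://toolshed.g2.bx.psu.edu/repos/owner/name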
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/visualization/phyloviz/__init__.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/__init__.py
@@ -0,0 +1,1 @@
+__author__ = 'Tomithy'
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/visualization/phyloviz/baseparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/baseparser.py
@@ -0,0 +1,125 @@
+import json
+
+class Node(object):
+    """Node class of PhyloTree, which represents a CLAUDE in a phylogenetic tree"""
+    def __init__(self, nodeName, **kwargs):
+        """Creates a node and adds in the typical annotations"""
+        self.name, self.id = nodeName, kwargs.get("id", 0)
+        self.depth = kwargs.get("depth", 0)
+        self.children = []
+
+        self.isInternal = kwargs.get("isInternal", 0)
+        self.length, self.bootstrap = kwargs.get("length", 0), kwargs.get("bootstrap", None)
+        self.events = kwargs.get("events", "")
+
+        # clean up boot strap values
+        if self.bootstrap == -1:
+            self.bootstrap = None
+
+    def addChildNode(self, child):
+        """Adds a child node to the current node"""
+        if isinstance(child, Node):
+            self.children.append(child)
+        else:
+            self.children += child
+
+
+    def __str__(self):
+        return self.name + " id:" + str(self.id) + ", depth: " + str(self.depth)
+
+
+    def toJson(self):
+        """Converts the data in the node to a dict representation of json"""
+        thisJson = {
+            "name" : self.name,
+            "id" : self.id,
+            "depth" : self.depth,
+            "dist" : self.length
+        }
+        thisJson = self.addChildrenToJson(thisJson)
+        thisJson = self.addMiscToJson(thisJson)
+        return thisJson
+
+    def addChildrenToJson(self, jsonDict):
+        """Needs a special method to addChildren, such that the key does not appear in the Jsondict when the children is empty
+        this requirement is due to the layout algorithm used by d3 layout for hiding subtree """
+        if len(self.children) > 0:
+            children = [ node.toJson() for node in self.children]
+            jsonDict["children"] = children
+        return jsonDict
+
+
+    def addMiscToJson(self, jsonDict):
+        """Adds other misc attributes to json if they are present"""
+        if not self.events == "":
+            jsonDict["events"] = self.events
+        if not self.bootstrap == None:
+            jsonDict["bootstrap"] = self.bootstrap
+        return jsonDict
+
+
+
+class PhyloTree(object):
+    """Standardized python based class to represent the phylogenetic tree parsed from different
+    phylogenetic file formats."""
+
+    def __init__(self):
+        self.root, self.rootAttr = None, {}
+        self.nodes = {}
+        self.title = None
+        self.id = 1
+
+    def addAttributesToRoot(self, attrDict):
+        """Adds attributes to root, but first we put it in a temp store and bind it with root when .toJson is called"""
+        for key, value in attrDict.items():
+            self.rootAttr[key] = value
+
+    def makeNode(self, nodeName, **kwargs):
+        """Called to make a node within PhyloTree, arbitrary kwargs can be passed to annotate nodes
+        Tracks the number of nodes via internally incremented id"""
+        kwargs["id"] = self.id
+        self.id += 1
+        return Node(nodeName, **kwargs)
+
+    def addRoot(self, root):
+        """Creates a root for phyloTree"""
+        assert isinstance(root, Node)
+        root.parent = None
+        self.root = root
+
+    def generateJsonableDict(self):
+        """Changes itself into a dictonary by recurssively calling the tojson on all its nodes. Think of it
+        as a dict in an array of dict in an array of dict and so on..."""
+        jsonTree = ""
+        if self.root:
+            assert isinstance(self.root, Node)
+            jsonTree = self.root.toJson()
+            for key, value in self.rootAttr.items():
+                # transfer temporary stored attr to root
+                jsonTree[key] = value
+        else:
+            raise Exception("Root is not assigned!")
+        return jsonTree
+
+
+
+class Base_Parser(object):
+    """Base parsers contain all the methods to handle phylogeny tree creation and
+    converting the data to json that all parsers should have"""
+
+    def __init__(self):
+        self.phyloTrees = []
+
+    def parseFile(self, filePath):
+        """Base method that all phylogeny file parser should have"""
+        raise Exception("Base method for phylogeny file parsers is not implemented")
+
+    def toJson(self, jsonDict):
+        """Convenience method to get a json string from a python json dict"""
+        return json.dumps(jsonDict)
+
+    def _writeJsonToFile(self, filepath, json):
+        """Writes the file out to the system"""
+        f = open(filepath, "w")
+        f.writelines(json)
+        f.close()
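A small sketch of how the classes above fit together, building a two-leaf tree by hand and dumping it as JSON (it assumes the module path introduced in this changeset; names and values are made up):

    from galaxy.visualization.phyloviz.baseparser import Base_Parser, PhyloTree

    tree = PhyloTree()
    root = tree.makeNode( "root", depth=0, isInternal=True )
    root.addChildNode( tree.makeNode( "A", depth=1, length=0.3 ) )
    root.addChildNode( tree.makeNode( "B", depth=1, length=0.7, bootstrap=0.9 ) )
    tree.addRoot( root )

    parser = Base_Parser()
    print parser.toJson( tree.generateJsonableDict() )
    # -> {"name": "root", ..., "children": [{"name": "A", ...}, {"name": "B", ..., "bootstrap": 0.9}]}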
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/visualization/phyloviz/newickparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/newickparser.py
@@ -0,0 +1,185 @@
+from baseparser import Base_Parser, PhyloTree
+import re
+
+class Newick_Parser(Base_Parser):
+ """For parsing trees stored in the newick format (.nhx)
+ It is necessarily more complex because this parser is later extended b=
y Nexus for parsing newick as well.."""
+
+
+ def __init__(self):
+ super(Newick_Parser, self).__init__()
+
+
+ def parseFile(self, filePath):
+ """Parses a newick file to obtain the string inside. Returns: json=
ableDict"""
+ with open(filePath, "r") as newickFile:
+ newickString =3D newickFile.read()
+ newickString =3D newickString.replace("\n", "").replace("\r", =
"")
+ return [self.parseData(newickString)], "Success"
+
+
+ def parseData(self, newickString):
+ """To be called on a newickString directly to parse it. Returns: j=
sonableDict"""
+ return self._parseNewickToJson(newickString)
+
+
+ def _parseNewickToJson(self, newickString, treeName=3DNone, nameMap=3D=
None):
+ """parses a newick representation of a tree into a PhyloTree data =
structure,
+ which can be easily converted to json"""
+ self.phyloTree =3D PhyloTree()
+ newickString =3D self.cleanNewickString(newickString)
+ if nameMap:
+ newickString =3D self._mapName(newickString, nameMap)
+
+ self.phyloTree.root =3D self.parseNode(newickString, 0)
+ if nameMap:
+ self.phyloTree.addAttributesToRoot({"treeName": treeName})
+
+ return self.phyloTree.generateJsonableDict()
+
+
+ def cleanNewickString(self, rawNewick):
+ """removing semi colon, and illegal json characters (\,',") and wh=
ite spaces"""
+ return re.sub(r'\s|;|\"|\'|\\', '', rawNewick)
+
+
+ def _makeNodesFromString(self, string, depth):
+ """elements separated by comma could be empty"""
+
+ if string.find("(") !=3D -1:
+ raise Exception("Tree is not well form, location: " + string)
+
+ childrenString =3D string.split(",")
+ childrenNodes =3D []
+
+ for childString in childrenString:
+ if len(childString) =3D=3D 0:
+ continue
+ nodeInfo =3D childString.split(":")
+ name, length, bootstrap =3D "", None, -1
+ if len(nodeInfo) =3D=3D 2: # has length info
+ length =3D nodeInfo[1]
+ # checking for bootstap values
+ name =3D nodeInfo[0]
+ try: # Nexus may bootstrap in names position
+ name =3D float(name)
+ if 0<=3D name <=3D 1:
+ bootstrap =3D name
+ elif 1 <=3D name <=3D 100:
+ bootstrap =3D name / 100
+ name =3D ""
+ except ValueError:
+ name =3D nodeInfo[0]
+ else:
+ name =3D nodeInfo[0] # string only contains name
+ node =3D self.phyloTree.makeNode(name, length=3Dlength, depth=
=3Ddepth, bootstrap=3D bootstrap)
+ childrenNodes +=3D [node]
+ return childrenNodes
+
+
+
+ def _mapName(self, newickString, nameMap):
+ """
+ Necessary to replace names of terms inside nexus representation
+ Also, its here because Mailaud's doesnt deal with id_strings outsi=
de of quotes(" ")
+ """
+ newString =3D ""
+ start =3D 0
+ end =3D 0
+
+ for i in xrange(len(newickString)):
+ if newickString[i] =3D=3D "(" or newickString[i] =3D=3D ",":
+ if re.match(r"[,(]", newickString[i+1:]):
+ continue
+ else:
+ end =3D i + 1
+ # i now refers to the starting position of the term to=
be replaced,
+ # we will next find j which is the ending pos of the t=
erm
+ for j in xrange(i+1, len(newickString)):
+ enclosingSymbol =3D newickString[j] # the immedi=
ate symbol after a common or left bracket which denotes the end of a term
+ if enclosingSymbol =3D=3D ")" or enclosingSymbol =
=3D=3D ":" or enclosingSymbol =3D=3D ",":
+ termToReplace =3D newickString[end:j]
+
+ newString +=3D newickString[start : end] + na=
meMap[termToReplace] #+ "'" "'" +
+ start =3D j
+ break
+
+ newString +=3D newickString[start:]
+ return newString
+
+
+ def parseNode(self, string, depth):
+ """ Recursive method for parsing newick string, works by stripping=
down the string into substring
+ of newick contained with brackers, which is used to call itself.
+ Eg ... ( A, B, (D, E)C, F, G ) ...
+ We will make the preceeding nodes first A, B, then the internal no=
de C, its children D, E,
+ and finally the succeeding nodes F, G"""
+
+ # Base case where there is only an empty string
+ if string =3D=3D "":
+ return
+ # Base case there its only an internal claude
+ if string.find("(") =3D=3D -1:
+ return self._makeNodesFromString(string, depth)
+
+ nodes, children =3D [], [] # nodes refer to the nodes on this=
level, children refers to the child of the
+ start =3D 0
+ lenOfPreceedingInternalNodeString =3D 0
+ bracketStack =3D []
+
+ for j in xrange(len(string)):
+ if string[j] =3D=3D "(": #finding the positions of all the =
open brackets
+ bracketStack.append(j)
+ continue
+ if string[j] =3D=3D ")": #finding the positions of all the =
closed brackets to extract claude
+ i =3D bracketStack.pop()
+
+ if len(bracketStack) =3D=3D 0: # is child of current node
+
+ InternalNode =3D None
+
+ #First flat call to make nodes of the same depth but f=
rom the preceeding string.
+ startSubstring =3D string[start + lenOfPreceedingInter=
nalNodeString: i]
+ preceedingNodes =3D self._makeNodesFromString(startSu=
bstring, depth)
+ nodes +=3D preceedingNodes
+
+ # Then We will try to see if the substring has any int=
ernal nodes first, make it then make nodes preceeding it and succeeding it.
+ if j + 1 < len(string):
+ stringRightOfBracket =3D string[j+1:] # Eg. '=
(b:0.4,a:0.3)c:0.3, stringRightOfBracket =3D c:0.3
+ match =3D re.search(r"[\)\,\(]", stringRightOfBrac=
ket)
+ if match:
+ indexOfNextSymbol =3D match.start()
+ stringRepOfInternalNode =3D stringRightOfBrack=
et[:indexOfNextSymbol]
+ internalNodes =3D self._makeNodesFromString( s=
tringRepOfInternalNode, depth)
+ if len(internalNodes) > 0:
+ InternalNode =3D internalNodes[0]
+ lenOfPreceedingInternalNodeString =3D len(stri=
ngRepOfInternalNode)
+ else: # sometimes the node can be the last eleme=
nt of a string
+ InternalNode =3D self._makeNodesFromString(str=
ing[j+1:], depth)[0]
+ lenOfPreceedingInternalNodeString =3D len(stri=
ng) - j
+ if InternalNode =3D=3D None: #creating a generic=
node if it is unnamed
+ InternalNode =3D self.phyloTree.makeNode( "", dept=
h=3Ddepth, isInternal=3DTrue ) #"internal-" + str(depth)
+ lenOfPreceedingInternalNodeString =3D 0
+
+ # recussive call to make the internal claude
+ childSubString =3D string[ i + 1 : j ]
+ InternalNode.addChildNode(self.parseNode(childSubStrin=
g, depth + 1))
+
+ nodes.append(InternalNode) # we append the internal n=
ode later to preserve order
+
+ start =3D j + 1
+ continue
+
+ if depth =3D=3D 0: # if its the root node, we do nothing about =
it and return
+ return nodes[0]
+
+ # Adding last most set of children
+ endString =3D string[start:]
+ if string[start-1] =3D=3D ")": # if the symbol belongs to an inte=
rnal node which is created previously, then we remove it from the string le=
ft to parse
+ match =3D re.search(r"[\)\,\(]", endString)
+ if match:
+ endOfNodeName =3D start + match.start() + 1
+ endString =3D string[endOfNodeName:]
+ nodes +=3D self._makeNodesFromString(endString, depth)
+
+ return nodes
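A minimal usage sketch for the Newick_Parser above (it assumes the module path in this changeset; the tree string is made up):

    from galaxy.visualization.phyloviz.newickparser import Newick_Parser

    parser = Newick_Parser()
    # parseData() works on a newick string directly; parseFile() reads one from disk.
    json_dict = parser.parseData( "((A:0.1,B:0.2)C:0.05,D:0.3);" )
    print json_dict["children"][0]["name"]   # 'C', the named internal node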
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/visualization/phyloviz/nexusparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/nexusparser.py
@@ -0,0 +1,107 @@
+from newickparser import Newick_Parser
+import re
+
+MAX_READLINES =3D 200000
+
+
+class Nexus_Parser(Newick_Parser):
+
+ def __init__(self):
+ super(Nexus_Parser, self).__init__()
+
+ def parseFile(self, filePath):
+ """passes a file and extracts its Nexus content."""
+ return self.parseNexus(filePath)
+
+
+ def parseNexus(self, filename):
+ """ Nexus data is stored in blocks between a line starting with be=
gin and another line starting with end;
+ Commends inside square brackets are to be ignored,
+ For more information: http://wiki.christophchamp.com/index.php/NEX=
US_file_format
+ Nexus can store multiple trees
+ """
+
+ with open( filename, "rt") as nex_file:
+ nexlines =3D nex_file.readlines()
+
+ rowCount =3D 0
+ inTreeBlock =3D False # sentinel to check if we are in a t=
ree block
+ intranslateBlock =3D False # sentinel to check if we are in the=
translate region of the tree. Stores synonyms of the labellings
+ self.inCommentBlock =3D False
+ self.nameMapping =3D None # stores mapping representation us=
ed in nexus format
+ treeNames =3D []
+
+ for line in nexlines:
+ line =3D line.replace(";\n", "")
+ lline =3D line.lower()
+
+ if rowCount > MAX_READLINES or (not nex_file) :
+ break
+ rowCount +=3D1
+ # We are only interested in the tree block.
+ if "begin" in lline and "tree" in lline and not inTreeBlock:
+ inTreeBlock =3D True
+ continue
+ if inTreeBlock and "end" in lline[:3]:
+ inTreeBlock, currPhyloTree =3D False, None
+ continue
+
+ if inTreeBlock:
+
+ if "title" in lline: # Adding title to the tree
+ titleLoc =3D lline.find("title")
+ title =3D line[titleLoc + 5:].replace(" ", "")
+
+ continue
+
+ if "translate" in lline:
+ intranslateBlock =3D True
+ self.nameMapping =3D {}
+ continue
+
+ if intranslateBlock:
+ mappingLine =3D self.splitLinebyWhitespaces(line)
+ key, value =3D mappingLine[1], mappingLine[2].replace(=
",", "").replace("'","") #replacing illegal json characters
+ self.nameMapping[key] =3D value
+
+ # Extracting newick Trees
+ if "tree" in lline:
+ intranslateBlock =3D False
+
+ treeLineCols =3D self.splitLinebyWhitespaces(line)
+ treeName, newick =3D treeLineCols[2], treeLineCols[-1]
+
+ if newick =3D=3D "": # Empty lines can be found in =
tree blocks
+ continue
+
+ currPhyloTree =3D self._parseNewickToJson(newick, tree=
Name, nameMap=3Dself.nameMapping)
+
+ self.phyloTrees.append(currPhyloTree)
+ treeIndex =3D len(self.phyloTrees) - 1
+ treeNames.append( (treeName, treeIndex) ) # appendi=
ng name of tree, and its index
+ continue
+
+ return self.phyloTrees, treeNames
+
+
+ def splitLinebyWhitespaces(self, line):
+ """replace tabs and write spaces to a single write space, so we ca=
n properly split it."""
+ return re.split(r"\s+", line)
+
+
+ def checkComments(self, line):
+ """Check to see if the line/lines is a comment."""
+ if not self.inCommentBlock:
+ if "[" in line:
+ if "]" not in line:
+ self.inCommentBlock =3D True
+ else:
+ return "Nextline" # need to move on to the nextline =
after getting out of comment
+ else :
+ if "]" in line:
+ if line.rfind("[") > line.rfind("]"):
+ pass # a comment block is closed but an=
other is open.
+ else:
+ self.inCommentBlock =3D False
+ return "Nextline" # need to move on to the nextline =
after getting out of comment
+ return ""
\ No newline at end of file
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
@@ -0,0 +1,35 @@
+from newickparser import Newick_Parser
+from nexusparser import Nexus_Parser
+from phyloxmlparser import Phyloxml_Parser
+
+class Phyloviz_DataProvider(object):
+
+    def __init__(self):
+        pass
+
+    def parseFile(self, filepath, fileExt):
+        """returns [trees], meta
+        Trees are actually an array of JsonDicts. It's usually one tree, except in the case of Nexus
+        """
+        jsonDicts, meta = [], {}
+        try:
+            if fileExt == "nhx": # parses newick files
+                newickParser = Newick_Parser()
+                jsonDicts, parseMsg = newickParser.parseFile(filepath)
+            elif fileExt == "phyloxml": # parses phyloXML files
+                phyloxmlParser = Phyloxml_Parser()
+                jsonDicts, parseMsg = phyloxmlParser.parseFile(filepath)
+            elif fileExt == "nex": # parses nexus files
+                nexusParser = Nexus_Parser()
+                jsonDicts, parseMsg = nexusParser.parseFile(filepath)
+                meta["trees"] = parseMsg
+            else:
+                raise Exception("File type is not supported")
+
+            meta["msg"] = parseMsg
+
+        except Exception:
+            jsonDicts, meta["msg"] = [], "Parse failed"
+
+        return jsonDicts, meta
+
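Putting the provider to work is a one-liner per file; a hypothetical call (the path is illustrative, and 'nhx', 'nex' and 'phyloxml' are the supported extensions):

    from galaxy.visualization.phyloviz.phyloviz_dataprovider import Phyloviz_DataProvider

    provider = Phyloviz_DataProvider()
    trees, meta = provider.parseFile( '/tmp/example_tree.nhx', 'nhx' )
    print meta["msg"]    # 'Success' on a clean parse, 'Parse failed' otherwise
    print len( trees )   # number of trees found (usually 1, more for Nexus files)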
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/visualization/phyloviz/phyloxmlparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/phyloxmlparser.py
@@ -0,0 +1,134 @@
+from baseparser import Base_Parser, PhyloTree, Node
+from xml.etree import ElementTree
+
+class Phyloxml_Parser(Base_Parser):
+ """Parses a phyloxml file into a json file that will be passed to Phyl=
oViz for display"""
+
+ def __init__(self):
+ super(Phyloxml_Parser, self).__init__()
+ self.phyloTree =3D PhyloTree()
+ self.tagsOfInterest =3D {
+ "clade": "",
+ "name" : "name",
+ "branch_length" : "length",
+ "confidence" : "bootstrap",
+ "events" : "events"
+ }
+
+ def parseFile(self, filePath):
+ """passes a file and extracts its Phylogeny Tree content."""
+ phyloXmlFile =3D open(filePath, "r")
+
+ xmlTree =3D ElementTree.parse(phyloXmlFile)
+ xmlRoot =3D xmlTree.getroot()[0]
+ self.nameSpaceIndex =3D xmlRoot.tag.rfind("}") + 1 # used later by=
the clean tag method to remove the name space in every element.tag
+
+ phyloRoot =3D None
+ for child in xmlRoot:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag =3D=3D "clade":
+ phyloRoot =3D child
+ elif childTag =3D=3D "name":
+ self.phyloTree.title =3D child.text
+
+ self.phyloTree.root =3D self.parseNode(phyloRoot, 0)
+ jsonDict =3D self.phyloTree.generateJsonableDict()
+ return [jsonDict], "Success"
+
+
+ def parseNode(self, node, depth):
+ """Parses any node within a phyloxml tree and looks out for claude=
, which signals the creation of
+ nodes - internal OR leaf"""
+ assert isinstance(node, etree._Element)
+
+ tag =3D self.cleanTag(node.tag)
+ if not tag =3D=3D "clade":
+ return None
+ hasInnerClade =3D False
+
+ # peeking once for parent and once for child to check if the node =
is internal
+ for child in node:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag =3D=3D "clade":
+ hasInnerClade =3D True
+ break
+
+ if hasInnerClade: # this node is an internal node
+ currentNode =3D self._makeInternalNode(node, depth=3D depth)
+ for child in node:
+ child =3D self.parseNode(child, depth + 1)
+ if isinstance(child, Node):
+ currentNode.addChildNode(child)
+
+ else: # this node is a leaf node
+ currentNode =3D self._makeLeafNode(node, depth=3Ddepth+1)
+
+ return currentNode
+
+
+ def _makeLeafNode(self, leafNode, depth =3D 0 ):
+ """Makes leaf nodes by calling Phylotree methods"""
+ node =3D {}
+ for child in leafNode:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag in self.tagsOfInterest:
+ key =3D self.tagsOfInterest[childTag] # need to map phy=
loxml terms to ours
+ node[key] =3D child.text
+
+ node["depth"] =3D depth
+ return self.phyloTree.makeNode(self._getNodeName(leafNode), **node)
+
+ def _getNodeName(self, node, depth=3D-1):
+ """Gets the name of a claude. It handles the case where a taxonomy=
node is involved"""
+
+ def getTagFromTaxonomyNode(node):
+ """Returns the name of a taxonomy node. A taxonomy node have t=
o be treated differently as the name
+ is embedded one level deeper"""
+ phyloxmlTaxoNames =3D {
+ "common_name" : "",
+ "scientific_name" : "",
+ "code" : ""
+ }
+ for child in node:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag in phyloxmlTaxoNames:
+ return child.text
+ return ""
+
+ nodeName =3D ""
+ for child in node:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag =3D=3D "name" :
+ nodeName =3D child.text
+ break
+ elif childTag =3D=3D "taxonomy":
+ nodeName =3D getTagFromTaxonomyNode(child)
+ break
+
+ return nodeName
+
+
+ def _makeInternalNode(self, internalNode, depth=3D0):
+ """ Makes an internal node from an element object that is gurantee=
d to be a parent node.
+ Gets the value of interests like events and appends it to a custom=
node object that will be passed to PhyloTree to make nodes
+ """
+ node =3D {}
+ for child in internalNode:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag =3D=3D "clade":
+ continue
+ elif childTag in self.tagsOfInterest:
+ if childTag =3D=3D "events": # events is nested 1 more =
level deeper than others
+ key, text =3D "events", self.cleanTag(child[0].tag)
+ else:
+ key =3D self.tagsOfInterest[childTag]
+ text =3D child.text
+ node[key] =3D text
+
+
+ return self.phyloTree.makeNode(self._getNodeName(internalNode, dep=
th), **node)
+
+
+ def cleanTag(self, tagString):
+ return tagString[self.nameSpaceIndex:]
+ =20
\ No newline at end of file
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -374,7 +374,7 @@
def browse_tool_shed( self, trans, **kwd ):
tool_shed_url =3D kwd[ 'tool_shed_url' ]
galaxy_url =3D url_for( '/', qualified=3DTrue )
- url =3D '%srepository/browse_valid_categories?galaxy_url=3D%s&weba=
pp=3Dgalaxy' % ( tool_shed_url, galaxy_url )
+ url =3D url_join( tool_shed_url, 'repository/browse_valid_categori=
es?galaxy_url=3D%s&webapp=3Dgalaxy' % ( galaxy_url ) )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
@@ -392,8 +392,9 @@
# Send a request to the relevant tool shed to see if there are any=
updates.
repository =3D get_repository( trans, kwd[ 'id' ] )
tool_shed_url =3D get_url_from_repository_tool_shed( trans.app, re=
pository )
- url =3D '%s/repository/check_for_updates?galaxy_url=3D%s&name=3D%s=
&owner=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % \
- ( tool_shed_url, url_for( '/', qualified=3DTrue ), repository.=
name, repository.owner, repository.changeset_revision )
+ url =3D url_join( tool_shed_url,
+ 'repository/check_for_updates?galaxy_url=3D%s&name=
=3D%s&owner=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % \
+ ( url_for( '/', qualified=3DTrue ), repository.nam=
e, repository.owner, repository.changeset_revision ) )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
@@ -467,14 +468,14 @@
def find_tools_in_tool_shed( self, trans, **kwd ):
tool_shed_url =3D kwd[ 'tool_shed_url' ]
galaxy_url =3D url_for( '/', qualified=3DTrue )
- url =3D '%srepository/find_tools?galaxy_url=3D%s&webapp=3Dgalaxy' =
% ( tool_shed_url, galaxy_url )
+ url =3D url_join( tool_shed_url, 'repository/find_tools?galaxy_url=
=3D%s&webapp=3Dgalaxy' % galaxy_url )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
def find_workflows_in_tool_shed( self, trans, **kwd ):
tool_shed_url =3D kwd[ 'tool_shed_url' ]
galaxy_url =3D url_for( '/', qualified=3DTrue )
- url =3D '%srepository/find_workflows?galaxy_url=3D%s&webapp=3Dgala=
xy' % ( tool_shed_url, galaxy_url )
+ url =3D url_join( tool_shed_url, 'repository/find_workflows?galaxy=
_url=3D%s&webapp=3Dgalaxy' % galaxy_url )
return trans.response.send_redirect( url )
def generate_tool_path( self, repository_clone_url, changeset_revision=
):
"""
@@ -489,7 +490,7 @@
tool_shed_url =3D items[ 0 ]
repo_path =3D items[ 1 ]
tool_shed_url =3D clean_tool_shed_url( tool_shed_url )
- return '%s/repos%s/%s' % ( tool_shed_url, repo_path, changeset_rev=
ision )
+ return url_join( tool_shed_url, 'repos', repo_path, changeset_revi=
sion )
@web.json
@web.require_admin
def get_file_contents( self, trans, file_path ):
@@ -634,8 +635,9 @@
tool_shed_repository,
trans.model.ToolShedRe=
pository.installation_status.SETTING_TOOL_VERSIONS )
tool_shed_url =3D get_url_from_repository_tool_shed( trans=
.app, tool_shed_repository )
- url =3D '%s/repository/get_tool_versions?name=3D%s&owner=
=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % \
- ( tool_shed_url, tool_shed_repository.name, tool_shed_=
repository.owner, tool_shed_repository.changeset_revision )
+ url =3D url_join( tool_shed_url,
+ '/repository/get_tool_versions?name=3D%s&o=
wner=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % \
+ ( tool_shed_repository.name, tool_shed_rep=
ository.owner, tool_shed_repository.changeset_revision ) )
response =3D urllib2.urlopen( url )
text =3D response.read()
response.close()
@@ -954,7 +956,9 @@
repository_ids =3D kwd.get( 'repository_ids', None )
changeset_revisions =3D kwd.get( 'changeset_revisions', None )
# Get the information necessary to install each repository.
- url =3D '%srepository/get_repository_information?repository_id=
s=3D%s&changeset_revisions=3D%s&webapp=3Dgalaxy' % ( tool_shed_url, reposit=
ory_ids, changeset_revisions )
+ url =3D url_join( tool_shed_url,
+ 'repository/get_repository_information?reposit=
ory_ids=3D%s&changeset_revisions=3D%s&webapp=3Dgalaxy' % \
+ ( repository_ids, changeset_revisions ) )
response =3D urllib2.urlopen( url )
raw_text =3D response.read()
response.close()
@@ -1097,8 +1101,9 @@
name =3D repo_info_dict.keys()[ 0 ]
repo_info_tuple =3D repo_info_dict[ name ]
description, repository_clone_url, changeset_revision, ctx_rev=
, repository_owner, tool_dependencies =3D repo_info_tuple
- url =3D '%srepository/get_readme?name=3D%s&owner=3D%s&changese=
t_revision=3D%s&webapp=3Dgalaxy' % \
- ( tool_shed_url, name, repository_owner, changeset_revisio=
n )
+ url =3D url_join( tool_shed_url,
+ 'repository/get_readme?name=3D%s&owner=3D%s&ch=
angeset_revision=3D%s&webapp=3Dgalaxy' % \
+ ( name, repository_owner, changeset_revision )=
)
response =3D urllib2.urlopen( url )
raw_text =3D response.read()
response.close()
@@ -1273,8 +1278,9 @@
tool_shed =3D get_tool_shed_from_clone_url( repository_clone_url )
# Get all previous change set revisions from the tool shed for the=
repository back to, but excluding, the previous valid changeset
# revision to see if it was previously installed using one of them.
- url =3D '%s/repository/previous_changeset_revisions?galaxy_url=3D%=
s&name=3D%s&owner=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % \
- ( tool_shed_url, url_for( '/', qualified=3DTrue ), repository_=
name, repository_owner, changeset_revision )
+ url =3D url_join( tool_shed_url,
+ 'repository/previous_changeset_revisions?galaxy_ur=
l=3D%s&name=3D%s&owner=3D%s&changeset_revision=3D%s&webapp=3Dgalaxy' % \
+ ( url_for( '/', qualified=3DTrue ), repository_nam=
e, repository_owner, changeset_revision ) )
response =3D urllib2.urlopen( url )
text =3D response.read()
response.close()
@@ -1350,8 +1356,9 @@
# Get the tool_versions from the tool shed for each tool in the in=
stalled change set.
repository =3D get_repository( trans, kwd[ 'id' ] )
tool_shed_url =3D get_url_from_repository_tool_shed( trans.app, re=
pository )
- url =3D '%s/repository/get_tool_versions?name=3D%s&owner=3D%s&chan=
geset_revision=3D%s&webapp=3Dgalaxy' % \
- ( tool_shed_url, repository.name, repository.owner, repository=
.changeset_revision )
+ url =3D url_join( tool_shed_url,
+ 'repository/get_tool_versions?name=3D%s&owner=3D%s=
&changeset_revision=3D%s&webapp=3Dgalaxy' % \
+ ( repository.name, repository.owner, repository.ch=
angeset_revision ) )
response =3D urllib2.urlopen( url )
text =3D response.read()
response.close()
@@ -1522,7 +1529,7 @@
def __generate_clone_url( self, trans, repository ):
"""Generate the URL for cloning a repository."""
tool_shed_url =3D get_url_from_repository_tool_shed( trans.app, re=
pository )
- return '%s/repos/%s/%s' % ( tool_shed_url, repository.owner, repos=
itory.name )
+ return url_join( tool_shed_url, 'repos', repository.owner, reposit=
ory.name )
=20
## ---- Utility methods --------------------------------------------------=
-----
=20
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/web/controllers/data_admin.py
--- a/lib/galaxy/web/controllers/data_admin.py
+++ b/lib/galaxy/web/controllers/data_admin.py
@@ -148,7 +148,8 @@
dbkey =3D build[0]
longname =3D build[1]
break =20
- assert dbkey is not '?', 'That build was not found'
+ if dbkey =3D=3D '?':
+ return trans.fill_template( '/admin/data_admin/generic_err=
or.mako', message=3D'An invalid build was specified.' )
ftp =3D ftplib.FTP('hgdownload.cse.ucsc.edu')
ftp.login('anonymous', trans.get_user().email)
checker =3D []
@@ -189,7 +190,8 @@
dbkeys=3Dtrans.ucsc_builds )
elif source =3D=3D 'Ensembl':
dbkey =3D params.get( 'ensembl_dbkey', None )
- assert dbkey is not '?', 'That build was not found'
+ if dbkey =3D=3D '?':
+ return trans.fill_template( '/admin/data_admin/generic_err=
or.mako', message=3D'An invalid build was specified.' )
for build in trans.ensembl_builds:
if build[ 'dbkey' ] =3D=3D dbkey:
dbkey =3D build[ 'dbkey' ]
@@ -199,7 +201,7 @@
break
url =3D 'ftp://ftp.ensembl.org/pub/release-%s/fasta/%s/dna/%s.=
%s.%s.dna.toplevel.fa.gz' % ( release, pathname.lower(), pathname, dbkey, r=
elease )
else:
- return trans.fill_template( '/admin/data_admin/generic_error.m=
ako', message=3D'Somehow an invalid data source was specified.' )
+ return trans.fill_template( '/admin/data_admin/generic_error.m=
ako', message=3D'An invalid data source was specified.' )
if url is None:
return trans.fill_template( '/admin/data_admin/generic_error.m=
ako', message=3D'Unable to generate a valid URL with the specified paramete=
rs.' )
params =3D dict( protocol=3D'http', name=3Ddbkey, datatype=3D'fast=
a', url=3Durl, user=3Dtrans.user.id )
@@ -248,7 +250,8 @@
sa =3D trans.app.model.context.current
if jobtype =3D=3D 'liftover':
job =3D sa.query( model.TransferJob ).filter_by( id=3Djobid ).=
first()
- joblabel =3D 'Download liftOver'
+ liftover =3D trans.app.job_manager.deferred_job_queue.plugins[=
'LiftOverTransferPlugin'].get_job_status( jobid )
+ joblabel =3D 'Download liftOver (%s to %s)' % ( liftover.param=
s[ 'from_genome' ], liftover.params[ 'to_genome' ] )
elif jobtype =3D=3D 'transfer':
job =3D sa.query( model.TransferJob ).filter_by( id=3Djobid ).=
first()
joblabel =3D 'Download Genome'
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -203,12 +203,12 @@
job_id=3Djob.id,
job_tool_id=3Djob.tool_id,
job_command_line=3Djob.command_line,
- job_stderr=3Djob.stderr,
- job_stdout=3Djob.stdout,
- job_info=3Djob.info,
- job_traceback=3Djob.traceback,
+ job_stderr=3Dutil.unicodify( job.stderr ),
+ job_stdout=3Dutil.unicodify( job.stdout ),
+ job_info=3Dutil.unicodify( job.info ),
+ job_traceback=3Dutil.unicodify( job.tracebac=
k ),
email=3Demail,
- message=3Dmessage )
+ message=3Dutil.unicodify( message ) )
frm =3D to_address
# Check email a bit
email =3D email.strip()
@@ -644,7 +644,10 @@
dataset =3D self.get_dataset( trans, id, False, True )
if not dataset:
web.httpexceptions.HTTPNotFound()
- return self.get_item_annotation_str( trans.sa_session, trans.user,=
dataset )
+ annotation =3D self.get_item_annotation_str( trans.sa_session, tra=
ns.user, dataset )
+ if annotation and isinstance( annotation, unicode ):
+ annotation =3D annotation.encode( 'ascii', 'replace' ) #paste =
needs ascii here
+ return annotation
=20
@web.expose
def display_at( self, trans, dataset_id, filename=3DNone, **kwd ):
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/web/controllers/phyloviz.py
--- /dev/null
+++ b/lib/galaxy/web/controllers/phyloviz.py
@@ -0,0 +1,97 @@
+import pkg_resources
+pkg_resources.require( "bx-python" )
+
+from galaxy.util.json import to_json_string, from_json_string
+from galaxy.web.base.controller import *
+from galaxy.visualization.phyloviz.phyloviz_dataprovider import Phyloviz_D=
ataProvider
+
+
+class PhyloVizController( BaseUIController, UsesVisualizationMixin, UsesHi=
storyDatasetAssociationMixin, SharableMixin ):
+ """
+ Controller for phyloViz browser interface.
+ """
+ def __init__(self, app ):
+ BaseUIController.__init__( self, app )
+
+ @web.expose
+ @web.require_login()
+ def index( self, trans, dataset_id =3D None, **kwargs ):
+ """
+ The index method is called using phyloviz/ with a dataset id passe=
d in.
+ The relevant data set is then retrieved via get_json_from_datasetI=
d which interfaces with the parser
+ The json representation of the phylogenetic tree along with the co=
nfig is then written in the .mako template and passed back to the user
+ """
+ json, config =3D self.get_json_from_datasetId(trans, dataset_id)
+ config["saved_visualization"] =3D False
+ return trans.fill_template( "visualization/phyloviz.mako", data =
=3D json, config=3Dconfig)
+
+
+ @web.expose
+ def visualization(self, trans, id):
+ """
+ Called using a viz_id (id) to retrieved stored visualization data =
(in json format) and all the viz_config
+ """
+ viz =3D self.get_visualization(trans, id)
+ config =3D self.get_visualization_config(trans, viz)
+ config["saved_visualization"] =3D True
+ data =3D config["root"]
+
+ return trans.fill_template( "visualization/phyloviz.mako", data =
=3D data, config=3Dconfig)
+
+
+ @web.expose
+ @web.json
+ def load_visualization_json(self, trans, viz_id):
+ """
+ Though not used in current implementation, this provides user with=
a convenient method to retrieve the viz_data & viz_config via json.
+ """
+ viz =3D self.get_visualization(trans, viz_id)
+ viz_config =3D self.get_visualization_config(trans, viz)
+ viz_config["saved_visualization"] =3D True
+ return {
+ "data" : viz_config["root"],
+ "config" : viz_config
+ }
+
+
+ @web.expose
+ @web.json
+ def getJsonData(self, trans, dataset_id, treeIndex=3D0):
+ """
+ Method to retrieve data asynchronously via json format. Retriving =
from here rather than
+ making a direct datasets/ call allows for some processing and even=
t capturing
+ """
+ treeIndex =3D int(treeIndex)
+ json, config =3D self.get_json_from_datasetId(trans, dataset_id, t=
reeIndex)
+ packedJson =3D {
+ "data" : json,
+ "config" : config
+ }
+
+ return packedJson
+
+
+ def get_json_from_datasetId(self, trans, dataset_id, treeIndex=3D0):
+ """
+ For interfacing phyloviz controllers with phyloviz visualization d=
ata provider (parsers)
+ """
+ dataset =3D self.get_dataset(trans, dataset_id)
+ fileExt, filepath =3D dataset.ext, dataset.file_name # .name=
stores the name of the dataset from the orginal upload
+ json, config =3D "", {} # config contains propertie=
s of the tree and file
+
+ if fileExt =3D=3D "json":
+ something, json =3D self.get_data(dataset)
+ else:
+ try:
+ pd =3D Phyloviz_DataProvider()
+ json, config =3D pd.parseFile(filepath, fileExt)
+ json =3D json[treeIndex]
+ except Exception:
+ pass
+
+ config["title"] =3D dataset.display_name()
+ config["ext"] =3D fileExt
+ config["dataset_id"] =3D dataset_id
+ config["treeIndex"] =3D treeIndex
+
+ return json, config
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py
+++ b/lib/galaxy/web/controllers/visualization.py
@@ -16,6 +16,10 @@
             action = "paramamonster"
         elif item.type == "circster":
             action = "circster"
+        elif item.type == "phyloviz":
+            # Support phyloviz
+            controller = "phyloviz"
+            action = "visualization"
         return dict( controller=controller, action=action, id=item.id )
 
 # Grid definition
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -11,7 +11,7 @@
from galaxy.model.orm import *
from galaxy.util.shed_util import create_repo_info_dict, get_changectx_for=
_changeset, get_configured_ui, get_repository_file_contents, NOT_TOOL_CONFI=
GS
from galaxy.util.shed_util import open_repository_files_folder, reversed_l=
ower_upper_bounded_changelog, reversed_upper_bounded_changelog, strip_path
-from galaxy.util.shed_util import to_html_escaped, update_repository
+from galaxy.util.shed_util import to_html_escaped, update_repository, url_=
join
from galaxy.tool_shed.encoding_util import *
from common import *
=20
@@ -246,6 +246,25 @@
grids.GridAction( "User preferences", dict( controller=3D'user=
', action=3D'index', cntrller=3D'repository', webapp=3D'community' ) )
]
=20
+class WritableRepositoryListGrid( RepositoryListGrid ):
+ def build_initial_query( self, trans, **kwd ):
+ # TODO: improve performance by adding a db table associating users=
with repositories for which they have write access.
+ username =3D kwd[ 'username' ]
+ clause_list =3D []
+ for repository in trans.sa_session.query( self.model_class ):
+ allow_push_usernames =3D repository.allow_push.split( ',' )
+ if username in allow_push_usernames:
+ clause_list.append( self.model_class.table.c.id =3D=3D rep=
ository.id )
+ if clause_list:
+ return trans.sa_session.query( self.model_class ) \
+ .filter( or_( *clause_list ) ) \
+ .join( model.User.table ) \
+ .outerjoin( model.RepositoryCategoryAss=
ociation.table ) \
+ .outerjoin( model.Category.table )
+ # Return an empty query.
+ return trans.sa_session.query( self.model_class ) \
+ .filter( self.model_class.table.c.id < 0 )
+
class ValidRepositoryListGrid( RepositoryListGrid ):
class CategoryColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
@@ -393,6 +412,7 @@
email_alerts_repository_list_grid =3D EmailAlertsRepositoryListGrid()
category_list_grid =3D CategoryListGrid()
valid_category_list_grid =3D ValidCategoryListGrid()
+ writable_repository_list_grid =3D WritableRepositoryListGrid()
=20
def __add_hgweb_config_entry( self, trans, repository, repository_path=
):
# Add an entry in the hgweb.config file for a new repository. An =
entry looks something like:
@@ -519,12 +539,15 @@
repository_id =3D kwd.get( 'id', None )
repository =3D get_repository( trans, repository_id )
kwd[ 'f-email' ] =3D repository.user.email
- elif operation =3D=3D "my_repositories":
+ elif operation =3D=3D "repositories_i_own":
# Eliminate the current filters if any exist.
for k, v in kwd.items():
if k.startswith( 'f-' ):
del kwd[ k ]
kwd[ 'f-email' ] =3D trans.user.email
+ elif operation =3D=3D "writable_repositories":
+ kwd[ 'username' ] =3D trans.user.username
+ return self.writable_repository_list_grid( trans, **kwd )
elif operation =3D=3D "repositories_by_category":
# Eliminate the current filters if any exist.
for k, v in kwd.items():
@@ -726,9 +749,10 @@
update =3D 'true'
no_update =3D 'false'
else:
- # Start building up the url to redirect back to the calling Ga=
laxy instance.
- url =3D '%sadmin_toolshed/update_to_changeset_revision?tool_sh=
ed_url=3D%s' % ( galaxy_url, url_for( '/', qualified=3DTrue ) )
- url +=3D '&name=3D%s&owner=3D%s&changeset_revision=3D%s&latest=
_changeset_revision=3D' % ( repository.name, repository.user.username, chan=
geset_revision )
+ # Start building up the url to redirect back to the calling Ga=
laxy instance. =20
+ url =3D url_join( galaxy_url,
+ 'admin_toolshed/update_to_changeset_revision?t=
ool_shed_url=3D%s&name=3D%s&owner=3D%s&changeset_revision=3D%s&latest_chang=
eset_revision=3D' % \
+ ( url_for( '/', qualified=3DTrue ), repository=
.name, repository.user.username, changeset_revision ) )
if changeset_revision =3D=3D repository.tip:
# If changeset_revision is the repository tip, there are no ad=
ditional updates.
if from_update_manager:
@@ -1372,10 +1396,9 @@
"""Send the list of repository_ids and changeset_revisions to Gala=
xy so it can begin the installation process."""
galaxy_url =3D trans.get_cookie( name=3D'toolshedgalaxyurl' )
# Redirect back to local Galaxy to perform install.
- url =3D '%sadmin_toolshed/prepare_for_install' % galaxy_url
- url +=3D '?tool_shed_url=3D%s' % url_for( '/', qualified=3DTrue )
- url +=3D '&repository_ids=3D%s' % ','.join( util.listify( reposito=
ry_ids ) )
- url +=3D '&changeset_revisions=3D%s' % ','.join( util.listify( cha=
ngeset_revisions ) )
+ url =3D url_join( galaxy_url,
+ 'admin_toolshed/prepare_for_install?tool_shed_url=
=3D%s&repository_ids=3D%s&changeset_revisions=3D%s' % \
+ ( url_for( '/', qualified=3DTrue ), ','.join( util=
.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions )=
) ) )
return trans.response.send_redirect( url )
@web.expose
def load_invalid_tool( self, trans, repository_id, tool_config, change=
set_revision, **kwd ):
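
The new WritableRepositoryListGrid above decides write access by checking whether the requesting username appears in each repository's comma-separated allow_push field, then OR-ing the matching ids into a single filter. A minimal sketch of that selection logic, using plain (id, allow_push) tuples instead of Galaxy's SQLAlchemy models (helper and variable names below are illustrative only):

    # Illustrative sketch only -- plain tuples stand in for Galaxy's Repository model.
    def writable_repository_ids( repositories, username ):
        """Return the ids of repositories whose allow_push list contains username."""
        matching_ids = []
        for repo_id, allow_push in repositories:
            allow_push_usernames = allow_push.split( ',' )
            if username in allow_push_usernames:
                matching_ids.append( repo_id )
        return matching_ids

    repos = [ ( 1, 'alice,bob' ), ( 2, 'carol' ), ( 3, 'bob' ) ]
    print writable_repository_ids( repos, 'bob' )  # -> [1, 3]

When nothing matches, the grid deliberately returns a query filtered on id < 0 so the page renders an empty list rather than every repository.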
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 static/scripts/viz/phyloviz.js
--- /dev/null
+++ b/static/scripts/viz/phyloviz.js
@@ -0,0 +1,955 @@
+var UserMenuBase =3D Backbone.View.extend({
+ /**
+ * Base class for any menu that takes in user interaction. Contains input-checking methods.
+ */
+
+ className: 'UserMenuBase',
+
+ isAcceptableValue : function ($inputKey, min, max) {
+ /**
+ * Check that an input value is a number and falls within the range [min, max].
+ */
+ var self =3D this,
+ value =3D $inputKey.val(),
+ fieldName =3D $inputKey.attr("displayLabel") || $inputKey.attr=
("id").replace("phyloViz", "");
+
+ function isNumeric(n) {
+ return !isNaN(parseFloat(n)) && isFinite(n);
+ }
+
+ if (!isNumeric(value)){
+ alert(fieldName + " is not a number!");
+ return false;
+ }
+
+ if ( value > max){
+ alert(fieldName + " is too large.");
+ return false;
+ } else if ( value < min) {
+ alert(fieldName + " is too small.");
+ return false;
+ }
+ return true;
+ },
+
+ hasIllegalJsonCharacters : function($inputKey) {
+ /**
+ * Check whether a user-supplied string contains illegal characters that JSON cannot accept.
+ */
+ if ($inputKey.val().search(/"|'|\\/) !=3D=3D -1){
+ alert("Named fields cannot contain these illegal characters: d=
ouble quote(\"), single guote(\'), or back slash(\\). ");
+ return true;
+ }
+ return false;
+ }
+});
+
+
+function PhyloTreeLayout() {
+ /**
+ * -- Custom layout call for phyloViz to suit the needs of a phylogenetic tree.
+ * -- Specifically: 1) Nodes are displaced from their parent by their evolutionary distance multiplied by the depth separation.
+ *                  2) Nodes must keep their order after they have been expanded and contracted.
+ */
+
+ var self =3D this,
+ hierarchy =3D d3.layout.hierarchy().sort(null).value(null),
+ height =3D 360, // ! represents both the layout angle and the heig=
ht of the layout, in px
+ layoutMode =3D "Linear",
+ leafHeight =3D 18, // height of each individual leaf node
+ depthSeparation =3D 200, // separation between nodes of different =
depth, in px
+ leafIndex =3D 0, // change to a recursive call
+ defaultDist =3D 0.5, // tree defaults to 0.5 dist if no dist is sp=
ecified
+ maxTextWidth =3D 50; // maximum length of the text labels
+
+
+ self.leafHeight =3D function(inputLeafHeight){
+ if (typeof inputLeafHeight =3D=3D=3D "undefined"){ return leafHeig=
ht; }
+ else { leafHeight =3D inputLeafHeight; return self;}
+ };
+
+ self.layoutMode =3D function(mode){
+ if (typeof mode =3D=3D=3D "undefined"){ return layoutMode; }
+ else { layoutMode =3D mode; return self;}
+ };
+
+ self.layoutAngle =3D function(angle) { // changes the layout angle =
of the display, which is really changing the height
+ if (typeof angle =3D=3D=3D "undefined"){ return height; }
+ if (isNaN(angle) || angle < 0 || angle > 360) { return self; } // =
to use default if the user puts in strange values
+ else { height =3D angle; return self;}
+ };
+
+ self.separation =3D function(dist){ // changes the dist between the =
nodes of different depth
+ if (typeof dist =3D=3D=3D "undefined"){ return depthSeparation; }
+ else { depthSeparation =3D dist; return self;}
+ };
+
+ self.links =3D function (nodes) { // uses d3 native method to gene=
rate links. Done.
+ return d3.layout.tree().links(nodes);
+ };
+
+ // -- Custom method for laying out phylogeny tree in a linear fashion
+ self.nodes =3D function (d, i) {
+ var _nodes =3D hierarchy.call(self, d, i), // self is to f=
ind the depth of all the nodes, assumes root is passed in
+ nodes =3D [],
+ maxDepth =3D 0,
+ numLeaves =3D 0;
+
+ // changing from hierarchy's custom format for data to usable form=
at
+ _nodes.forEach(function (_node){
+ var node =3D _node.data;
+ node.depth =3D _node.depth;
+ maxDepth =3D node.depth > maxDepth ? node.depth : maxDepth; /=
/finding max depth of tree
+ nodes.push(node);
+ });
+ // count the leaf nodes and assign maxDepth to nodes without children so that all leaf nodes are flushed to the same depth
+ nodes.forEach(function(node){
+ if ( !node.children ) { //&& !node._children
+ numLeaves +=3D 1;
+ node.depth =3D maxDepth; // if a leaf has no child it woul=
d be assigned max depth
+ }
+ });
+
+ leafHeight =3D layoutMode =3D=3D=3D "Circular" ? height / numLeave=
s : leafHeight;
+ leafIndex =3D 0;
+ layout(nodes[0], maxDepth, leafHeight, null);
+
+ return nodes;
+ };
+
+
+ function layout (node, maxDepth, vertSeparation, parent) {
+ /**
+ * -- Function with the side effect of adding x0, y0 to every node; takes the root as its starting point,
+ *    assuming that the leaf nodes are sorted in presentation order.
+ *      horizontal (y0): calculated from the parent as evolutionary distance multiplied by the depth separation
+ *      vertical (x0):   for a leaf node, its order among all leaf nodes multiplied by the vertical separation;
+ *                       for a parent node, the midpoint of all of its children
+ * -- The layout first calculates the y0 field going towards the leaves, and x0 when returning.
+ */
+ var children =3D node.children,
+ sumChildVertSeparation =3D 0;
+
+ // calculation of node's dist from parents, going down.
+ var dist =3D node.dist || defaultDist;
+ dist =3D dist > 1 ? 1 : dist; // constrain all distances to be at most one
+ node.dist =3D dist;
+ if (parent !=3D=3D null){
+ node.y0 =3D parent.y0 + dist * depthSeparation;
+ } else { //root node
+ node.y0 =3D maxTextWidth;
+ }
+
+
+ // if a node has no children, treat it as a leaf and start laying it out first
+ if (!children) {
+ node.x0 =3D leafIndex++ * vertSeparation;
+ } else {
+ // if it has children, we will visit all its children and calc=
ulate its position from its children
+ children.forEach( function (child) {
+ child.parent =3D node;
+ sumChildVertSeparation +=3D layout(child, maxDepth, vertSe=
paration, node);
+ });
+ node.x0 =3D sumChildVertSeparation / children.length;
+ }
+
+ // adding properties to the newly created node
+ node.x =3D node.x0;
+ node.y =3D node.y0;
+ return node.x0;
+ }
+ return self;
+}
+
+
+/**
+ * -- PhyloTree Model --
+ */
+var PhyloTree =3D Visualization.extend({
+ defaults : {
+ layout: "Linear",
+ separation : 250, // px distance between nodes of different depth representing 1 evolutionary unit
+ leafHeight: 18,
+ type : "phyloviz", // visualization type
+ title : "Title",
+ scaleFactor: 1,
+ translate: [0,0],
+ fontSize: 12, //fontSize of node label
+ selectedNode : null,
+ nodeAttrChangedTime : 0
+ },
+
+ root : {}, // Root has to be its own independent object because it is =
not part of the viz_config
+
+ toggle : function (d) {
+ /**
+ * Mechanism to expand or contract a single node. Expanded nodes h=
ave a children list, while for
+ * contracted nodes the list is stored in _children. Nodes with th=
eir children data stored in _children will not have their
+ * children rendered.
+ */
+ if(typeof d =3D=3D=3D "undefined") {return ;}
+ if (d.children ) {
+ d._children =3D d.children;
+ d.children =3D null;
+ } else {
+ d.children =3D d._children;
+ d._children =3D null;
+ }
+ },
+
+ toggleAll : function(d) {
+ /**
+ * Contracts the phylotree to a single node by recursively calling itself to place
+ * every node's list of children under _children.
+ */
+ if (d.children && d.children.length !=3D=3D 0) {
+ d.children.forEach(this.toggleAll, this);
+ this.toggle(d);
+ }
+ },
+
+ getData : function (){
+ /**
+ * Return the data of the tree. Used for preserving state.
+ */
+ return this.root;
+ },
+
+ save: function() {
+ /**
+ * Overrides the default save mechanism to clean up circular references in the
+ * phyloTree and to include the phyloTree in the saved JSON.
+ */
+ var root =3D this.root;
+ cleanTree(root);
+ this.set("root", root);
+
+ function cleanTree(node){
+ // we need to remove parent to delete circular reference
+ delete node.parent;
+
+ // removing unnecessary attributes
+ if (node._selected){ delete node._selected;}
+
+ node.children ? node.children.forEach(cleanTree) : 0;
+ node._children ? node._children.forEach(cleanTree) : 0;
+ }
+
+ var config =3D jQuery.extend(true, {}, this.attributes);
+ config["selectedNode"] =3D null;
+
+ show_message("Saving to Galaxy", "progress");
+
+ return $.ajax({
+ url: this.url(),
+ type: "POST",
+ dataType: "json",
+ data: {
+ vis_json: JSON.stringify(config)
+ },
+ success: function(res){
+ var viz_id =3D res.url.split("id=3D")[1].split("&")[0],
+ viz_url =3D "/phyloviz/visualization?id=3D" + viz_id;
+ window.history.pushState({}, "", viz_url + window.location=
.hash);
+ hide_modal();
+ }
+ });
+ }
+});
+
+
+
+/**
+ * -- Views --
+ */
+var PhylovizLayoutBase =3D Backbone.View.extend({
+ /**
+ * Stores the default variable for setting up the visualization
+ */
+ defaults : {
+ nodeRadius : 4.5 // radius of each node in the diagram
+ },
+
+
+ stdInit : function (options) {
+ /**
+ * Common initialization in layouts
+ */
+
+ var self =3D this;
+ self.model.on("change:separation change:leafHeight change:fontSize=
change:nodeAttrChangedTime", self.updateAndRender, self);
+
+ self.vis =3D options.vis;
+ self.i =3D 0;
+ self.maxDepth =3D -1; // stores the max depth of the tree
+
+ self.width =3D options.width;
+ self.height =3D options.height;
+ },
+
+
+ updateAndRender : function(source) {
+ /**
+ * Updates the visualization whenever there are changes in the ex=
pansion and contraction of nodes
+ * AND possibly when the tree is edited.
+ */
+ var vis =3D d3.select(".vis"),
+ self =3D this;
+ source =3D source || self.model.root;
+
+ self.renderNodes(source);
+ self.renderLinks(source);
+ self.addTooltips();
+ },
+
+
+ renderLinks : function(source) {
+ /**
+ * Renders the links for the visualization.
+ */
+ var self =3D this;
+ var diagonal =3D self.diagonal;
+ var duration =3D self.duration;
+ var layoutMode =3D self.layoutMode;
+ var link =3D self.vis.selectAll("g.completeLink")
+ .data(self.tree.links(self.nodes), function(d) { return d.targ=
et.id; });
+
+ var calcalateLinePos =3D function(d) {
+ d.pos0 =3D d.source.y0 + " " + d.source.x0; // position of t=
he source node <=3D> starting location of the line drawn
+ d.pos1 =3D d.source.y0 + " " + d.target.x0; // position where=
the line makes a right angle bend
+ d.pos2 =3D d.target.y0 + " " + d.target.x0; // point where=
the horizontal line becomes a dotted line
+ };
+
+ var linkEnter =3D link.enter().insert("svg:g","g.node")
+ .attr("class", "completeLink");
+
+
+ linkEnter.append("svg:path")
+ .attr("class", "link")
+ .attr("d", function(d) {
+ calcalateLinePos(d);
+ return "M " + d.pos0 + " L " + d.pos1;
+ });
+
+ var linkUpdate =3D link.transition().duration(500);
+
+ linkUpdate.select("path.link")
+ .attr("d", function(d) {
+ calcalateLinePos(d);
+ return "M " + d.pos0 + " L " + d.pos1 + " L " + d.pos2;
+ });
+
+ var linkExit =3D link.exit().remove();
+
+ },
+
+ // User Interaction methods below
+
+ selectNode : function(node){
+ /**
+ * Displays the selected node's information for editing.
+ */
+ var self =3D this;
+ d3.selectAll("g.node")
+ .classed("selectedHighlight", function(d){
+ if (node.id =3D=3D=3D d.id){
+ if(node._selected) { // for de-selecting a node.
+ delete node._selected;
+ return false;
+ } else {
+ node._selected =3D true;
+ return true;
+ }
+ }
+ return false;
+ });
+
+ self.model.set("selectedNode", node);
+ $("#phyloVizSelectedNodeName").val(node.name);
+ $("#phyloVizSelectedNodeDist").val(node.dist);
+ $("#phyloVizSelectedNodeAnnotation").val(node.annotation || "");
+ },
+
+ addTooltips : function (){
+ /**
+ * Creates bootstrap tooltip for the visualization. Has to be cal=
led repeatedly due to newly generated
+ * enterNodes
+ */
+ $(".bs-tooltip").remove(); //clean up tooltip, just in case i=
ts listeners are removed by d3
+ $(".node")
+ .attr("data-original-title", function(){
+ var d =3D this.__data__,
+ annotation =3D d.annotation || "None" ;
+ return d ? (d.name ? d.name + "<br/>" : "") + "Dist: " + d=
.dist + " <br/>Annotation: " + annotation: "";
+ })
+ .tooltip({'placement':'top', 'trigger' : 'hover'});
+
+ }
+});
+
+
+
+
+var PhylovizLinearView =3D PhylovizLayoutBase.extend({
+ /**
+ * Linear layout class of Phyloviz; responsible for rendering the nodes.
+ * Calls PhyloTreeLayout to determine the positions of the nodes.
+ */
+ initialize : function(options){
+ // Default values of linear layout
+ var self =3D this;
+ self.margins =3D options.margins;
+ self.layoutMode =3D "Linear";
+
+ self.stdInit(options);
+
+ self.layout();
+ self.updateAndRender(self.model.root);
+ },
+
+ layout : function() {
+ /**
+ * Creates the basic layout of a linear tree by precalculating fixed values.
+ * One-off calculations are also made here.
+ */
+
+ var self =3D this;
+
+ self.tree =3D new PhyloTreeLayout().layoutMode("Linear");
+ self.diagonal =3D d3.svg.diagonal()
+ .projection(function(d) { return [d.y, d.x ]; });
+ },
+
+ renderNodes : function (source) {
+ /**
+ * Renders the nodes based on the Linear layout.
+ */
+ var self =3D this,
+ fontSize =3D self.model.get("fontSize") + "px";
+
+ // assigning properties from models
+ self.tree.separation(self.model.get("separation")).leafHeight(self=
.model.get("leafHeight"));
+
+ var duration =3D 500,
+ nodes =3D self.tree.separation(self.model.get("separation")).n=
odes(self.model.root);
+
+ var node =3D self.vis.selectAll("g.node")
+ .data(nodes, function(d) { return d.name + d.id || (d.id =3D +=
+self.i); });
+
+ // These variables have to be passed into the link-update methods in the base class
+ self.nodes =3D nodes;
+ self.duration =3D duration;
+
+ // ------- D3 ENTRY --------
+ // Enter any new nodes at the parent's previous position.
+ var nodeEnter =3D node.enter().append("svg:g")
+ .attr("class", "node")
+ .on("dblclick", function(){ d3.event.stopPropagation(); })
+ .on("click", function(d) {
+ if (d3.event.altKey) {
+ self.selectNode(d); // display info if alt is p=
ressed
+ } else {
+ if(d.children && d.children.length =3D=3D=3D 0){ retur=
n;} // there is no need to toggle leaves
+ self.model.toggle(d); // contract/expand nodes at da=
ta level
+ self.updateAndRender(d); // re-render the tree
+ }
+ });
+
+ nodeEnter.attr("transform", function(d) { return "translate(" + so=
urce.y0 + "," + source.x0 + ")"; });
+
+ nodeEnter.append("svg:circle")
+ .attr("r", 1e-6)
+ .style("fill", function(d) { return d._children ? "lightsteelb=
lue" : "#fff"; });
+
+ nodeEnter.append("svg:text")
+ .attr("class", "nodeLabel")
+ .attr("x", function(d) { return d.children || d._children ? -1=
0 : 10; })
+ .attr("dy", ".35em")
+ .attr("text-anchor", function(d) { return d.children || d._chi=
ldren ? "end" : "start"; })
+ .style("fill-opacity", 1e-6);
+
+ // ------- D3 TRANSITION --------
+ // Transition nodes to their new position.
+ var nodeUpdate =3D node.transition()
+ .duration(duration);
+
+ nodeUpdate.attr("transform", function(d) {
+ return "translate(" + d.y + "," + d.x + ")"; });
+
+ nodeUpdate.select("circle")
+ .attr("r", self.defaults.nodeRadius)
+ .style("fill", function(d) { return d._children ? "lightsteelb=
lue" : "#fff"; });
+
+ nodeUpdate.select("text")
+ .style("fill-opacity", 1)
+ .style("font-size", fontSize)
+ .text(function(d) { return d.name; });
+
+ // ------- D3 EXIT --------
+ // Transition exiting nodes to the parent's new position.
+ var nodeExit =3D node.exit().transition()
+ .duration(duration)
+ .remove();
+
+ nodeExit.select("circle")
+ .attr("r", 1e-6);
+
+ nodeExit.select("text")
+ .style("fill-opacity", 1e-6);
+
+ // Stash the old positions for transition.
+ nodes.forEach(function(d) {
+ d.x0 =3D d.x; // we need the x0, y0 for parents with children
+ d.y0 =3D d.y;
+ });
+ }
+
+});
+
+var PhylovizView =3D Backbone.View.extend({
+
+ className: 'phyloviz',
+
+ initialize: function(options) {
+ var self =3D this;
+ // -- Default values of the vis
+ self.MIN_SCALE =3D 0.05; //for zooming
+ self.MAX_SCALE =3D 5;
+ self.MAX_DISPLACEMENT =3D 500;
+ self.margins =3D [10, 60, 10, 80];
+
+ self.width =3D $("#PhyloViz").width();
+ self.height =3D $("#PhyloViz").height();
+ self.radius =3D self.width;
+ self.data =3D options.data;
+
+ // -- Events Phyloviz view responses to
+ $(window).resize(function(){
+ self.width =3D $("#PhyloViz").width();
+ self.height =3D $("#PhyloViz").height();
+ self.render();
+ });
+
+ // -- Create phyloTree model
+ self.phyloTree =3D new PhyloTree(options.config);
+ self.phyloTree.root =3D self.data;
+
+ // -- Set up UI functions of main view
+ self.zoomFunc =3D d3.behavior.zoom().scaleExtent([self.MIN_SCALE, =
self.MAX_SCALE]);
+ self.zoomFunc.translate(self.phyloTree.get("translate"));
+ self.zoomFunc.scale(self.phyloTree.get("scaleFactor"));
+
+ // -- set up header buttons, search and settings menu
+ self.navMenu =3D new HeaderButtons(self);
+ self.settingsMenu =3D new SettingsMenu({phyloTree : self.phyloTree=
});
+ self.nodeSelectionView =3D new NodeSelectionView({phyloTree : self=
.phyloTree});
+ self.search =3D new PhyloVizSearch();
+
+
+ setTimeout(function(){ // using settimeout to call the zoomAn=
dPan function according to the stored attributes in viz_config
+ self.zoomAndPan();
+ }, 1000);
+ },
+
+ render: function(){
+ // -- Creating helper function for vis. --
+ var self =3D this;
+ $("#PhyloViz").empty();
+
+ // -- Layout viz. --
+ self.mainSVG =3D d3.select("#PhyloViz").append("svg:svg")
+ .attr("width", self.width)
+ .attr("height", self.height)
+ .attr("pointer-events", "all")
+ .call(self.zoomFunc.on("zoom", function(){
+ self.zoomAndPan();
+ }));
+
+ self.boundingRect =3D self.mainSVG.append("svg:rect")
+ .attr("class", "boundingRect")
+ .attr("width", self.width)
+ .attr("height", self.height)
+ .attr("stroke", "black")
+ .attr("fill", "white");
+
+ self.vis =3D self.mainSVG
+ .append("svg:g")
+ .attr("class", "vis");
+
+ self.layoutOptions =3D {
+ model : self.phyloTree,
+ width : self.width,
+ height : self.height,
+ vis: self.vis,
+ margins: self.margins
+ };
+
+ // -- Creating Title
+ $("#title").text("Phylogenetic Tree from " + self.phyloTree.get("t=
itle") + ":");
+
+ // -- Create Linear view instance --
+ var linearView =3D new PhylovizLinearView(self.layoutOptions);
+ },
+
+ zoomAndPan : function(event){
+ /**
+ * Zooms and pans the svg element that contains the entire tree.
+ * Uses d3.zoom events and extends them to allow manual updates and to keep state in the model.
+ */
+ if (typeof event !=3D=3D "undefined") {
+ var zoomParams =3D event.zoom,
+ translateParams =3D event.translate;
+ }
+
+ var self =3D this,
+ scaleFactor =3D self.zoomFunc.scale(),
+ translationCoor =3D self.zoomFunc.translate(),
+ zoomStatement =3D "",
+ translateStatement =3D "";
+
+ // Do manual scaling.
+ switch (zoomParams) {
+ case "reset":
+ scaleFactor =3D 1.0;
+ translationCoor =3D [0,0]; break;
+ case "+":
+ scaleFactor *=3D 1.1; break;
+ case "-":
+ scaleFactor *=3D 0.9; break;
+ default:
+ if (typeof zoomParams =3D=3D=3D "number") {
+ scaleFactor =3D zoomParams;
+ } else if (d3.event !=3D=3D null) {
+ scaleFactor =3D d3.event.scale;
+ }
+ }
+ if (scaleFactor < self.MIN_SCALE || scaleFactor > self.MAX_SCALE) =
{ return;}
+ self.zoomFunc.scale(scaleFactor); //update scale Factor
+ zoomStatement =3D "translate(" + self.margins[3] + "," + self.mar=
gins[0] + ")" +
+ " scale(" + scaleFactor + ")";
+
+ // Do manual translation.
+ if( d3.event !=3D=3D null) {
+ translateStatement =3D "translate(" + d3.event.translate + ")";
+ } else {
+ if(typeof translateParams !=3D=3D "undefined") {
+ var x =3D translateParams.split(",")[0];
+ var y =3D translateParams.split(",")[1];
+ if (!isNaN(x) && !isNaN(y)){
+ translationCoor =3D [translationCoor[0] + parseFloat(x=
), translationCoor[1] + parseFloat(y)];
+ }
+ }
+ self.zoomFunc.translate(translationCoor); // update zoomFunc
+ translateStatement =3D "translate(" + translationCoor + ")";
+ }
+
+ self.phyloTree.set("scaleFactor", scaleFactor);
+ self.phyloTree.set("translate", translationCoor);
+ self.vis.attr("transform", translateStatement + zoomStatement); //=
refers to the view that we are actually zooming
+ },
+
+
+ reloadViz : function() {
+ /**
+ * Primes the Ajax URL to load another Nexus tree
+ */
+ var self =3D this,
+ treeIndex =3D $("#phylovizNexSelector :selected").val(),
+ dataset_id =3D self.phyloTree.get("dataset_id"),
+ url =3D "phyloviz/getJsonData?dataset_id=3D" + dataset_id + "&=
treeIndex=3D" + String(treeIndex);
+ $.getJSON(url, function(packedJson){
+ window.initPhyloViz(packedJson.data, packedJson.config);
+ });
+ }
+});
+
+
+var HeaderButtons =3D Backbone.View.extend({
+
+ initialize : function(phylovizView){
+ var self =3D this;
+ self.phylovizView =3D phylovizView;
+
+ // Clean up code - if the class initialized more than once
+ $("#panelHeaderRightBtns").empty();
+ $("#phyloVizNavBtns").empty();
+ $("#phylovizNexSelector").off();
+
+ self.initNavBtns();
+ self.initRightHeaderBtns();
+
+ // Initialize a tree selector in the case of a Nexus file
+ $("#phylovizNexSelector").off().on("change", function() {self.phy=
lovizView.reloadViz();} );
+
+ },
+
+ initRightHeaderBtns : function(){
+ var self =3D this;
+
+ rightMenu =3D create_icon_buttons_menu([
+ { icon_class: 'gear', title: 'PhyloViz Settings', on_click: fu=
nction(){
+ $("#SettingsMenu").show();
+ self.settingsMenu.updateUI();
+ } },
+ { icon_class: 'disk', title: 'Save visualization', on_click: f=
unction() {
+ var nexSelected =3D $("#phylovizNexSelector option:selecte=
d").text();
+ if(nexSelected) {
+ self.phylovizView.phyloTree.set("title", nexSelected);
+ }
+ self.phylovizView.phyloTree.save();
+ } },
+ { icon_class: 'chevron-expand', title: 'Search / Edit Nodes', =
on_click: function() {
+ $("#nodeSelectionView").show();
+ } },
+ { icon_class: 'information', title: 'Phyloviz Help', on_click:=
function() {
+ window.open('http://wiki.g2.bx.psu.edu/Learn/Visualization=
/PhylogeneticTree');
+ // https://docs.google.com/document/d/1AXFoJgEpxr21H3LICRs=
3EyMe1B1X_KFPouzIgrCz3zk/edit
+ } }
+ ],
+ {
+ tooltip_config: { placement: 'bottom' }
+ });
+ $("#panelHeaderRightBtns").append(rightMenu.$el);
+ },
+
+ initNavBtns: function() {
+ var self =3D this,
+ navMenu =3D create_icon_buttons_menu([
+ { icon_class: 'zoom-in', title: 'Zoom in', on_click: funct=
ion() {
+ self.phylovizView.zoomAndPan({ zoom : "+"});
+ } },
+ { icon_class: 'zoom-out', title: 'Zoom out', on_click: fun=
ction() {
+ self.phylovizView.zoomAndPan({ zoom : "-"});
+ } },
+ { icon_class: 'arrow-circle', title: 'Reset Zoom/Pan', on_=
click: function() {
+ self.phylovizView.zoomAndPan({ zoom : "reset"});
+ } }
+ ],
+ {
+ tooltip_config: { placement: 'bottom' }
+ });
+ $("#phyloVizNavBtns").append(navMenu.$el);
+ }
+});
+
+
+var SettingsMenu =3D UserMenuBase.extend({
+
+ className: 'Settings',
+
+ initialize: function(options){
+ // settings needs to directly interact with the phyloviz model so =
it will get access to it.
+ var self =3D this;
+ self.phyloTree =3D options.phyloTree;
+ self.el =3D $("#SettingsMenu");
+ self.inputs =3D {
+ separation : $("#phyloVizTreeSeparation"),
+ leafHeight : $("#phyloVizTreeLeafHeight"),
+ fontSize : $("#phyloVizTreeFontSize")
+ };
+
+ //init all buttons of settings
+ $("#settingsCloseBtn").off().on("click", function() { self.el.hide=
(); });
+ $("#phylovizResetSettingsBtn").off().on("click", function() { self=
.resetToDefaults(); });
+ $("#phylovizApplySettingsBtn").off().on("click", function() { self=
.apply(); });
+ },
+
+ apply : function(){
+ /**
+ * Applying user values to phylotree model.
+ */
+ var self =3D this;
+ if (!self.isAcceptableValue(self.inputs["separation"], 50, 2500) ||
+ !self.isAcceptableValue(self.inputs["leafHeight"], 5, 30) ||
+ !self.isAcceptableValue(self.inputs["fontSize"], 5, 20)){
+ return;
+ }
+ $.each(self.inputs, function(key, $input){
+ self.phyloTree.set(key, $input.val());
+ });
+ },
+ updateUI : function(){
+ /**
+ * Called to update the input values to those stored in the model.
+ */
+ var self =3D this;
+ $.each(self.inputs, function(key, $input){
+ $input.val(self.phyloTree.get(key));
+ });
+ },
+ resetToDefaults : function(){
+ /**
+ * Resets the value of the phyloTree model to its default
+ */
+ $(".bs-tooltip").remove(); // just in case the tool tip was n=
ot removed
+ var self =3D this;
+ $.each(self.phyloTree.defaults, function(key, value) {
+ self.phyloTree.set(key, value);
+ });
+ self.updateUI();
+ },
+
+ render: function(){
+
+ }
+
+});
+
+
+var NodeSelectionView =3D UserMenuBase.extend({
+ /**
+ * View for inspecting node properties and editing them
+ */
+ className: 'Settings',
+
+ initialize : function (options){
+ var self =3D this;
+ self.el =3D $("#nodeSelectionView");
+ self.phyloTree =3D options.phyloTree;
+
+ self.UI =3D {
+ enableEdit : $('#phylovizEditNodesCheck'),
+ saveChanges : $('#phylovizNodeSaveChanges'),
+ cancelChanges : $("#phylovizNodeCancelChanges"),
+ name : $("#phyloVizSelectedNodeName"),
+ dist : $("#phyloVizSelectedNodeDist"),
+ annotation : $("#phyloVizSelectedNodeAnnotation")
+ };
+
+ self.valuesOfConcern =3D {
+ name : null,
+ dist : null,
+ annotation : null
+ }; // temporarily stores the values in case the user changes their mind
+
+ //init UI buttons
+ $("#nodeSelCloseBtn").off().on("click", function() { self.el.hide(=
); });
+ self.UI.saveChanges.off().on("click", function(){ self.updateNodes=
(); });
+ self.UI.cancelChanges.off().on("click", function(){ self.cancelCha=
nges(); });
+
+ (function ($) {
+ // extending jquery fxn for enabling and disabling nodes.
+ $.fn.enable =3D function (isEnabled) {
+ return $(this).each(function () {
+ if(isEnabled){
+ $(this).removeAttr('disabled');
+ } else {
+ $(this).attr('disabled', 'disabled');
+ }
+ });
+ };
+ })(jQuery);
+
+ self.UI.enableEdit.off().on("click", function () {
+ self.toggleUI();
+ });
+ },
+
+ toggleUI : function(){
+ /**
+ * For turning on and off the child elements
+ */
+ var self =3D this,
+ checked =3D self.UI.enableEdit.is(':checked');
+
+ !checked ? self.cancelChanges() : "";
+
+ $.each(self.valuesOfConcern, function(key, value) {
+ self.UI[key].enable(checked);
+ });
+ if(checked){
+ self.UI.saveChanges.show();
+ self.UI.cancelChanges.show();
+ } else {
+ self.UI.saveChanges.hide();
+ self.UI.cancelChanges.hide();
+ }
+
+ },
+
+ cancelChanges : function() {
+ /**
+ * Reverts to the previous values in case the user changes their mind.
+ */
+ var self =3D this,
+ node =3D self.phyloTree.get("selectedNode");
+ if (node){
+ $.each(self.valuesOfConcern, function(key, value) {
+ self.UI[key].val(node[key]);
+ });
+ }
+ },
+
+ updateNodes : function (){
+ /**
+ * Changing the data in the underlying tree with user-specified va=
lues
+ */
+ var self =3D this,
+ node =3D self.phyloTree.get("selectedNode");
+ if (node){
+ if (!self.isAcceptableValue(self.UI.dist, 0, 1) ||
+ self.hasIllegalJsonCharacters(self.UI.name) ||
+ self.hasIllegalJsonCharacters(self.UI.annotation) ) {
+ return;
+ }
+ $.each(self.valuesOfConcern, function(key, value) {
+ (node[key]) =3D self.UI[key].val();
+ });
+ self.phyloTree.set("nodeAttrChangedTime", new Date());
+ } else {
+ alert("No node selected");
+ }
+ }
+
+
+});
+
+
+
+var PhyloVizSearch =3D UserMenuBase.extend({
+ /**
+ * Initializes the search panel on phyloviz and handles its user inter=
action
+ * It allows user to search the entire free based on some qualifer, li=
ke dist <=3D val.
+ */
+ initialize : function () {
+ var self =3D this;
+
+ $("#phyloVizSearchBtn").on("click", function(){
+ var searchTerm =3D $("#phyloVizSearchTerm"),
+ searchConditionVal =3D $("#phyloVizSearchCondition").val()=
.split("-"),
+ attr =3D searchConditionVal[0],
+ condition =3D searchConditionVal[1];
+ self.hasIllegalJsonCharacters(searchTerm);
+
+ if (attr =3D=3D=3D "dist"){
+ self.isAcceptableValue(searchTerm, 0, 1);
+ }
+ self.searchTree(attr, condition, searchTerm.val());
+ });
+ },
+
+ searchTree : function (attr, condition, val){
+ /**
+ * Searches the entire tree and will highlight the nodes that matc=
h the condition in green
+ */
+ d3.selectAll("g.node")
+ .classed("searchHighlight", function(d){
+ var attrVal =3D d[attr];
+ if (typeof attrVal !=3D=3D "undefined" && attrVal !=3D=3D =
null){
+ if (attr =3D=3D=3D "dist"){
+ switch (condition) {
+ case "greaterEqual":
+ return attrVal >=3D +val;
+ case "lesserEqual":
+ return attrVal <=3D +val;
+ default:
+ return;
+ }
+
+ } else if (attr =3D=3D=3D "name" || attr =3D=3D=3D "an=
notation") {
+ return attrVal.toLowerCase().indexOf(val.toLowerCa=
se()) !=3D=3D -1;
+ }
+ }
+ });
+ }
+});
\ No newline at end of file
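
The core of the PhyloTreeLayout above is the recursive layout() pass: a node's horizontal position is its parent's position plus its evolutionary distance (capped at 1) times the depth separation, leaves are stacked at a fixed vertical spacing in presentation order, and an internal node sits at the midpoint of its children. A rough Python sketch of the same recursion, using plain dicts in place of the d3 hierarchy objects (constants and field names below are illustrative):

    # Sketch of the phyloviz layout recursion; not Galaxy code.
    DEPTH_SEPARATION = 200   # px per unit of evolutionary distance
    LEAF_HEIGHT = 18         # px between adjacent leaves
    DEFAULT_DIST = 0.5       # used when a node carries no dist
    MAX_TEXT_WIDTH = 50      # left margin reserved for the root label

    def layout( root ):
        state = { 'leaf_index': 0 }
        def recurse( node, parent ):
            dist = min( node.get( 'dist', DEFAULT_DIST ), 1 )   # distances are capped at 1
            node[ 'y' ] = parent[ 'y' ] + dist * DEPTH_SEPARATION if parent else MAX_TEXT_WIDTH
            children = node.get( 'children' )
            if not children:                                    # leaf: stack in presentation order
                node[ 'x' ] = state[ 'leaf_index' ] * LEAF_HEIGHT
                state[ 'leaf_index' ] += 1
            else:                                               # internal node: midpoint of children
                node[ 'x' ] = sum( recurse( c, node ) for c in children ) / float( len( children ) )
            return node[ 'x' ]
        recurse( root, None )
        return root

    tree = { 'children': [ { 'dist': 0.2 }, { 'dist': 0.8, 'children': [ {}, {} ] } ] }
    layout( tree )   # the root ends up vertically centred between its two subtrees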
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako
+++ b/templates/dataset/edit_attributes.mako
@@ -58,7 +58,7 @@
Info:
</label><div style=3D"float: left; width: 250px; margin-ri=
ght: 10px;">
- <textarea name=3D"info" cols=3D"40" rows=3D"2">${data.=
info | h}</textarea>
+ <textarea name=3D"info" cols=3D"40" rows=3D"2">${ util=
.unicodify( data.info ) | h}</textarea></div><div style=3D"clear: both"></d=
iv></div>
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 templates/dataset/errors.mako
--- a/templates/dataset/errors.mako
+++ b/templates/dataset/errors.mako
@@ -24,21 +24,21 @@
<% job =3D hda.creating_job_associations[0].job %>
%if job.traceback:
The Galaxy framework encountered the following error while=
attempting to run the tool:
- <pre>${job.traceback | h}</pre>
+ <pre>${ util.unicodify( job.traceback ) | h}</pre>
%endif
%if job.stderr or job.info:
Tool execution generated the following error message:
%if job.stderr:
- <pre>${job.stderr | h}</pre>
+ <pre>${ util.unicodify( job.stderr ) | h}</pre>
%elif job.info:
- <pre>${job.info | h}</pre>
+ <pre>${ util.unicodify( job.info ) | h}</pre>
%endif
%else:
Tool execution did not generate any error messages.
%endif
%if job.stdout:
The tool produced the following additional output:
- <pre>${job.stdout | h}</pre>
+ <pre>${ util.unicodify( job.stdout ) | h}</pre>
%endif
%else:
The tool did not create any additional job / error info.
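
The two template changes above wrap job output in util.unicodify before the "| h" HTML escape so that non-ASCII bytes in tool stderr/stdout no longer break page rendering. The sketch below is not galaxy.util's implementation, only an illustration of what such a helper does (Python 2 era, hence the unicode builtin):

    # Illustrative only -- galaxy.util.unicodify is more thorough than this.
    def unicodify( value, encoding='utf-8' ):
        """Return a unicode object, decoding byte strings without raising."""
        if value is None:
            return None
        if isinstance( value, unicode ):
            return value
        try:
            return unicode( str( value ), encoding, 'replace' )
        except Exception:
            return u''

    print repr( unicodify( 'caf\xc3\xa9 in stderr' ) )  # -> u'caf\xe9 in stderr'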
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 templates/root/history.mako
--- a/templates/root/history.mako
+++ b/templates/root/history.mako
@@ -272,6 +272,17 @@
}
=20
init_trackster_links();
+
+ function init_phyloviz_links() {
+ // PhyloViz links
+ // Add to trackster browser functionality
+ $(".phyloviz-add").live("click", function() {
+ var dataset =3D this,
+ dataset_jquery =3D $(this);
+ window.parent.location =3D dataset_jquery.attr("new-url");
+ });
+ }
+ init_phyloviz_links();
=20
// History rename functionality.
async_save_text("history-name-container", "history-name", "${h.url_for=
( controller=3D"/history", action=3D"rename_async", id=3Dtrans.security.enc=
ode_id(history.id) )}", "new_name", 18);
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -29,6 +29,9 @@
## Render the dataset `data` as history item, using `hid` as the displayed=
id
<%def name=3D"render_dataset( data, hid, show_deleted_on_refresh =3D False=
, for_editing =3D True, display_structured =3D False )"><%
+
+ from galaxy.datatypes.xml import Phyloxml
+ from galaxy.datatypes.data import Newick, Nexus
dataset_id =3D trans.security.encode_id( data.id )
=20
if data.state in ['no state','',None]:
@@ -230,6 +233,14 @@
action-url=3D"${h.url_for( controller=3D't=
racks', action=3D'browser', dataset_id=3Ddataset_id)}"
new-url=3D"${h.url_for( controller=3D'trac=
ks', action=3D'index', dataset_id=3Ddataset_id, default_dbkey=3Ddata.dbkey)=
}" title=3D"View in Trackster"></a>
%endif
+ <%
+ isPhylogenyData =3D isinstance(data.datatype, =
(Phyloxml, Nexus, Newick))
+ %>
+ %if isPhylogenyData:
+ <a href=3D"javascript:void(0)" class=3D"i=
con-button chart_curve phyloviz-add"
+ action-url=3D"${h.url_for( controller=
=3D'phyloviz', action=3D'-', dataset_id=3Ddataset_id)}"
+ new-url=3D"${h.url_for( controller=3D'p=
hyloviz', action=3D'index', dataset_id=3Ddataset_id)}" title=3D"View in Phy=
loviz"></a>
+ %endif
%if trans.user:
%if not display_structured:
<div style=3D"float: right">
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 templates/visualization/phyloviz.mako
--- /dev/null
+++ b/templates/visualization/phyloviz.mako
@@ -0,0 +1,320 @@
+<%inherit file=3D"/webapps/galaxy/base_panels.mako"/>
+##
+<%def name=3D"init()">
+ <%
+ self.has_left_panel=3DFalse
+ self.has_right_panel=3DFalse
+ self.active_view=3D"visualization"
+ self.message_box_visible=3DFalse
+ %>
+</%def>
+
+<%def name=3D"stylesheets()">
+ ${parent.stylesheets()}
+ <style>
+
+ .node circle {
+ cursor: pointer;
+ fill: #fff;
+ stroke: steelblue;
+ stroke-width: 1.5px;
+ }
+
+ .node.searchHighlight circle {
+ stroke-width: 3px;
+ stroke: #7adc26;
+ }
+
+ .node.selectedHighlight circle {
+ stroke-width: 3px;
+ stroke: #dc143c;
+ }
+
+ path.link {
+ fill: none;
+ stroke: #B5BBFF;
+ stroke-width: 4.0px;
+ }
+
+
+ div #phyloVizNavContainer{
+ text-align: center;
+ width: 100%;
+ height: 0px;
+ }
+
+ div #phyloVizNav{
+ font-weight: bold;
+ display: inline-block;
+ background: transparent;
+ top: -2em;
+ position: relative;
+ }
+
+ div .navControl{
+ float: left;
+ }
+
+ div#FloatingMenu {
+ left: 0;
+ top: 15%;
+ width:20%;
+ z-index:100;
+ padding: 5px;
+
+ }
+
+ div#SettingsMenu {
+ width: 25%;
+ top: 350px;
+
+ }
+
+ div#nodeSelectionView {
+ width: 25%;
+ top:70px;
+ }
+
+ .Panel {
+ right: 0%;
+ z-index: 101;
+ position: fixed;
+
+ ## Borrowed from galaxy modal_dialogues
+ background-color: white;
+ border: 1px solid #999;
+ border: 1px solid rgba(0, 0, 0, 0.3);
+ -webkit-border-radius: 6px;
+ -moz-border-radius: 6px;
+ border-radius: 6px;
+ -webkit-border-radius: 6px;
+ -moz-border-radius: 6px;
+ border-radius: 6px;
+ -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ -webkit-background-clip: padding-box;
+ -moz-background-clip: padding-box;
+ background-clip: padding-box;
+ -webkit-background-clip: padding-box;
+ -moz-background-clip: padding-box;
+ background-clip: padding-box;
+ }
+
+ span.PhylovizCloseBtn{
+ cursor: pointer;
+ float : right;
+ }
+
+ #PhyloViz{
+ width: 100%;
+ height: 95%;
+ }
+
+ h2.PhyloVizMenuTitle{
+ color: white;
+ }
+
+ ## Settings Menu
+ .SettingMenuRows{
+ margin: 2px 0 2px 0;
+ }
+
+
+ ## Helper Styles
+ .PhyloVizFloatLeft{
+ float : left;
+ }
+ .icon-button.zoom-in,.icon-button.zoom-out{display:inline-block;he=
ight:16px;width:16px;margin-bottom:-3px;cursor:pointer;}
+ .icon-button.zoom-out{background:transparent url(../images/fugue/m=
agnifier-zoom-out.png) center center no-repeat;}
+ .icon-button.zoom-in{margin-left:10px;background:transparent url(.=
./images/fugue/magnifier-zoom.png) center center no-repeat;}
+
+ </style>
+</%def>
+
+
+<%def name=3D"javascripts()">
+ ${parent.javascripts()}
+ ${h.js( "galaxy.panels", "libs/d3", "mvc/data", "viz/visualization", "=
viz/phyloviz")}
+</%def>
+
+
+
+<%def name=3D"center_panel()">
+
+ <div class=3D"unified-panel-header" unselectable=3D"on">
+ <div class=3D"unified-panel-header-inner">
+ <div style=3D"float:left;" id=3D"title"></div>
+ <div style=3D"float:right;" id=3D"panelHeaderRightBtns"></div>
+ </div>
+ <div style=3D"clear: both"></div>
+ </div>
+
+
+ <div id=3D"phyloVizNavContainer">
+ <div id=3D"phyloVizNav">
+ %if config["ext"] =3D=3D "nex" and not config["saved_visualiza=
tion"]:
+ <div id =3D "phylovizNexInfo" class=3D"navControl">
+ <p>Select a tree to view:
+ <select id=3D"phylovizNexSelector">
+ % for tree, index in config["trees"]:
+ <option value=3D"${index}">${tree}</option>
+ % endfor
+ </select>
+ </p>
+ </div>
+ %endif
+ <div id=3D"phyloVizNavBtns" class=3D"navControl">
+ </div>
+ <div class=3D"navControl">
+ <p> | Alt+click to select nodes</p>
+ </div>
+
+
+ </div>
+
+ </div>
+
+ ## Node Selection Menu
+ <div id=3D"nodeSelectionView" class=3D"Panel">
+ <div class=3D"modal-header">
+ <h3 class=3D"PhyloVizMenuTitle">Search / Edit Nodes :
+ <span class=3D"PhylovizCloseBtn" id=3D"nodeSelCloseBtn"> X=
</span>
+ </h3>
+ </div>
+
+ <div class=3D"modal-body">
+
+ <div class=3D"SettingMenuRows">
+ Search for nodes with:
+ <select id=3D"phyloVizSearchCondition" style=3D"width: 55%=
">
+ <option value=3D"name-containing">Name (containing)</o=
ption>
+ <option value=3D"annotation-containing">Annotation (co=
ntaining)</option>
+ <option value=3D"dist-greaterEqual">Distance (>=3D)</o=
ption>
+ <option value=3D"dist-lesserEqual">Distance (<=3D)</op=
tion>
+ </select>
+ <input type=3D"text" id=3D"phyloVizSearchTerm" value=3D"N=
one" size=3D"15" displayLabel=3D"Distance">
+
+ <div class=3D"SettingMenuRows" style=3D"text-align: center=
;">
+ <button id=3D"phyloVizSearchBtn" > Search! </button>
+ </div>
+ </div>
+
+ <br/>
+
+ <div class=3D"SettingMenuRows">
+ Name: <input type=3D"text" id=3D"phyloVizSelectedNodeName"=
value=3D"None" size=3D"15" disabled=3D"disabled" >
+ </div>
+ <div class=3D"SettingMenuRows">
+ Dist: <input type=3D"text" id=3D"phyloVizSelectedNodeDist"=
value=3D"None" size=3D"15" disabled=3D"disabled" displayLabel=3D"Distance">
+ </div>
+ <div class=3D"SettingMenuRows">
+ Annotation:
+ <textarea id=3D"phyloVizSelectedNodeAnnotation" disabled=
=3D"disabled" ></textarea>
+ </div>
+ <div class=3D"SettingMenuRows">
+ Edit: <input type=3D"checkbox" id=3D"phylovizEditNodesChec=
k" value=3D"You can put custom annotations here and it will be saved">
+ <button id=3D"phylovizNodeSaveChanges" style=3D"display: n=
one;"> Save edits</button>
+ <button id=3D"phylovizNodeCancelChanges" style=3D"display:=
none;"> Cancel</button>
+ </div>
+ </div>
+ </div>
+
+ ## Settings Menus
+ <div id=3D"SettingsMenu" class=3D"Panel">
+ <div class=3D"modal-header">
+ <h3 class=3D"PhyloVizMenuTitle">Phyloviz Settings:
+ <span class=3D"PhylovizCloseBtn" id=3D"settingsCloseBtn"> =
X </span>
+ </h3>
+ </div>
+ <div class=3D"modal-body">
+ <div class=3D"SettingMenuRows">
+ Phylogenetic Spacing (px per unit): <input id=3D"phyloVizT=
reeSeparation" type=3D"text" value=3D"250" size=3D"10" displayLabel=3D"Phyl=
ogenetic Separation"> (50-2500)
+ </div>
+ <div class=3D"SettingMenuRows">
+ Vertical Spacing (px): <input type=3D"text" id=3D"phyloViz=
TreeLeafHeight" value=3D"18" size=3D"10" displayLabel=3D"Vertical Spacing">=
(5-30)
+ </div>
+ <div class=3D"SettingMenuRows">
+ Font Size (px): <input type=3D"text" id=3D"phyloVizTreeFon=
tSize" value=3D"12" size=3D"4" displayLabel=3D"Font Size"> (5-20)
+ </div>
+
+ </div>
+ <div class=3D"modal-footer">
+ <button id=3D"phylovizResetSettingsBtn" class=3D"PhyloVizFloat=
Left" > Reset </button>
+ <button id=3D"phylovizApplySettingsBtn" class=3D"PhyloVizFloat=
Right" > Apply </button>
+ </div>
+ </div>
+
+
+
+
+
+
+ <div class=3D"Panel" id=3D"FloatingMenu" style=3D"display: None;">
+
+ <h2>PhyloViz (<a onclick=3D"displayHelp()" href=3D"javascript:void=
(0);">?</a>)</h2>
+ <div style=3D"display: none;">
+ <h2>Summary of Interactions and Functions:</h2>
+ <div class=3D"hint">1. Expansion of Nodes: click or option-cli=
ck to expand or collapse</div>
+ <div class=3D"hint">2. Zooming and translation: mousewheel, bu=
ttons, click and drag, double click. Reset</div>
+ <div class=3D"hint">3. Tooltip: Displays "Name and Size" on mo=
useOver on nodes</div>
+ <div class=3D"hint">4. Minimap: Currently displays an exact bu=
t scaled down replicate of the tree, orange bounding box is correct for lin=
ear only<br/>
+ Can be switched on or off</div>
+ <div class=3D"hint">5. Changing Layouts: Able to change betwee=
n circular and linear layouts.</div>
+
+ </div>
+
+ <h5>Scaling & Rotation:</h5>
+ <button id=3D"phylovizZoomInBtn" class=3D"" > + </button>
+ <button id=3D"phylovizZoomOutBtn" class=3D"" > - </button>
+
+
+ <h5>Translation:</h5>
+ <button id=3D"phylovizTranslateUpBtn" > Up </button>
+ <button id=3D"phylovizTranslateDownBtn" > Down </button>
+ <br/>
+ <button id=3D"phylovizTranslateLeftBtn" > Left </button>
+ <button id=3D"phylovizTranslateRightBtn" > Right </button>
+
+
+
+ <h5>Others:</h5>
+ <button id=3D"phylovizResetBtn" > Reset Zoom/Translate </button>
+ <button id=3D"phylovizSaveBtn" > Save vizualization </button>
+ <button id=3D"phylovizOpenSettingsBtn" > Settings </button>
+ </div>
+
+ <div id=3D"PhyloViz" >
+ </div>
+
+ <script type=3D"text/javascript">
+
+ function initPhyloViz(data, config) {
+ var phyloviz;
+
+ // -- Initialization code |-->
+ phyloviz =3D new PhylovizView({
+ data: data,
+ layout : "Linear",
+ config : config
+ });
+
+ // -- Render viz. --
+ phyloviz.render();
+
+ }
+
+ $(function firstVizLoad(){ // calls when viz is loaded for t=
he first time
+ var config =3D JSON.parse( '${ h.to_json_string( config )}');
+ var data =3D JSON.parse('${h.to_json_string(data)}');
+ initPhyloViz(data, config);
+ });
+
+ </script>
+
+</%def>
+
+
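
The firstVizLoad block above hands two server-embedded JSON payloads to PhylovizView: a config object (read by the template for the Nexus tree selector and by phyloviz.js for the PhyloTree model) and the tree data itself. A hypothetical example of what those payloads could look like -- key names are taken from what the template and phyloviz.js read, values are invented:

    import json

    config = {
        'ext': 'nex',                  # template shows the tree selector only for Nexus input
        'saved_visualization': False,
        'trees': [ [ 'Fig._2', 0 ], [ 'PAUP_1', 1 ] ],   # (name, index) pairs for the selector
        'dataset_id': 'abc123',        # placeholder encoded id
        'title': '1_nexus.nex',
        'layout': 'Linear',
    }
    data = { 'name': 'root',
             'children': [ { 'name': 'leafA', 'dist': 0.3 },
                           { 'name': 'leafB', 'dist': 0.7 } ] }
    print json.dumps( { 'config': config, 'data': data }, indent=2 )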
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 templates/webapps/community/index.mako
--- a/templates/webapps/community/index.mako
+++ b/templates/webapps/community/index.mako
@@ -60,34 +60,41 @@
%endif
<div class=3D"toolSectionPad"></div><div class=3D"toolSect=
ionTitle">
- Repositories
+ All Repositories
</div>
- <div class=3D"toolSectionBody">
- <div class=3D"toolSectionBg">
- <div class=3D"toolTitle">
- <a target=3D"galaxy_main" href=3D"${h.url_for(=
controller=3D'repository', action=3D'browse_categories', webapp=3D'communi=
ty' )}">Browse by category</a>
- </div>
- %if trans.user:
- <div class=3D"toolTitle">
- <a target=3D"galaxy_main" href=3D"${h.url_=
for( controller=3D'repository', action=3D'browse_repositories', operation=
=3D'my_repositories', webapp=3D'community' )}">Browse my repositories</a>
- </div>
- <div class=3D"toolTitle">
- <a target=3D"galaxy_main" href=3D"${h.url_=
for( controller=3D'repository', action=3D'browse_invalid_tools', cntrller=
=3D'repository', webapp=3D'community' )}">Browse my invalid tools</a>
- </div>
- %endif
+ <div class=3D"toolTitle">
+ <a target=3D"galaxy_main" href=3D"${h.url_for( control=
ler=3D'repository', action=3D'browse_categories', webapp=3D'community' )}">=
Browse by category</a>
+ </div>
+ %if trans.user:
+ <div class=3D"toolSectionPad"></div>
+ <div class=3D"toolSectionTitle">
+ My Repositories and Tools
</div>
- </div>
- <div class=3D"toolSectionBody">
- <div class=3D"toolSectionBg">
- <div class=3D"toolTitle">
- %if trans.user:
- <a target=3D"galaxy_main" href=3D"${h.url_=
for( controller=3D'repository', action=3D'create_repository', webapp=3D'com=
munity' )}">Create new repository</a>
- %else:
- <a target=3D"galaxy_main" href=3D"${h.url_=
for( controller=3D'/user', action=3D'login', webapp=3D'community' )}">Login=
to create a repository</a>
- %endif
- </div>
+ <div class=3D"toolTitle">
+ <a target=3D"galaxy_main" href=3D"${h.url_for( con=
troller=3D'repository', action=3D'browse_repositories', operation=3D'reposi=
tories_i_own', webapp=3D'community' )}">Repositories I own</a></div>
- </div>
+ <div class=3D"toolTitle">
+ <a target=3D"galaxy_main" href=3D"${h.url_for( con=
troller=3D'repository', action=3D'browse_repositories', operation=3D'writab=
le_repositories', webapp=3D'community' )}">My writable repositories</a>
+ </div>
+ <div class=3D"toolTitle">
+ <a target=3D"galaxy_main" href=3D"${h.url_for( con=
troller=3D'repository', action=3D'browse_invalid_tools', cntrller=3D'reposi=
tory', webapp=3D'community' )}">My invalid tools</a>
+ </div>
+ <div class=3D"toolSectionPad"></div>
+ <div class=3D"toolSectionTitle">
+ Available Actions
+ </div>
+ <div class=3D"toolTitle">
+ <a target=3D"galaxy_main" href=3D"${h.url_for( con=
troller=3D'repository', action=3D'create_repository', webapp=3D'community' =
)}">Create new repository</a>
+ </div>
+ %else:
+ <div class=3D"toolSectionPad"></div>
+ <div class=3D"toolSectionTitle">
+ Available Actions
+ </div>
+ <div class=3D"toolTitle">
+ <a target=3D"galaxy_main" href=3D"${h.url_for( con=
troller=3D'/user', action=3D'login', webapp=3D'community' )}">Login to crea=
te a repository</a>
+ </div>
+ %endif
</div></div></div>
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 test-data/visualization/phyloviz/1_nexus.nex
--- /dev/null
+++ b/test-data/visualization/phyloviz/1_nexus.nex
@@ -0,0 +1,87 @@
+#NEXUS
+
+[!This data set was downloaded from TreeBASE, a relational database of phy=
logenetic knowledge. TreeBASE has been supported by the NSF, Harvard Univer=
sity, Yale University, SDSC and UC Davis. Please do not remove this acknowl=
edgment from the Nexus file.
+
+
+Generated on June 12, 2012; 23:00 GMT
+
+TreeBASE (cc) 1994-2008
+
+Study reference:
+Olariaga I., Grebenc T., Salcedo I., & Mart=C3=ADn M.P. 2012. Two new spec=
ies of Hydnum
+with ovoid basidiospores: H. ovoideisporum and H. vesterholtii. Mycologia,=
.
+
+TreeBASE Study URI: http://purl.org/phylo/treebase/phylows/study/TB2:S128=
31]
+
+BEGIN TREES;
+ TITLE Hydnum_ITS_result;
+ LINK TAXA =3D Taxa1;
+ TRANSLATE
+ 1 Hydnum_aff_ellipsosporum_RUFHYD1_AJ535304,
+ 2 Hydnum_albidum_ALB_AY817135,
+ 3 Hydnum_albidum_ALBHYD1_AJ534974,
+ 4 Hydnum_albomagnum_ALM_DQ218305,
+ 5 Hydnum_ellipsosporum_ELL_AY817138,
+ 6 Hydnum_ellipsosporum_RUFHYD8_AJ547882,
+ 7 Hydnum_ovoidisporum_12317BIOFungi,
+ 8 Hydnum_ovoidisporum_12683BIOFungi,
+ 9 Hydnum_ovoidisporum_12902BIOFungi,
+ 10 Hydnum_ovoidisporum_14130BIOFungi,
+ 11 Hydnum_repandum_RE1_REP1_AJ889978,
+ 12 Hydnum_repandum_RE1_REP2_AJ889949,
+ 13 Hydnum_repandum_RE1_REP3_AY817136,
+ 14 Hydnum_repandum_RE1_REP6_UDB000025,
+ 15 Hydnum_repandum_RE1_REP7_UDB000096,
+ 16 Hydnum_repandum_RE1_REP8_UDB001479,
+ 17 Hydnum_repandum_RE1_REPHYD10_AJ547888,
+ 18 Hydnum_repandum_RE1_REPHYD11_AJ547886,
+ 19 Hydnum_repandum_RE1_REPHYD1_AJ547871,
+ 20 Hydnum_repandum_RE1_REPHYD3_AJ547874,
+ 21 Hydnum_repandum_RE1_REPHYD4_AJ547876,
+ 22 Hydnum_repandum_RE1_REPHYD5_AJ547875,
+ 23 Hydnum_repandum_RE1_REPHYD6_AJ547877,
+ 24 Hydnum_repandum_RE1_REPHYD7_AJ547878,
+ 25 Hydnum_repandum_RE1_REPHYD8_AJ547881,
+ 26 Hydnum_repandum_RE1_REPHYD9_AJ547883,
+ 27 Hydnum_repandum_RE1_RUFHYD10_AJ547866,
+ 28 Hydnum_repandum_RE1_RUFHYD11_AJ547889,
+ 29 Hydnum_repandum_RE1_RUFHYD9_AJ535305,
+ 30 Hydnum_rufescens_RU1_RUFHYD5_AJ547869,
+ 31 Hydnum_rufescens_RU1_RUFHYD6_AJ547884,
+ 32 Hydnum_rufescens_RU1_RUFHYD7_AJ547870,
+ 33 Hydnum_rufescens_RU2_REP5_DQ367902,
+ 34 Hydnum_rufescens_RU2_RUFHYD2_AJ535301,
+ 35 Hydnum_rufescens_RU3_12901BIOFungi,
+ 36 Hydnum_rufescens_RU3_REP4_DQ218306,
+ 37 Hydnum_rufescens_RU3_RUFHYD3_AJ535303,
+ 38 Hydnum_rufescens_RU3_RUFHYD4_AJ535302,
+ 39 Hydnum_rufescens_RU4_RUFHYD12_AJ839969,
+ 40 Hydnum_rufescens_RU4_RUFHYD16_AJ547868,
+ 41 Hydnum_rufescens_RU4_RUFHYD17_AJ547885,
+ 42 Hydnum_rufescens_RU4_UMB1_DQ367903,
+ 43 Hydnum_rufescens_RU5_12760BIOFungi,
+ 44 Hydnum_rufescens_RU5_ALBHYD2_AJ534975,
+ 45 Hydnum_rufescens_RU5_RUF2_DQ658890,
+ 46 Hydnum_rufescens_RU5_RUF4_UDB001465,
+ 47 Hydnum_rufescens_RU5_RUF5_UDB002423,
+ 48 Hydnum_rufescens_RU5_RUFHYD14_AJ547872,
+ 49 Hydnum_rufescens_RU6_RUF1_AY817137,
+ 50 Hydnum_rufescens_RU6_RUFHYD15_AJ547867,
+ 51 Hydnum_rufescens_wrong_taxonomy_RUF3_AM087246,
+ 52 Hydnum_umbilicatum_UMBHYD1_AJ534972,
+ 53 Hydnum_umbilicatum_UMBHYD2_AJ534973,
+ 54 Hydnum_vesterholtii_10429BIOFungi,
+ 55 Hydnum_vesterholtii_10452BIOFungi,
+ 56 Hydnum_vesterholtii_12330BIOFungi,
+ 57 Hydnum_vesterholtii_12904BIOFungi,
+ 58 Hydnum_vesterholtii_REPHYD12A_AJ547879,
+ 59 Hydnum_vesterholtii_REPHYD12C_AJ783968,
+ 60 Hydnum_vesterholtii_REPHYD13_AJ547887,
+ 61 Sistotrema_muscicola_AJ606040,
+ 62 Sistotrema_alboluteum_AJ606042;
+ TREE Fig._2 =3D [&R] ((62:100.0,(51:100.0,61:100.0):93.269997):49.66=
,((4:100.0,(2:100.0,3:100.0):100.0):60.639999,(((56:100.0,58:100.0,59:100.0=
):84.639999,(54:100.0,55:100.0,57:100.0,60:100.0):98.330002):92.5,(((30:100=
.0,31:100.0,32:100.0):100.0,(11:100.0,12:100.0,13:100.0,14:100.0,15:100.0,1=
6:100.0,17:100.0,18:100.0,19:100.0,20:100.0,21:100.0,22:100.0,23:100.0,24:1=
00.0,25:100.0,26:100.0):99.93):68.690002,(((33:100.0,34:100.0):49.8050005,(=
35:100.0,36:100.0,37:100.0,38:100.0):99.989998):49.8050005,((7:100.0,8:100.=
0,9:100.0,10:100.0):100.0,(42:100.0,(39:100.0,40:100.0,41:100.0):98.449997)=
:86.790001,((52:100.0,53:100.0):99.93,(1:100.0,(5:97.47999949999999,6:100.0=
):97.47999949999999):100.0):53.310001,(27:100.0,(28:100.0,29:100.0,49:100.0=
,50:100.0):47.404999):47.404999,(43:100.0,44:100.0,45:100.0,46:100.0,47:100=
.0,48:100.0):99.459999):29.245001):29.245001):51.580002):61.540001):49.66);
+ TREE PAUP_1 =3D [&R] ((62:100.0,(51:100.0,61:100.0):93.269997):49.66=
,((4:100.0,(3:100.0,2:100.0):100.0):60.639999,(((58:100.0,59:100.0,56:100.0=
):84.639999,(60:100.0,54:100.0,55:100.0,57:100.0):98.330002):92.5,(((30:100=
.0,31:100.0,32:100.0):100.0,(19:100.0,20:100.0,21:100.0,22:100.0,23:100.0,2=
4:100.0,25:100.0,26:100.0,17:100.0,18:100.0,11:100.0,12:100.0,13:100.0,14:1=
00.0,15:100.0,16:100.0):99.93):68.690002,((34:100.0,33:100.0):99.610001,(37=
:100.0,38:100.0,35:100.0,36:100.0):99.989998,(42:100.0,(39:100.0,41:100.0,4=
0:100.0):98.449997):86.790001,(8:100.0,7:100.0,9:100.0,10:100.0):100.0,((52=
:100.0,53:100.0):99.93,(1:100.0,(5:100.0,6:100.0):94.959999):100.0):53.3100=
01,(29:100.0,27:100.0,28:100.0,50:100.0,49:100.0):94.809998,(44:100.0,43:10=
0.0,48:100.0,45:100.0,46:100.0,47:100.0):99.459999):58.490002):51.580002):6=
1.540001):49.66);
+
+
+
+END;
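
This Nexus fixture (and 2_nexus.nex below) exercises the multi-tree case handled by the phylovizNexSelector: each TREE line in the TREES block contributes one selectable entry. A sketch, not Galaxy's Nexus parser, of how those tree names could be pulled from the file on disk:

    import re

    def list_nexus_trees( path ):
        """Return the names that follow 'TREE <name> =' lines in a Nexus file."""
        names = []
        for line in open( path ):
            match = re.match( r'\s*TREE\s+(\S+)\s*=', line, re.IGNORECASE )
            if match:
                names.append( match.group( 1 ) )
        return names

    print list_nexus_trees( 'test-data/visualization/phyloviz/1_nexus.nex' )
    # -> ['Fig._2', 'PAUP_1']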
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 test-data/visualization/phyloviz/2_nexus.nex
--- /dev/null
+++ b/test-data/visualization/phyloviz/2_nexus.nex
@@ -0,0 +1,96 @@
+#NEXUS
+
+[!This data set was downloaded from TreeBASE, a relational database of phy=
logenetic knowledge. TreeBASE has been supported by the NSF, Harvard Univer=
sity, Yale University, SDSC and UC Davis. Please do not remove this acknowl=
edgment from the Nexus file.
+
+
+Generated on August 18, 2012; 12:14 GMT
+
+TreeBASE (cc) 1994-2008
+
+Study reference:
+Naish D., Dyke G., Cau A., & Escuilli=C3=A9 F. 2012. A gigantic bird from =
the Upper Cretaceous
+of Central Asia. Biology Letters, 8(1): 97-100.
+
+TreeBASE Study URI: http://purl.org/phylo/treebase/phylows/study/TB2:S130=
08]
+
+BEGIN TREES;
+ TITLE Imported_trees;
+ LINK TAXA =3D Taxa1;
+ TRANSLATE
+ 1 Herrerasaurus,
+ 2 Tawa,
+ 3 Allosaurus,
+ 4 Alvarezsaurus,
+ 5 Anchiornis,
+ 6 Archaeopteryx,
+ 7 Archaeorhynchus,
+ 8 Avimimus,
+ 9 Baryonyx,
+ 10 Beipiaosaurus,
+ 11 Caenagnathus,
+ 12 Caudipteryx,
+ 13 Ceratosaurus,
+ 14 Chirostenotes,
+ 15 Citipati,
+ 16 Compsognathus,
+ 17 Confuciusornis,
+ 18 Dilong,
+ 19 Dilophosaurus,
+ 20 Epidendrosaurus,
+ 21 Epidexipteryx,
+ 22 Erlicosaurus,
+ 23 Eustreptospondylus,
+ 24 Gallimimus,
+ 25 Garudimimus,
+ 26 Gobipteryx,
+ 27 Guanlong,
+ 28 Haplocheirus,
+ 29 Harpymimus,
+ 30 Hebeiornis,
+ 31 Hongshanornis,
+ 32 Huoshanornis,
+ 33 Iberomesornis,
+ 34 Ichthyornis,
+ 35 Incisivosaurus,
+ 36 Jeholornis,
+ 37 Limusaurus,
+ 38 Longicrusavis,
+ 39 Longipteryx,
+ 40 Longirostravis,
+ 41 Majungasaurus,
+ 42 Masiakasaurus,
+ 43 Monolophosaurus,
+ 44 Mononykus,
+ 45 Neornithes,
+ 46 Ornitholestes,
+ 47 Ornithomimus,
+ 48 Patagonykus,
+ 49 Patagopteryx,
+ 50 Pelecanimimus,
+ 51 Pengornis,
+ 52 Protarchaeopteryx,
+ 53 Protopteryx,
+ 54 Rinchenia,
+ 55 Sapeornis,
+ 56 Segnosaurus,
+ 57 Shenzhousaurus,
+ 58 Shuvuuia,
+ 59 Sinornithosaurus,
+ 60 Sinosauropteryx,
+ 61 Sinovenator,
+ 62 Sinraptor,
+ 63 Syntarsus_kayentakatae,
+ 64 Troodon,
+ 65 Tyrannosaurus,
+ 66 Velociraptor,
+ 67 Yanornis,
+ 68 Yixianornis,
+ 69 Zhongjianornis,
+ 70 Zhongornis,
+ 71 Zuolong,
+ 72 Samrukia;
+ TREE Figure_1A =3D [&R] (1,(2,(((((43,(3,62)),(71,((46,((((28,(4,(48=
,(44,58)))),((((5,(61,(64,(59,66)))),(6,((36,(55,(69,(((7,34,45,49,72,(31,3=
8),(67,68)),(33,((32,((26,30),(39,40))),(51,53)))),(17,70))))),(20,21)))),(=
(11,(12,(8,(14,(15,54))))),(35,52))),(10,(22,56)))),(50,(57,(29,(25,(24,47)=
))))),(16,60))),(27,(18,65))))),(9,23)),(13,(41,(37,42)))),(19,63))));
+
+
+
+END;
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f4=
8291f2b5e6feb94c2 test-data/visualization/phyloviz/3_phyloxml.xml
--- /dev/null
+++ b/test-data/visualization/phyloviz/3_phyloxml.xml
@@ -0,0 +1,257 @@
+<?xml version=3D"1.0" encoding=3D"UTF-8"?>
+<phyloxml xmlns:xsi=3D"http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation=3D"http://www.phyloxml.org http://www.phyloxml.org/1=
.10/phyloxml.xsd"
+ xmlns=3D"http://www.phyloxml.org">
+ <phylogeny rooted=3D"true">
+ <clade>
+ <clade>
+ <branch_length>0.18105</branch_length>
+ <confidence type=3D"unknown">89.0</confidence>
+ <clade>
+               <branch_length>0.07466</branch_length>
+               <confidence type="unknown">32.0</confidence>
+               <clade>
+                  <branch_length>0.26168</branch_length>
+                  <confidence type="unknown">100.0</confidence>
+                  <clade>
+                     <branch_length>0.22058</branch_length>
+                     <confidence type="unknown">89.0</confidence>
+                     <clade>
+                        <branch_length>0.28901</branch_length>
+                        <confidence type="unknown">100.0</confidence>
+                        <clade>
+                           <branch_length>0.06584</branch_length>
+                           <confidence type="unknown">100.0</confidence>
+                           <clade>
+                              <branch_length>0.02309</branch_length>
+                              <confidence type="unknown">43.0</confidence>
+                              <clade>
+                                 <branch_length>0.0746</branch_length>
+                                 <confidence type="unknown">100.0</confidence>
+                                 <clade>
+                                    <branch_length>0.02365</branch_length>
+                                    <confidence type="unknown">88.0</confidence>
+                                    <clade>
+                                       <name>22_MOUSE</name>
+                                       <branch_length>0.05998</branch_length>
+                                       <taxonomy>
+                                          <code>MOUSE</code>
+                                       </taxonomy>
+                                    </clade>
+                                    <clade>
+                                       <name>Apaf-1_HUMAN</name>
+                                       <branch_length>0.01825</branch_length>
+                                       <taxonomy>
+                                          <code>HUMAN</code>
+                                       </taxonomy>
+                                    </clade>
+                                 </clade>
+                                 <clade>
+                                    <name>12_CANFA</name>
+                                    <branch_length>0.04683</branch_length>
+                                    <taxonomy>
+                                       <code>CANFA</code>
+                                    </taxonomy>
+                                 </clade>
+                              </clade>
+                              <clade>
+                                 <name>11_CHICK</name>
+                                 <branch_length>0.15226</branch_length>
+                                 <taxonomy>
+                                    <code>CHICK</code>
+                                 </taxonomy>
+                              </clade>
+                           </clade>
+                           <clade>
+                              <name>16_XENLA</name>
+                              <branch_length>0.4409</branch_length>
+                              <taxonomy>
+                                 <code>XENLA</code>
+                              </taxonomy>
+                           </clade>
+                        </clade>
+                        <clade>
+                           <branch_length>0.17031</branch_length>
+                           <confidence type="unknown">100.0</confidence>
+                           <clade>
+                              <branch_length>0.10929</branch_length>
+                              <confidence type="unknown">100.0</confidence>
+                              <clade>
+                                 <name>14_FUGRU</name>
+                                 <branch_length>0.02255</branch_length>
+                                 <taxonomy>
+                                    <code>FUGRU</code>
+                                 </taxonomy>
+                              </clade>
+                              <clade>
+                                 <name>15_TETNG</name>
+                                 <branch_length>0.09478</branch_length>
+                                 <taxonomy>
+                                    <code>TETNG</code>
+                                 </taxonomy>
+                              </clade>
+                           </clade>
+                           <clade>
+                              <name>17_BRARE</name>
+                              <branch_length>0.1811</branch_length>
+                              <taxonomy>
+                                 <code>BRARE</code>
+                              </taxonomy>
+                           </clade>
+                        </clade>
+                     </clade>
+                     <clade>
+                        <branch_length>0.01594</branch_length>
+                        <confidence type="unknown">53.0</confidence>
+                        <clade>
+                           <branch_length>0.10709</branch_length>
+                           <confidence type="unknown">68.0</confidence>
+                           <clade>
+                              <name>1_BRAFL</name>
+                              <branch_length>0.26131</branch_length>
+                              <taxonomy>
+                                 <code>BRAFL</code>
+                              </taxonomy>
+                           </clade>
+                           <clade>
+                              <name>18_NEMVE</name>
+                              <branch_length>0.38014</branch_length>
+                              <taxonomy>
+                                 <code>NEMVE</code>
+                              </taxonomy>
+                           </clade>
+                        </clade>
+                        <clade>
+                           <name>23_STRPU</name>
+                           <branch_length>0.48179</branch_length>
+                           <taxonomy>
+                              <code>STRPU</code>
+                           </taxonomy>
+                        </clade>
+                     </clade>
+                  </clade>
+                  <clade>
+                     <branch_length>0.34475</branch_length>
+                     <confidence type="unknown">100.0</confidence>
+                     <clade>
+                        <name>26_STRPU</name>
+                        <branch_length>0.36374</branch_length>
+                        <taxonomy>
+                           <code>STRPU</code>
+                        </taxonomy>
+                        <sequence>
+                           <domain_architecture length="1319">
+                              <domain from="18" to="98" confidence="3.4E-5">Death</domain>
+                              <domain from="189" to="481" confidence="1.8E-10">NB-ARC</domain>
+                              <domain from="630" to="668" confidence="8.2E-5">WD40</domain>
+                           </domain_architecture>
+                        </sequence>
+                     </clade>
+                     <clade>
+                        <name>25_STRPU</name>
+                        <branch_length>0.33137</branch_length>
+                        <taxonomy>
+                           <code>STRPU</code>
+                        </taxonomy>
+                        <sequence>
+                           <domain_architecture length="1947">
+                              <domain from="143" to="227" confidence="7.4E-5">Death</domain>
+                              <domain from="227" to="550" confidence="2.0E-13">NB-ARC</domain>
+                              <domain from="697" to="736" confidence="7.9E-4">WD40</domain>
+                              <domain from="745" to="785" confidence="1.5">WD40</domain>
+                              <domain from="1741" to="1836" confidence="2.0">Adeno_VII</domain>
+                           </domain_architecture>
+                        </sequence>
+                     </clade>
+                  </clade>
+               </clade>
+               <clade>
+                  <branch_length>1.31498</branch_length>
+                  <confidence type="unknown">100.0</confidence>
+                  <clade>
+                     <name>CED4_CAEEL</name>
+                     <branch_length>0.13241</branch_length>
+                     <taxonomy>
+                        <code>CAEEL</code>
+                     </taxonomy>
+                     <sequence>
+                        <domain_architecture length="714">
+                           <domain from="7" to="90" confidence="9.2E-14">CARD</domain>
+                           <domain from="116" to="442" confidence="5.8E-151">NB-ARC</domain>
+                        </domain_architecture>
+                     </sequence>
+                  </clade>
+                  <clade>
+                     <name>31_CAEBR</name>
+                     <branch_length>0.04777</branch_length>
+                     <taxonomy>
+                        <code>CAEBR</code>
+                     </taxonomy>
+                     <sequence>
+                        <domain_architecture length="554">
+                           <domain from="1" to="75" confidence="0.0046">CARD</domain>
+                           <domain from="101" to="427" confidence="2.1E-123">NB-ARC</domain>
+                        </domain_architecture>
+                     </sequence>
+                  </clade>
+               </clade>
+            </clade>
+            <clade>
+               <branch_length>0.13172</branch_length>
+               <confidence type="unknown">45.0</confidence>
+               <clade>
+                  <branch_length>0.24915</branch_length>
+                  <confidence type="unknown">95.0</confidence>
+                  <clade>
+                     <branch_length>0.76898</branch_length>
+                     <confidence type="unknown">100.0</confidence>
+                     <clade>
+                        <name>28_DROPS</name>
+                        <branch_length>0.1732</branch_length>
+                        <taxonomy>
+                           <code>DROPS</code>
+                        </taxonomy>
+                        <sequence>
+                           <domain_architecture length="535">
+                              <domain from="112" to="399" confidence="1.4E-5">NB-ARC</domain>
+                           </domain_architecture>
+                        </sequence>
+                     </clade>
+                     <clade>
+                        <name>Dark_DROME</name>
+                        <branch_length>0.18863</branch_length>
+                        <taxonomy>
+                           <code>DROME</code>
+                        </taxonomy>
+                        <sequence>
+                           <domain_architecture length="1421">
+                              <domain from="108" to="397" confidence="2.1E-5">NB-ARC</domain>
+                           </domain_architecture>
+                        </sequence>
+                     </clade>
+                  </clade>
+                  <clade>
+                     <name>29_AEDAE</name>
+                     <branch_length>0.86398</branch_length>
+                     <taxonomy>
+                        <code>AEDAE</code>
+                     </taxonomy>
+                     <sequence>
+                        <domain_architecture length="423">
+                           <domain from="109" to="421" confidence="9.3E-6">NB-ARC</domain>
+                        </domain_architecture>
+                     </sequence>
+                  </clade>
+               </clade>
+               <clade>
+                  <name>30_TRICA</name>
+                  <branch_length>0.97698</branch_length>
+                  <taxonomy>
+                     <code>TRICA</code>
+                  </taxonomy>
+               </clade>
+            </clade>
+         </clade>
+      </clade>
+   </phylogeny>
+</phyloxml>
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 test-data/visualization/phyloviz/4_newick.nhx
--- /dev/null
+++ b/test-data/visualization/phyloviz/4_newick.nhx
@@ -0,0 +1,33 @@
+(((BGIOSIBCA028421_ORYSA:0.423485[&&NHX:S=ORYSA:O=BGIOSIBCA028421.1:G=BGIOSIBCA028421],
+At5g41150_ARATH:0.273135[&&NHX:S=ARATH:O=At5g41150.1:G=At5g41150]
+):0.690991[&&NHX:S=Magnoliophyta:D=N:B=100],
+(rad16_SCHPO:0.718598[&&NHX:S=SCHPO:O=SPCC970.01:G=SPCC970.01],
+RAD1_YEAST:1.05456[&&NHX:S=YEAST:O=YPL022W.1:G=YPL022W]
+):0.344838[&&NHX:S=Ascomycota:D=N:B=100]
+):0.103849[&&NHX:S=Eukaryota:D=N:B=61],
+((((((((ERCC4_HUMAN:0.067531[&&NHX:S=HUMAN:O=ENST00000311895.3:G=ENSG00000175595],
+Ercc4_MOUSE:0.17422[&&NHX:S=MOUSE:O=ENSMUST00000023206.5:G=ENSMUSG00000022545]
+):0.065513[&&NHX:S=Euarchontoglires:D=N:B=100],
+ENSMODT00000006086_MONDO:0.104633[&&NHX:S=MONDO:O=ENSMODT00000006086.2:G=ENSMODG00000004840]
+):0.083764[&&NHX:S=Theria:D=N:B=100],
+Q5ZJP8_CHICK:0.153132[&&NHX:S=CHICK:O=ENSGALT00000004716.2:G=ENSGALG00000002981]
+):0.057998[&&NHX:S=Amniota:D=N:B=100],
+ENSXETT00000024054_XENTR:0.288632[&&NHX:S=XENTR:O=ENSXETT00000024054.2:G=ENSXETG00000010991]
+):0.075713[&&NHX:S=Tetrapoda:D=N:B=100],
+(zgc-63468_BRARE:0.2218[&&NHX:S=BRARE:O=ENSDART00000015780.4:G=ENSDARG00000014161],
+NEWSINFRUT00000137921_FUGRU:0.220441[&&NHX:S=FUGRU:O=NEWSINFRUT00000137921.3:G=NEWSINFRUG00000130312]
+):0.170605[&&NHX:S=Clupeocephala:D=N:B=100]
+):0.238713[&&NHX:S=Euteleostomi:D=N:B=100],
+ENSCINT00000011737_CIOIN:0.623567[&&NHX:S=CIOIN:O=ENSCINT00000011737.2:G=ENSCING00000005673]
+):0.07499[&&NHX:S=Chordata:D=N:B=100],
+(Sm00.scaff00195.0600_SCHMA:0.784609[&&NHX:S=SCHMA:O=Sm00.scaff00195.0600:G=Sm00.scaff00195.0600],
+(CBG03141_CAEBR:0.093703[&&NHX:S=CAEBR:O=CBG03141:G=CBG03141],
+NP_496498_CAEEL:0.212236[&&NHX:S=CAEEL:O=C47D12.8.1:G=C47D12.8]
+):1.47416[&&NHX:S=Caenorhabditis:D=N:B=94]
+):0.26906[&&NHX:S=Bilateria:D=N:B=97]
+):0.071406[&&NHX:S=Bilateria:D=N:B=1],
+(mei-9-RA_DROME:0.170289[&&NHX:S=DROME:O=CG3697-RA.3:G=CG3697],
+GA17620-PA_DROPS:0.154817[&&NHX:S=DROPS:O=GA17620-PA:G=GA17620]
+):0.818474[&&NHX:S=Sophophora:D=N:B=100]
+):0
+)[&&NHX:S=Eukaryota:D=N];
\ No newline at end of file
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 test-data/visualization/phyloviz/5_newick.nhx
--- /dev/null
+++ b/test-data/visualization/phyloviz/5_newick.nhx
@@ -0,0 +1,1 @@
+(CAE_ELE_PORCN:0.303421 ,((((DRO_PER_PORCN:0.001000 ,DRO_PSE_PORCN:0.001000 )67:0.141994 ,(DRO_ANA_PORCN:0.111899 ,(DRO_ERE_PORCN:0.030516 ,(DRO_MEL_PORCN:0.021127 ,DRO_SEC_PORCN:0.021127 )38:0.030516 )35:0.111899 )18:0.141994 )16:0.162611 ,(DRO_WIL_PORCN:0.152225 ,(DRO_VIR_PORCN:0.085057 ,DRO_MOJ_PORCN:0.085057 )24:0.152225 )15:0.162611 )13:0.295081 ,(ANO_GAM_PORCN:0.287545 ,((CIO_INT_PORCN:0.100686 ,CIO_SAV_PORCN:0.100686 )19:0.275542 ,((LOA_LOA_PORCN:0.036278 ,BRU_MAL_PORCN:0.036278 )29:0.272631 ,(((((DAN_RER_PORCN:0.086499 ,((TAK_RUB_PORCN:0.032609 ,TET_NIG_PORCN:0.032609 )32:0.048864 ,(GAD_MOR_PORCN:0.039387 ,(ORY_LAT_PORCN:0.031729 ,(GAS_ACU_PORCN:0.021882 ,ORE_NIL_PORCN:0.021882 )37:0.031729 )34:0.039387 )28:0.048864 )27:0.086499 )23:0.119618 ,(LAT_CHA_PORCN:0.099348 ,((XEN_LAE_PORCN:0.033333 ,XEN_TRO_PORCN:0.033333 )31:0.091250 ,(ANO_CAR_PORCN:0.086538 ,((MON_DOM_PORCN:0.014100 ,(MAC_EUG_PORCN:0.005423 ,SAR_HAR_PORCN:0.005423 )57:0.014100 )42:0.062862 ,(ORN_ANA_PORCN:0.057974 ,(GOR_GOR_PORCN:0.033876 ,(FEL_CAT_PORCN:0.022851 ,(PRO_CAP_PORCN:0.019716 ,(CAV_POR_PORCN:0.018599 ,(ERI_EUR_PORCN:0.015518 ,((DIP_ORD_PORCN:0.007231 ,(MUS_MUS_PORCN:0.001085 ,(RAT_NOR_PORCN:0.001000 ,CRI_GRI_PORCN:0.001000 )69:0.001085 )64:0.007231 )53:0.012954 ,(DAS_NOV_PORCN:0.011362 ,(LOX_AFR_PORCN:0.010575 ,(CAL_JAC_PORCN:0.010332 ,(OCH_PRI_PORCN:0.010063 ,(MIC_MUR_PORCN:0.009123 ,(SUS_SCR_PORCN:0.008880 ,(MYO_LUC_PORCN:0.008460 ,((CAN_FAM_PORCN:0.005423 ,AIL_MEL_PORCN:0.005423 )58:0.008093 ,((PTE_VAM_PORCN:0.006508 ,BOS_TAU_PORCN:0.006508 )55:0.007494 ,((SPE_TRI_PORCN:0.003254 ,TUP_BEL_PORCN:0.003254 )61:0.006929 ,((OTO_GAR_PORCN:0.001085 ,(ORY_CUN_PORCN:0.001000 ,TUR_TRU_PORCN:0.001000 )68:0.001085 )65:0.005965 ,(EQU_CAB_PORCN:0.003688 ,(MAC_MUL_PORCN:0.002711 ,(PAN_TRO_PORCN:0.001446 ,(HOM_SAP_PORCN:0.001085 ,(PON_ABE_PORCN:0.001000 ,NOM_LEU_PORCN:0.001000 )70:0.001085 )66:0.001446 )63:0.002711 )62:0.003688 )60:0.005965 )56:0.006929 )54:0.007494 )52:0.008093 )51:0.008460 )50:0.008880 )49:0.009123 )48:0.010063 )47:0.010332 )46:0.010575 )45:0.011362 )44:0.012954 )43:0.015518 )41:0.018599 )40:0.019716 )39:0.022851 )36:0.033876 )30:0.057974 )26:0.062862 )25:0.086538 )22:0.091250 )21:0.099348 )20:0.119618 )17:0.214465 ,(BRA_FLO_PORCN:0.189220 ,SAC_KOW_PORCN:0.189220 )12:0.214465 )11:0.257058 ,(NEM_VEC_PORCN:0.246631 ,AMP_QUE_PORCN:0.246631 )9:0.257058 )8:0.266904 ,(TRI_CAS_PORCN:0.259494 ,(PED_HUM_PORCN:0.227009 ,(NAS_VIT_PORCN:0.160241 ,(API_MEL_PORCN:0.031851 ,(BOM_TER_PORCN:0.004808 ,BOM_IMP_PORCN:0.004808 )59:0.031851 )33:0.160241 )14:0.227009 )10:0.259494 )7:0.266904 )6:0.272631 )5:0.275542 )4:0.287545 )3:0.295081 )2:0.303421 )1:0.0001;
diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 tools/data_source/data_source.py
--- a/tools/data_source/data_source.py
+++ b/tools/data_source/data_source.py
@@ -4,6 +4,7 @@
import socket, urllib, sys, os
from galaxy import eggs #eggs needs to be imported so that galaxy.util can find docutils egg...
from galaxy.util.json import from_json_string, to_json_string
+from galaxy.util import get_charset_from_http_headers
import galaxy.model # need to import model before sniff to resolve a circular import dependency
from galaxy.datatypes import sniff
from galaxy.datatypes.registry import Registry
@@ -92,7 +93,7 @@
stop_err( 'The size of the data (%d bytes) you have requested exceeds the maximum allowed (%d bytes) on this server.' % ( file_size, max_file_size ) )
#do sniff stream for multi_byte
try:
- cur_filename, is_multi_byte = sniff.stream_to_open_named_file( page, os.open( cur_filename, os.O_WRONLY | os.O_CREAT ), cur_filename )
+ cur_filename, is_multi_byte = sniff.stream_to_open_named_file( page, os.open( cur_filename, os.O_WRONLY | os.O_CREAT ), cur_filename, source_encoding=get_charset_from_http_headers( page.headers ) )
except Exception, e:
stop_err( 'Unable to fetch %s:\n%s' % ( cur_URL, e ) )

diff -r 6a644558eed7aba74a74060ccf0aa4ec211809c2 -r 07045f4895170a154c955f48291f2b5e6feb94c2 tools/data_source/upload.py
--- a/tools/data_source/upload.py
+++ b/tools/data_source/upload.py
@@ -90,7 +90,8 @@

if dataset.type == 'url':
try:
- temp_name, dataset.is_multi_byte = sniff.stream_to_file( urllib.urlopen( dataset.path ), prefix='url_paste' )
+ page = urllib.urlopen( dataset.path ) #page will be .close()ed by sniff methods
+ temp_name, dataset.is_multi_byte = sniff.stream_to_file( page, prefix='url_paste', source_encoding=util.get_charset_from_http_headers( page.headers ) )
except Exception, e:
file_err( 'Unable to fetch %s\n%s' % ( dataset.path, str( e ) ), dataset, json_file )
return
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
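The data_source.py and upload.py hunks above both pass a source_encoding derived from the HTTP response headers into the sniff helpers. A rough standalone sketch of that idea, with illustrative names and made-up header values (not Galaxy API), decoding in the advertised charset and re-encoding as utf-8:

    def charset_from_headers( headers, default='utf-8' ):
        # same parsing strategy as the commit: use whatever follows 'charset='
        content_type = headers.get( 'content-type', '' )
        if 'charset=' in content_type:
            return content_type.split( 'charset=' )[-1].split( ';' )[0].strip() or default
        return default

    headers = { 'content-type': 'text/plain; charset=ISO-8859-1' }   # example headers, not a real response
    data = 'caf\xe9'                                                  # latin-1 bytes from such a server
    print data.decode( charset_from_headers( headers ), 'replace' ).encode( 'utf-8' )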
commit/galaxy-central: inithello: Made download status page more informative, fixed intermittent indexing error.
by Bitbucket 29 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8a8a37bbdc2f/
changeset: 8a8a37bbdc2f
user: inithello
date: 2012-08-29 16:16:11
summary: Made download status page more informative, fixed intermittent indexing error.
affected #: 2 files
diff -r 30c7f58f116d9e11a209ed8a5d193ec2d91de280 -r 8a8a37bbdc2fc233446966793b6545d553aa50d8 lib/galaxy/tools/genome_index/index_genome.py
--- a/lib/galaxy/tools/genome_index/index_genome.py
+++ b/lib/galaxy/tools/genome_index/index_genome.py
@@ -54,6 +54,7 @@
self._log( self.locations )
self._log( 'Indexer %s completed successfully.' % indexer )
self._flush_files()
+ exit(0)
def _check_link( self ):
self._log( 'Checking symlink to %s' % self.fafile )
diff -r 30c7f58f116d9e11a209ed8a5d193ec2d91de280 -r 8a8a37bbdc2fc233446966793b6545d553aa50d8 lib/galaxy/web/controllers/data_admin.py
--- a/lib/galaxy/web/controllers/data_admin.py
+++ b/lib/galaxy/web/controllers/data_admin.py
@@ -148,7 +148,8 @@
dbkey = build[0]
longname = build[1]
break
- assert dbkey is not '?', 'That build was not found'
+ if dbkey == '?':
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='An invalid build was specified.' )
ftp = ftplib.FTP('hgdownload.cse.ucsc.edu')
ftp.login('anonymous', trans.get_user().email)
checker = []
@@ -189,7 +190,8 @@
dbkeys=trans.ucsc_builds )
elif source == 'Ensembl':
dbkey = params.get( 'ensembl_dbkey', None )
- assert dbkey is not '?', 'That build was not found'
+ if dbkey == '?':
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='An invalid build was specified.' )
for build in trans.ensembl_builds:
if build[ 'dbkey' ] == dbkey:
dbkey = build[ 'dbkey' ]
@@ -199,7 +201,7 @@
break
url = 'ftp://ftp.ensembl.org/pub/release-%s/fasta/%s/dna/%s.%s.%s.dna.toplevel.fa.…' % ( release, pathname.lower(), pathname, dbkey, release )
else:
- return trans.fill_template( '/admin/data_admin/generic_error.mako', message='Somehow an invalid data source was specified.' )
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='An invalid data source was specified.' )
if url is None:
return trans.fill_template( '/admin/data_admin/generic_error.mako', message='Unable to generate a valid URL with the specified parameters.' )
params = dict( protocol='http', name=dbkey, datatype='fasta', url=url, user=trans.user.id )
@@ -248,7 +250,8 @@
sa = trans.app.model.context.current
if jobtype == 'liftover':
job = sa.query( model.TransferJob ).filter_by( id=jobid ).first()
- joblabel = 'Download liftOver'
+ liftover = trans.app.job_manager.deferred_job_queue.plugins['LiftOverTransferPlugin'].get_job_status( jobid )
+ joblabel = 'Download liftOver (%s to %s)' % ( liftover.params[ 'from_genome' ], liftover.params[ 'to_genome' ] )
elif jobtype == 'transfer':
job = sa.query( model.TransferJob ).filter_by( id=jobid ).first()
joblabel = 'Download Genome'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
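The two hunks that replace "assert dbkey is not '?'" fix a real trap: "is" tests object identity rather than equality, so the assert could pass even when dbkey equals '?'. A tiny standalone illustration with made-up values:

    dbkey = u'?'        # request parameters frequently arrive as unicode
    print dbkey == '?'  # True  -> what the check is really after
    print dbkey is '?'  # False -> so the old assert never fired for this value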
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/30c7f58f116d/
changeset: 30c7f58f116d
user: dan
date: 2012-08-29 16:03:42
summary: Unicode fixes for annotations
affected #: 2 files
diff -r 80dd03582ea9041d0c897b3a2c58e12a5bb494bb -r 30c7f58f116d9e11a209ed8a5d193ec2d91de280 lib/galaxy/model/item_attrs.py
--- a/lib/galaxy/model/item_attrs.py
+++ b/lib/galaxy/model/item_attrs.py
@@ -95,7 +95,7 @@
""" Returns a user's annotation string for an item. """
annotation_obj = self.get_item_annotation_obj( db_session, user, item )
if annotation_obj:
- return annotation_obj.annotation
+ return galaxy.util.unicodify( annotation_obj.annotation )
return None
def get_item_annotation_obj( self, db_session, user, item ):
diff -r 80dd03582ea9041d0c897b3a2c58e12a5bb494bb -r 30c7f58f116d9e11a209ed8a5d193ec2d91de280 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -644,7 +644,10 @@
dataset = self.get_dataset( trans, id, False, True )
if not dataset:
web.httpexceptions.HTTPNotFound()
- return self.get_item_annotation_str( trans.sa_session, trans.user, dataset )
+ annotation = self.get_item_annotation_str( trans.sa_session, trans.user, dataset )
+ if annotation and isinstance( annotation, unicode ):
+ annotation = annotation.encode( 'ascii', 'replace' ) #paste needs ascii here
+ return annotation
@web.expose
def display_at( self, trans, dataset_id, filename=None, **kwd ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
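The controller change above first coerces the annotation to unicode and then forces ASCII because the WSGI layer (Paste) expects a plain byte string. A standalone sketch of that two-step pattern, with an illustrative unicodify() modelled on the galaxy.util helper added in the related commits:

    def unicodify( value, encoding='utf-8', error='replace' ):
        # minimal stand-in for galaxy.util.unicodify()
        if isinstance( value, unicode ):
            return value
        return unicode( value, encoding, error )

    annotation = 'r\xc3\xa9sum\xc3\xa9 of the run'                 # utf-8 bytes as stored
    print unicodify( annotation ).encode( 'ascii', 'replace' )     # -> r?sum? of the run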
5 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1aee30671c45/
changeset: 1aee30671c45
user: dan
date: 2012-08-29 01:23:39
summary: Handle non-ascii unicode in data source tools. Add util.is_binary() method that returns true when provided string contains a null byte.
affected #: 3 files
diff -r 738b703f725c1ade02018489e1c8e197236e30cd -r 1aee30671c45ecf045e33fe472525b814b97838d lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py
+++ b/lib/galaxy/datatypes/sniff.py
@@ -6,6 +6,7 @@
from galaxy import util
from galaxy.datatypes.checkers import *
from galaxy.datatypes.binary import unsniffable_binary_formats
+from encodings import search_function as encodings_search_function
log = logging.getLogger(__name__)
@@ -15,7 +16,7 @@
full_path = os.path.join(path, 'test', fname)
return full_path
-def stream_to_open_named_file( stream, fd, filename ):
+def stream_to_open_named_file( stream, fd, filename, source_encoding=None, source_error='strict', target_encoding=None, target_error='strict' ):
"""Writes a stream to the provided file descriptor, returns the file's name and bool( is_multi_byte ). Closes file descriptor"""
#signature and behavor is somewhat odd, due to backwards compatibility, but this can/should be done better
CHUNK_SIZE = 1048576
@@ -23,6 +24,10 @@
is_compressed = False
is_binary = False
is_multi_byte = False
+ if not target_encoding or not encodings_search_function( target_encoding ):
+ target_encoding = util.DEFAULT_ENCODING #utf-8
+ if not source_encoding:
+ source_encoding = util.DEFAULT_ENCODING #sys.getdefaultencoding() would mimic old behavior (defaults to ascii)
while 1:
chunk = stream.read( CHUNK_SIZE )
if not chunk:
@@ -42,13 +47,12 @@
chars = chunk[:100]
is_multi_byte = util.is_multi_byte( chars )
if not is_multi_byte:
- for char in chars:
- if ord( char ) > 128:
- is_binary = True
- break
+ is_binary = util.is_binary( chunk )
data_checked = True
if not is_compressed and not is_binary:
- os.write( fd, chunk.encode( "utf-8" ) )
+ if not isinstance( chunk, unicode ):
+ chunk = chunk.decode( source_encoding, source_error )
+ os.write( fd, chunk.encode( target_encoding, target_error ) )
else:
# Compressed files must be encoded after they are uncompressed in the upload utility,
# while binary files should not be encoded at all.
@@ -56,10 +60,10 @@
os.close( fd )
return filename, is_multi_byte
-def stream_to_file( stream, suffix='', prefix='', dir=None, text=False ):
+def stream_to_file( stream, suffix='', prefix='', dir=None, text=False, **kwd ):
"""Writes a stream to a temporary file, returns the temporary file's name"""
fd, temp_name = tempfile.mkstemp( suffix=suffix, prefix=prefix, dir=dir, text=text )
- return stream_to_open_named_file( stream, fd, temp_name )
+ return stream_to_open_named_file( stream, fd, temp_name, **kwd )
def check_newlines( fname, bytes_to_read=52428800 ):
"""
@@ -305,14 +309,9 @@
else:
for hdr in headers:
for char in hdr:
- if len( char ) > 1:
- for c in char:
- if ord( c ) > 128:
- is_binary = True
- break
- elif ord( char ) > 128:
- is_binary = True
- break
+ #old behavior had 'char' possibly having length > 1,
+ #need to determine when/if this occurs
+ is_binary = util.is_binary( char )
if is_binary:
break
if is_binary:
diff -r 738b703f725c1ade02018489e1c8e197236e30cd -r 1aee30671c45ecf045e33fe472525b814b97838d lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -34,6 +34,9 @@
gzip_magic = '\037\213'
bz2_magic = 'BZh'
+DEFAULT_ENCODING = 'utf-8'
+NULL_CHAR = '\000'
+BINARY_CHARS = [ NULL_CHAR ]
from inflection import Inflector, English
inflector = Inflector(English)
@@ -57,6 +60,32 @@
return True
return False
+def is_binary( value, binary_chars=None ):
+ """
+ File is binary if it contains a null-byte by default (e.g. behavior of grep, etc.).
+ This may fail for utf-16 files, but so would ASCII encoding.
+ >>> is_binary( string.printable )
+ False
+ >>> is_binary( '\\xce\\x94' )
+ False
+ >>> is_binary( '\\000' )
+ True
+ """
+ if binary_chars is None:
+ binary_chars = BINARY_CHARS
+ for binary_char in binary_chars:
+ if binary_char in value:
+ return True
+ return False
+
+def get_charset_from_http_headers( headers, default=None ):
+ rval = headers.get('content-type', None )
+ if rval and 'charset=' in rval:
+ rval = rval.split('charset=')[-1].split(';')[0].strip()
+ if rval:
+ return rval
+ return default
+
def synchronized(func):
"""This wrapper will serialize access to 'func' to a single thread. Use it as a decorator."""
def caller(*params, **kparams):
@@ -333,6 +362,17 @@
else:
return amount[0:sfs] + '0'*(len(amount) - sfs)
+def unicodify( value, encoding=DEFAULT_ENCODING, error='replace', default=None ):
+ """
+ Returns a unicode string or None
+ """
+ if isinstance( value, unicode ):
+ return value
+ try:
+ return unicode( value, encoding, error )
+ except:
+ return default
+
def object_to_string( obj ):
return binascii.hexlify( pickle.dumps( obj, 2 ) )
@@ -502,7 +542,7 @@
def recursively_stringify_dictionary_keys( d ):
if isinstance(d, dict):
- return dict([(k.encode('utf-8'), recursively_stringify_dictionary_keys(v)) for k,v in d.iteritems()])
+ return dict([(k.encode( DEFAULT_ENCODING ), recursively_stringify_dictionary_keys(v)) for k,v in d.iteritems()])
elif isinstance(d, list):
return [recursively_stringify_dictionary_keys(x) for x in d]
else:
@@ -622,7 +662,7 @@
Sends an email.
"""
to = listify( to )
- msg = MIMEText( body )
+ msg = MIMEText( body.encode( 'ascii', 'replace' ) )
msg[ 'To' ] = ', '.join( to )
msg[ 'From' ] = frm
msg[ 'Subject' ] = subject
diff -r 738b703f725c1ade02018489e1c8e197236e30cd -r 1aee30671c45ecf045e33fe472525b814b97838d tools/data_source/data_source.py
--- a/tools/data_source/data_source.py
+++ b/tools/data_source/data_source.py
@@ -4,6 +4,7 @@
import socket, urllib, sys, os
from galaxy import eggs #eggs needs to be imported so that galaxy.util can find docutils egg...
from galaxy.util.json import from_json_string, to_json_string
+from galaxy.util import get_charset_from_http_headers
import galaxy.model # need to import model before sniff to resolve a circular import dependency
from galaxy.datatypes import sniff
from galaxy.datatypes.registry import Registry
@@ -92,7 +93,7 @@
stop_err( 'The size of the data (%d bytes) you have requested exceeds the maximum allowed (%d bytes) on this server.' % ( file_size, max_file_size ) )
#do sniff stream for multi_byte
try:
- cur_filename, is_multi_byte = sniff.stream_to_open_named_file( page, os.open( cur_filename, os.O_WRONLY | os.O_CREAT ), cur_filename )
+ cur_filename, is_multi_byte = sniff.stream_to_open_named_file( page, os.open( cur_filename, os.O_WRONLY | os.O_CREAT ), cur_filename, source_encoding=get_charset_from_http_headers( page.headers ) )
except Exception, e:
stop_err( 'Unable to fetch %s:\n%s' % ( cur_URL, e ) )
https://bitbucket.org/galaxy/galaxy-central/changeset/577498958c37/
changeset: 577498958c37
user: dan
date: 2012-08-29 01:23:39
summary: Handle non-ascii unicode in upload tool. Significant pre-existing refactoring still needed.
affected #: 1 file
diff -r 1aee30671c45ecf045e33fe472525b814b97838d -r 577498958c37cb3c0702b06a048bb51b2d82257a tools/data_source/upload.py
--- a/tools/data_source/upload.py
+++ b/tools/data_source/upload.py
@@ -90,7 +90,8 @@
if dataset.type == 'url':
try:
- temp_name, dataset.is_multi_byte = sniff.stream_to_file( urllib.urlopen( dataset.path ), prefix='url_paste' )
+ page = urllib.urlopen( dataset.path ) #page will be .close()ed by sniff methods
+ temp_name, dataset.is_multi_byte = sniff.stream_to_file( page, prefix='url_paste', source_encoding=util.get_charset_from_http_headers( page.headers ) )
except Exception, e:
file_err( 'Unable to fetch %s\n%s' % ( dataset.path, str( e ) ), dataset, json_file )
return
https://bitbucket.org/galaxy/galaxy-central/changeset/fca4e12478d6/
changeset: fca4e12478d6
user: dan
date: 2012-08-29 01:23:39
summary: Handle non-ascii unicode in dataset edit attributes.
affected #: 1 file
diff -r 577498958c37cb3c0702b06a048bb51b2d82257a -r fca4e12478d62162cb486422c79687587209c639 templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako
+++ b/templates/dataset/edit_attributes.mako
@@ -58,7 +58,7 @@
Info:
</label><div style="float: left; width: 250px; margin-right: 10px;">
- <textarea name="info" cols="40" rows="2">${data.info | h}</textarea>
+ <textarea name="info" cols="40" rows="2">${ util.unicodify( data.info ) | h}</textarea></div><div style="clear: both"></div></div>
https://bitbucket.org/galaxy/galaxy-central/changeset/efefe08d6dd0/
changeset: efefe08d6dd0
user: dan
date: 2012-08-29 01:23:39
summary: Handle non-ascii unicode in dataset error page.
affected #: 1 file
diff -r fca4e12478d62162cb486422c79687587209c639 -r efefe08d6dd0c70055a80953007e312e1d85d271 templates/dataset/errors.mako
--- a/templates/dataset/errors.mako
+++ b/templates/dataset/errors.mako
@@ -24,21 +24,21 @@
<% job = hda.creating_job_associations[0].job %>
%if job.traceback:
The Galaxy framework encountered the following error while attempting to run the tool:
- <pre>${job.traceback | h}</pre>
+ <pre>${ util.unicodify( job.traceback ) | h}</pre>
%endif
%if job.stderr or job.info:
Tool execution generated the following error message:
%if job.stderr:
- <pre>${job.stderr | h}</pre>
+ <pre>${ util.unicodify( job.stderr ) | h}</pre>
%elif job.info:
- <pre>${job.info | h}</pre>
+ <pre>${ util.unicodify( job.info ) | h}</pre>
%endif
%else:
Tool execution did not generate any error messages.
%endif
%if job.stdout:
The tool produced the following additional output:
- <pre>${job.stdout | h}</pre>
+ <pre>${ util.unicodify( job.stdout ) | h}</pre>
%endif
%else:
The tool did not create any additional job / error info.
https://bitbucket.org/galaxy/galaxy-central/changeset/80dd03582ea9/
changeset: 80dd03582ea9
user: dan
date: 2012-08-29 01:23:40
summary: Handle non-ascii unicode in dataset error report emails.
affected #: 1 file
diff -r efefe08d6dd0c70055a80953007e312e1d85d271 -r 80dd03582ea9041d0c897b3a2c58e12a5bb494bb lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -203,12 +203,12 @@
job_id=job.id,
job_tool_id=job.tool_id,
job_command_line=job.command_line,
- job_stderr=job.stderr,
- job_stdout=job.stdout,
- job_info=job.info,
- job_traceback=job.traceback,
+ job_stderr=util.unicodify( job.stderr ),
+ job_stdout=util.unicodify( job.stdout ),
+ job_info=util.unicodify( job.info ),
+ job_traceback=util.unicodify( job.traceback ),
email=email,
- message=message )
+ message=util.unicodify( message ) )
frm = to_address
# Check email a bit
email = email.strip()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
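The first changeset in this batch hinges on the new util.is_binary(), which flags data as binary only when it contains a designated character (a null byte by default), so ordinary non-ASCII text is no longer misclassified. A minimal standalone restatement of that behaviour, not the Galaxy module itself:

    def is_binary( value, binary_chars=( '\000', ) ):
        # mirrors the intent of the new util.is_binary(): a null byte means binary
        return any( char in value for char in binary_chars )

    print is_binary( 'plain text with utf-8 bytes: \xce\x94' )   # False
    print is_binary( 'looks fine\x00until the null byte' )       # True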
commit/galaxy-central: greg: Fix handling of ToolSectionLabel objects in the tool panel.
by Bitbucket 28 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/738b703f725c/
changeset: 738b703f725c
user: greg
date: 2012-08-28 20:32:04
summary: Fix handling of ToolSectionLabel objects in the tool panel.
affected #: 1 file
diff -r 483cbfc5341a0331cc86185f8821cdecec52a0b4 -r 738b703f725c1ade02018489e1c8e197236e30cd lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -187,7 +187,9 @@
section.elems[ section_key ] = workflow
log.debug( "Loaded workflow: %s %s" % ( workflow_id, workflow.name ) )
elif section_key.startswith( 'label_' ):
- section.elems[ section_key ] = section_val
+ if section_val:
+ section.elems[ section_key ] = section_val
+ log.debug( "Loaded label: %s" % ( section_val.text ) )
self.tool_panel[ key ] = section
def load_integrated_tool_panel_keys( self ):
"""
@@ -215,12 +217,12 @@
section.elems[ key ] = None
elif section_elem.tag == 'label':
key = 'label_%s' % section_elem.get( 'id' )
- section.elems[ key ] = ToolSectionLabel( section_elem )
+ section.elems[ key ] = None
key = 'section_%s' % elem.get( 'id' )
self.integrated_tool_panel[ key ] = section
elif elem.tag == 'label':
key = 'label_%s' % elem.get( 'id' )
- self.integrated_tool_panel[ key ] = ToolSectionLabel( elem )
+ self.integrated_tool_panel[ key ] = None
def write_integrated_tool_panel_config_file( self ):
"""
Write the current in-memory version of the integrated_tool_panel.xml file to disk. Since Galaxy administrators
@@ -254,10 +256,11 @@
if section_item:
os.write( fd, ' <workflow id="%s" />\n' % section_item.id )
elif section_key.startswith( 'label_' ):
- label_id = section_item.id or ''
- label_text = section_item.text or ''
- label_version = section_item.version or ''
- os.write( fd, ' <label id="%s" text="%s" version="%s" />\n' % ( label_id, label_text, label_version ) )
+ if section_item:
+ label_id = section_item.id or ''
+ label_text = section_item.text or ''
+ label_version = section_item.version or ''
+ os.write( fd, ' <label id="%s" text="%s" version="%s" />\n' % ( label_id, label_text, label_version ) )
os.write( fd, ' </section>\n' )
os.write( fd, '</toolbox>\n' )
os.close( fd )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Don't attempt to run util.umask_fix_perms() on data that is being linked to. Fixes #801.
by Bitbucket 28 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/483cbfc5341a/
changeset: 483cbfc5341a
user: natefoo
date: 2012-08-28 19:50:54
summary: Don't attempt to run util.umask_fix_perms() on data that is being linked to. Fixes #801.
affected #: 1 file
diff -r d8bff94d8aac276ff77b65104c958ab5cc9ee243 -r 483cbfc5341a0331cc86185f8821cdecec52a0b4 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -471,7 +471,7 @@
job.user.total_disk_usage += bytes
# fix permissions
- for path in [ dp.real_path for dp in self.get_output_fnames() ]:
+ for path in [ dp.real_path for dp in self.get_mutable_output_fnames() ]:
util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
self.sa_session.flush()
log.debug( 'job %d ended' % self.job_id )
@@ -679,6 +679,11 @@
self.compute_outputs()
return self.output_paths
+ def get_mutable_output_fnames( self ):
+ if self.output_paths is None:
+ self.compute_outputs()
+ return filter( lambda dsp: dsp.mutable, self.output_paths )
+
def get_output_hdas_and_fnames( self ):
if self.output_hdas_and_paths is None:
self.compute_outputs()
@@ -686,10 +691,11 @@
def compute_outputs( self ) :
class DatasetPath( object ):
- def __init__( self, dataset_id, real_path, false_path = None ):
+ def __init__( self, dataset_id, real_path, false_path = None, mutable = True ):
self.dataset_id = dataset_id
self.real_path = real_path
self.false_path = false_path
+ self.mutable = mutable
def __str__( self ):
if self.false_path is None:
return self.real_path
@@ -706,13 +712,13 @@
self.output_hdas_and_paths = {}
for name, hda in [ ( da.name, da.dataset ) for da in job.output_datasets + job.output_library_datasets ]:
false_path = os.path.abspath( os.path.join( self.working_directory, "galaxy_dataset_%d.dat" % hda.dataset.id ) )
- dsp = DatasetPath( hda.dataset.id, hda.dataset.file_name, false_path )
+ dsp = DatasetPath( hda.dataset.id, hda.dataset.file_name, false_path, mutable = hda.dataset.external_filename is None )
self.output_paths.append( dsp )
self.output_hdas_and_paths[name] = hda, dsp
if special:
false_path = os.path.abspath( os.path.join( self.working_directory, "galaxy_dataset_%d.dat" % special.dataset.id ) )
else:
- results = [ ( da.name, da.dataset, DatasetPath( da.dataset.dataset.id, da.dataset.file_name ) ) for da in job.output_datasets + job.output_library_datasets ]
+ results = [ ( da.name, da.dataset, DatasetPath( da.dataset.dataset.id, da.dataset.file_name, mutable = da.dataset.dataset.external_filename is None ) ) for da in job.output_datasets + job.output_library_datasets ]
self.output_paths = [t[2] for t in results]
self.output_hdas_and_paths = dict([(t[0], t[1:]) for t in results])
if special:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
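The fix works by tagging each output path as mutable or not and only adjusting permissions on the mutable ones, so files Galaxy merely links to are left alone. A condensed standalone sketch; the class and paths are illustrative, not the full Galaxy DatasetPath:

    class DatasetPath( object ):
        def __init__( self, dataset_id, real_path, mutable=True ):
            self.dataset_id = dataset_id
            self.real_path = real_path
            self.mutable = mutable

    outputs = [ DatasetPath( 1, '/galaxy/files/dataset_1.dat' ),
                DatasetPath( 2, '/data/linked/external.fasta', mutable=False ) ]
    # same effect as get_mutable_output_fnames(): the linked-to file is skipped
    print [ dsp.real_path for dsp in outputs if dsp.mutable ]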
commit/galaxy-central: greg: Apply the improvements for generation of tool shed URLs to the repository controller.
by Bitbucket 28 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d8bff94d8aac/
changeset: d8bff94d8aac
user: greg
date: 2012-08-28 17:49:43
summary: Apply the improvements for generation of tool shed URLs to the repository controller.
affected #: 2 files
diff -r 4b05f621540cb46fd8b120997258008b37050f63 -r d8bff94d8aac276ff77b65104c958ab5cc9ee243 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -392,8 +392,9 @@
# Send a request to the relevant tool shed to see if there are any updates.
repository = get_repository( trans, kwd[ 'id' ] )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = url_join( tool_shed_url, 'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision ) )
+ url = url_join( tool_shed_url,
+ 'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision ) )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
@@ -634,8 +635,9 @@
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
- url = url_join( tool_shed_url, '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
+ url = url_join( tool_shed_url,
+ '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
response = urllib2.urlopen( url )
text = response.read()
response.close()
@@ -954,7 +956,9 @@
repository_ids = kwd.get( 'repository_ids', None )
changeset_revisions = kwd.get( 'changeset_revisions', None )
# Get the information necessary to install each repository.
- url = url_join( tool_shed_url, 'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s&webapp=galaxy' % ( repository_ids, changeset_revisions ) )
+ url = url_join( tool_shed_url,
+ 'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s&webapp=galaxy' % \
+ ( repository_ids, changeset_revisions ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
@@ -1097,8 +1101,9 @@
name = repo_info_dict.keys()[ 0 ]
repo_info_tuple = repo_info_dict[ name ]
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
- url = url_join( tool_shed_url, 'repository/get_readme?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( name, repository_owner, changeset_revision ) )
+ url = url_join( tool_shed_url,
+ 'repository/get_readme?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( name, repository_owner, changeset_revision ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
@@ -1273,8 +1278,9 @@
tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
# Get all previous change set revisions from the tool shed for the repository back to, but excluding, the previous valid changeset
# revision to see if it was previously installed using one of them.
- url = url_join( tool_shed_url, 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) )
+ url = url_join( tool_shed_url,
+ 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) )
response = urllib2.urlopen( url )
text = response.read()
response.close()
@@ -1350,8 +1356,9 @@
# Get the tool_versions from the tool shed for each tool in the installed change set.
repository = get_repository( trans, kwd[ 'id' ] )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = url_join( tool_shed_url, 'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( repository.name, repository.owner, repository.changeset_revision ) )
+ url = url_join( tool_shed_url,
+ 'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( repository.name, repository.owner, repository.changeset_revision ) )
response = urllib2.urlopen( url )
text = response.read()
response.close()
diff -r 4b05f621540cb46fd8b120997258008b37050f63 -r d8bff94d8aac276ff77b65104c958ab5cc9ee243 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -11,7 +11,7 @@
from galaxy.model.orm import *
from galaxy.util.shed_util import create_repo_info_dict, get_changectx_for_changeset, get_configured_ui, get_repository_file_contents, NOT_TOOL_CONFIGS
from galaxy.util.shed_util import open_repository_files_folder, reversed_lower_upper_bounded_changelog, reversed_upper_bounded_changelog, strip_path
-from galaxy.util.shed_util import to_html_escaped, update_repository
+from galaxy.util.shed_util import to_html_escaped, update_repository, url_join
from galaxy.tool_shed.encoding_util import *
from common import *
@@ -749,9 +749,10 @@
update = 'true'
no_update = 'false'
else:
- # Start building up the url to redirect back to the calling Galaxy instance.
- url = '%sadmin_toolshed/update_to_changeset_revision?tool_shed_url=%s' % ( galaxy_url, url_for( '/', qualified=True ) )
- url += '&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % ( repository.name, repository.user.username, changeset_revision )
+ # Start building up the url to redirect back to the calling Galaxy instance.
+ url = url_join( galaxy_url,
+ 'admin_toolshed/update_to_changeset_revision?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
+ ( url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision ) )
if changeset_revision == repository.tip:
# If changeset_revision is the repository tip, there are no additional updates.
if from_update_manager:
@@ -1395,10 +1396,9 @@
"""Send the list of repository_ids and changeset_revisions to Galaxy so it can begin the installation process."""
galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
# Redirect back to local Galaxy to perform install.
- url = '%sadmin_toolshed/prepare_for_install' % galaxy_url
- url += '?tool_shed_url=%s' % url_for( '/', qualified=True )
- url += '&repository_ids=%s' % ','.join( util.listify( repository_ids ) )
- url += '&changeset_revisions=%s' % ','.join( util.listify( changeset_revisions ) )
+ url = url_join( galaxy_url,
+ 'admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
+ ( url_for( '/', qualified=True ), ','.join( util.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions ) ) ) )
return trans.response.send_redirect( url )
@web.expose
def load_invalid_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
28 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4b05f621540c/
changeset: 4b05f621540c
user: inithello
date: 2012-08-28 17:32:17
summary: Improved generation of tool shed URLs
affected #: 2 files
diff -r c87370c5340d359d72301234a958c138adfecd7e -r 4b05f621540cb46fd8b120997258008b37050f63 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -454,7 +454,7 @@
def generate_clone_url( trans, repository ):
"""Generate the URL for cloning a repository."""
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- return '%s/repos/%s/%s' % ( tool_shed_url, repository.owner, repository.name )
+ return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
def generate_datatypes_metadata( datatypes_config, metadata_dict ):
"""Update the received metadata_dict with information from the parsed datatypes_config."""
tree = ElementTree.parse( datatypes_config )
@@ -993,7 +993,7 @@
break
return converter_path, display_path
def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ):
- url = '%s/repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % ( tool_shed_url, name, owner, changeset_revision )
+ url = url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % ( name, owner, changeset_revision ) )
response = urllib2.urlopen( url )
ctx_rev = response.read()
response.close()
@@ -1221,8 +1221,8 @@
def get_update_to_changeset_revision_and_ctx_rev( trans, repository ):
"""Return the changeset revision hash to which the repository can be updated."""
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = '%s/repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \
- ( tool_shed_url, repository.name, repository.owner, repository.installed_changeset_revision )
+ url = url_join( tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository.name, repository.owner, repository.installed_changeset_revision ) )
try:
response = urllib2.urlopen( url )
encoded_update_dict = response.read()
@@ -1645,3 +1645,8 @@
tool_shed_repository.status = status
sa_session.add( tool_shed_repository )
sa_session.flush()
+def url_join( *args ):
+ parts = []
+ for arg in args:
+ parts.append( arg.strip( '/' ) )
+ return '/'.join( parts )
diff -r c87370c5340d359d72301234a958c138adfecd7e -r 4b05f621540cb46fd8b120997258008b37050f63 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -374,7 +374,7 @@
def browse_tool_shed( self, trans, **kwd ):
tool_shed_url = kwd[ 'tool_shed_url' ]
galaxy_url = url_for( '/', qualified=True )
- url = '%srepository/browse_valid_categories?galaxy_url=%s&webapp=galaxy' % ( tool_shed_url, galaxy_url )
+ url = url_join( tool_shed_url, 'repository/browse_valid_categories?galaxy_url=%s&webapp=galaxy' % ( galaxy_url ) )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
@@ -392,8 +392,8 @@
# Send a request to the relevant tool shed to see if there are any updates.
repository = get_repository( trans, kwd[ 'id' ] )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = '%s/repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision )
+ url = url_join( tool_shed_url, 'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision ) )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
@@ -467,14 +467,14 @@
def find_tools_in_tool_shed( self, trans, **kwd ):
tool_shed_url = kwd[ 'tool_shed_url' ]
galaxy_url = url_for( '/', qualified=True )
- url = '%srepository/find_tools?galaxy_url=%s&webapp=galaxy' % ( tool_shed_url, galaxy_url )
+ url = url_join( tool_shed_url, 'repository/find_tools?galaxy_url=%s&webapp=galaxy' % galaxy_url )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
def find_workflows_in_tool_shed( self, trans, **kwd ):
tool_shed_url = kwd[ 'tool_shed_url' ]
galaxy_url = url_for( '/', qualified=True )
- url = '%srepository/find_workflows?galaxy_url=%s&webapp=galaxy' % ( tool_shed_url, galaxy_url )
+ url = url_join( tool_shed_url, 'repository/find_workflows?galaxy_url=%s&webapp=galaxy' % galaxy_url )
return trans.response.send_redirect( url )
def generate_tool_path( self, repository_clone_url, changeset_revision ):
"""
@@ -489,7 +489,7 @@
tool_shed_url = items[ 0 ]
repo_path = items[ 1 ]
tool_shed_url = clean_tool_shed_url( tool_shed_url )
- return '%s/repos%s/%s' % ( tool_shed_url, repo_path, changeset_revision )
+ return url_join( tool_shed_url, 'repos', repo_path, changeset_revision )
@web.json
@web.require_admin
def get_file_contents( self, trans, file_path ):
@@ -634,8 +634,8 @@
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
- url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision )
+ url = url_join( tool_shed_url, '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
response = urllib2.urlopen( url )
text = response.read()
response.close()
@@ -954,7 +954,7 @@
repository_ids = kwd.get( 'repository_ids', None )
changeset_revisions = kwd.get( 'changeset_revisions', None )
# Get the information necessary to install each repository.
- url = '%srepository/get_repository_information?repository_ids=%s&changeset_revisions=%s&webapp=galaxy' % ( tool_shed_url, repository_ids, changeset_revisions )
+ url = url_join( tool_shed_url, 'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s&webapp=galaxy' % ( repository_ids, changeset_revisions ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
@@ -1097,8 +1097,8 @@
name = repo_info_dict.keys()[ 0 ]
repo_info_tuple = repo_info_dict[ name ]
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
- url = '%srepository/get_readme?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, name, repository_owner, changeset_revision )
+ url = url_join( tool_shed_url, 'repository/get_readme?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( name, repository_owner, changeset_revision ) )
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
@@ -1273,8 +1273,8 @@
tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
# Get all previous change set revisions from the tool shed for the repository back to, but excluding, the previous valid changeset
# revision to see if it was previously installed using one of them.
- url = '%s/repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision )
+ url = url_join( tool_shed_url, 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) )
response = urllib2.urlopen( url )
text = response.read()
response.close()
@@ -1350,8 +1350,8 @@
# Get the tool_versions from the tool shed for each tool in the installed change set.
repository = get_repository( trans, kwd[ 'id' ] )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, repository.name, repository.owner, repository.changeset_revision )
+ url = url_join( tool_shed_url, 'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( repository.name, repository.owner, repository.changeset_revision ) )
response = urllib2.urlopen( url )
text = response.read()
response.close()
@@ -1522,7 +1522,7 @@
def __generate_clone_url( self, trans, repository ):
"""Generate the URL for cloning a repository."""
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- return '%s/repos/%s/%s' % ( tool_shed_url, repository.owner, repository.name )
+ return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
## ---- Utility methods -------------------------------------------------------
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
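The new url_join() simply strips slashes from each piece and rejoins them with single slashes, which removes the guesswork about whether tool_shed_url ends in a slash that the old '%s...' % interpolations suffered from. A condensed standalone equivalent for illustration only (the host name is a placeholder):

    def url_join( *args ):
        return '/'.join( arg.strip( '/' ) for arg in args )

    print url_join( 'http://toolshed.example.org/', 'repos', 'some_owner', 'some_repo' )
    # -> http://toolshed.example.org/repos/some_owner/some_repo
    print url_join( 'http://toolshed.example.org', '/repository/get_ctx_rev?name=foo&owner=bar' )
    # -> http://toolshed.example.org/repository/get_ctx_rev?name=foo&owner=bar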
commit/galaxy-central: greg: Add the ability to browse writable repositories in a tool shed.
by Bitbucket 28 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c87370c5340d/
changeset: c87370c5340d
user: greg
date: 2012-08-28 17:01:07
summary: Add the ability to browse writable repositories in a tool shed.
affected #: 2 files
diff -r 5adbc8515631847cb66378ea28e644344d890265 -r c87370c5340d359d72301234a958c138adfecd7e lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -246,6 +246,25 @@
grids.GridAction( "User preferences", dict( controller='user', action='index', cntrller='repository', webapp='community' ) )
]
+class WritableRepositoryListGrid( RepositoryListGrid ):
+ def build_initial_query( self, trans, **kwd ):
+ # TODO: improve performance by adding a db table associating users with repositories for which they have write access.
+ username = kwd[ 'username' ]
+ clause_list = []
+ for repository in trans.sa_session.query( self.model_class ):
+ allow_push_usernames = repository.allow_push.split( ',' )
+ if username in allow_push_usernames:
+ clause_list.append( self.model_class.table.c.id == repository.id )
+ if clause_list:
+ return trans.sa_session.query( self.model_class ) \
+ .filter( or_( *clause_list ) ) \
+ .join( model.User.table ) \
+ .outerjoin( model.RepositoryCategoryAssociation.table ) \
+ .outerjoin( model.Category.table )
+ # Return an empty query.
+ return trans.sa_session.query( self.model_class ) \
+ .filter( self.model_class.table.c.id < 0 )
+
class ValidRepositoryListGrid( RepositoryListGrid ):
class CategoryColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
@@ -393,6 +412,7 @@
email_alerts_repository_list_grid = EmailAlertsRepositoryListGrid()
category_list_grid = CategoryListGrid()
valid_category_list_grid = ValidCategoryListGrid()
+ writable_repository_list_grid = WritableRepositoryListGrid()
def __add_hgweb_config_entry( self, trans, repository, repository_path ):
# Add an entry in the hgweb.config file for a new repository. An entry looks something like:
@@ -519,12 +539,15 @@
repository_id = kwd.get( 'id', None )
repository = get_repository( trans, repository_id )
kwd[ 'f-email' ] = repository.user.email
- elif operation == "my_repositories":
+ elif operation == "repositories_i_own":
# Eliminate the current filters if any exist.
for k, v in kwd.items():
if k.startswith( 'f-' ):
del kwd[ k ]
kwd[ 'f-email' ] = trans.user.email
+ elif operation == "writable_repositories":
+ kwd[ 'username' ] = trans.user.username
+ return self.writable_repository_list_grid( trans, **kwd )
elif operation == "repositories_by_category":
# Eliminate the current filters if any exist.
for k, v in kwd.items():
diff -r 5adbc8515631847cb66378ea28e644344d890265 -r c87370c5340d359d72301234a958c138adfecd7e templates/webapps/community/index.mako
--- a/templates/webapps/community/index.mako
+++ b/templates/webapps/community/index.mako
@@ -60,34 +60,41 @@
%endif
<div class="toolSectionPad"></div><div class="toolSectionTitle">
- Repositories
+ All Repositories
</div>
- <div class="toolSectionBody">
- <div class="toolSectionBg">
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_categories', webapp='community' )}">Browse by category</a>
- </div>
- %if trans.user:
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories', operation='my_repositories', webapp='community' )}">Browse my repositories</a>
- </div>
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_invalid_tools', cntrller='repository', webapp='community' )}">Browse my invalid tools</a>
- </div>
- %endif
+ <div class="toolTitle">
+ <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_categories', webapp='community' )}">Browse by category</a>
+ </div>
+ %if trans.user:
+ <div class="toolSectionPad"></div>
+ <div class="toolSectionTitle">
+ My Repositories and Tools
</div>
- </div>
- <div class="toolSectionBody">
- <div class="toolSectionBg">
- <div class="toolTitle">
- %if trans.user:
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='create_repository', webapp='community' )}">Create new repository</a>
- %else:
- <a target="galaxy_main" href="${h.url_for( controller='/user', action='login', webapp='community' )}">Login to create a repository</a>
- %endif
- </div>
+ <div class="toolTitle">
+ <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories', operation='repositories_i_own', webapp='community' )}">Repositories I own</a></div>
- </div>
+ <div class="toolTitle">
+ <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories', operation='writable_repositories', webapp='community' )}">My writable repositories</a>
+ </div>
+ <div class="toolTitle">
+ <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_invalid_tools', cntrller='repository', webapp='community' )}">My invalid tools</a>
+ </div>
+ <div class="toolSectionPad"></div>
+ <div class="toolSectionTitle">
+ Available Actions
+ </div>
+ <div class="toolTitle">
+ <a target="galaxy_main" href="${h.url_for( controller='repository', action='create_repository', webapp='community' )}">Create new repository</a>
+ </div>
+ %else:
+ <div class="toolSectionPad"></div>
+ <div class="toolSectionTitle">
+ Available Actions
+ </div>
+ <div class="toolTitle">
+ <a target="galaxy_main" href="${h.url_for( controller='/user', action='login', webapp='community' )}">Login to create a repository</a>
+ </div>
+ %endif
</div></div></div>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
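Before the grid builds its SQLAlchemy clause list, writability comes down to membership in the repository's comma-separated allow_push field. A one-line standalone illustration with made-up values:

    allow_push = 'greg,natefoo,inithello'       # usernames stored on the repository
    username = 'natefoo'
    print username in allow_push.split( ',' )   # True -> this repository is listed as writable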
28 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5adbc8515631/
changeset: 5adbc8515631
user: jgoecks
date: 2012-08-28 15:05:31
summary: Fixes for phyloviz parser and a4c7aeb61c7b.
affected #: 2 files
diff -r a4c7aeb61c7b8e76fcf3fd96d0d351be88d1c1d1 -r 5adbc8515631847cb66378ea28e644344d890265 lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
--- a/lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
+++ b/lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
@@ -1,6 +1,6 @@
from newickparser import Newick_Parser
from nexusparser import Nexus_Parser
-#from phyloxmlparser import Phyloxml_Parser
+from phyloxmlparser import Phyloxml_Parser
class Phyloviz_DataProvider(object):
@@ -16,9 +16,9 @@
if fileExt == "nhx": # parses newick files
newickParser = Newick_Parser()
jsonDicts, parseMsg = newickParser.parseFile(filepath)
- #elif fileExt == "phyloxml": # parses phyloXML files
- # phyloxmlParser = Phyloxml_Parser()
- # jsonDicts, parseMsg = phyloxmlParser.parseFile(filepath)
+ elif fileExt == "phyloxml": # parses phyloXML files
+ phyloxmlParser = Phyloxml_Parser()
+ jsonDicts, parseMsg = phyloxmlParser.parseFile(filepath)
elif fileExt == "nex": # parses nexus files
nexusParser = Nexus_Parser()
jsonDicts, parseMsg = nexusParser.parseFile(filepath)
diff -r a4c7aeb61c7b8e76fcf3fd96d0d351be88d1c1d1 -r 5adbc8515631847cb66378ea28e644344d890265 lib/galaxy/visualization/phyloviz/phyloxmlparser.py
--- a/lib/galaxy/visualization/phyloviz/phyloxmlparser.py
+++ b/lib/galaxy/visualization/phyloviz/phyloxmlparser.py
@@ -1,8 +1,5 @@
from baseparser import Base_Parser, PhyloTree, Node
-
-'''
-TOD0: use native Python XML parser.
-from lxml import etree
+from xml.etree import ElementTree
class Phyloxml_Parser(Base_Parser):
"""Parses a phyloxml file into a json file that will be passed to PhyloViz for display"""
@@ -22,7 +19,7 @@
"""passes a file and extracts its Phylogeny Tree content."""
phyloXmlFile = open(filePath, "r")
- xmlTree = etree.parse(phyloXmlFile)
+ xmlTree = ElementTree.parse(phyloXmlFile)
xmlRoot = xmlTree.getroot()[0]
self.nameSpaceIndex = xmlRoot.tag.rfind("}") + 1 # used later by the clean tag method to remove the name space in every element.tag
@@ -134,16 +131,4 @@
def cleanTag(self, tagString):
return tagString[self.nameSpaceIndex:]
-
-
-if __name__=="__main__":
-
- # Files tested against
- parser = Phyloxml_Parser()
- filepath = "../data/" +"apaf.xml"
- # filepath = "../data/" +"12_multiple_supports.xml"
-
- # filepath = "../data/" +"bcl_2.xml"
- # filepath = "../data/" +"reducedXml.xml"
- parser.parseFile(filepath)
-'''
\ No newline at end of file
+
\ No newline at end of file
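For readers following the parser change above: the namespace handling hinges on the fact that ElementTree returns tags as '{namespace}tag'. A minimal standalone sketch of that idea, using only the standard library (the file name is illustrative, not part of the commit):

from xml.etree import ElementTree

def strip_namespace(tag):
    # phyloXML tags come back as '{http://www.phyloxml.org}clade';
    # everything up to and including '}' is the namespace prefix.
    return tag[tag.rfind("}") + 1:]

tree = ElementTree.parse("example.phyloxml")   # hypothetical input file
phylogeny = tree.getroot()[0]                  # first <phylogeny> element under <phyloxml>
for element in phylogeny.iter():
    print(strip_namespace(element.tag))

This mirrors the nameSpaceIndex/cleanTag pair in the diff and removes the need for the lxml dependency.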
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Comment out phyloviz XML parser until native Python XML parsing is used.
by Bitbucket 27 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a4c7aeb61c7b/
changeset: a4c7aeb61c7b
user: jgoecks
date: 2012-08-27 21:44:01
summary: Comment out phyloviz XML parser until native Python XML parsing is used.
affected #: 2 files
diff -r 75a03bacdc7a3dc5b1c03f8b02df0ab383366955 -r a4c7aeb61c7b8e76fcf3fd96d0d351be88d1c1d1 lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
--- a/lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
+++ b/lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
@@ -1,6 +1,6 @@
from newickparser import Newick_Parser
from nexusparser import Nexus_Parser
-from phyloxmlparser import Phyloxml_Parser
+#from phyloxmlparser import Phyloxml_Parser
class Phyloviz_DataProvider(object):
@@ -16,9 +16,9 @@
if fileExt == "nhx": # parses newick files
newickParser = Newick_Parser()
jsonDicts, parseMsg = newickParser.parseFile(filepath)
- elif fileExt == "phyloxml": # parses phyloXML files
- phyloxmlParser = Phyloxml_Parser()
- jsonDicts, parseMsg = phyloxmlParser.parseFile(filepath)
+ #elif fileExt == "phyloxml": # parses phyloXML files
+ # phyloxmlParser = Phyloxml_Parser()
+ # jsonDicts, parseMsg = phyloxmlParser.parseFile(filepath)
elif fileExt == "nex": # parses nexus files
nexusParser = Nexus_Parser()
jsonDicts, parseMsg = nexusParser.parseFile(filepath)
diff -r 75a03bacdc7a3dc5b1c03f8b02df0ab383366955 -r a4c7aeb61c7b8e76fcf3fd96d0d351be88d1c1d1 lib/galaxy/visualization/phyloviz/phyloxmlparser.py
--- a/lib/galaxy/visualization/phyloviz/phyloxmlparser.py
+++ b/lib/galaxy/visualization/phyloviz/phyloxmlparser.py
@@ -1,4 +1,7 @@
from baseparser import Base_Parser, PhyloTree, Node
+
+'''
+TOD0: use native Python XML parser.
from lxml import etree
class Phyloxml_Parser(Base_Parser):
@@ -143,3 +146,4 @@
# filepath = "../data/" +"bcl_2.xml"
# filepath = "../data/" +"reducedXml.xml"
parser.parseFile(filepath)
+'''
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f6d9557b6d77/
changeset: f6d9557b6d77
user: Tomithy
date: 2012-08-26 12:22:23
summary: re-integrated phyloviz to a new fork of galaxy-central with a tat of minor touch up; save and edit now works
affected #: 19 files
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049baffb65374729d51b89 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -719,7 +719,49 @@
pass

class Newick( Text ):
- pass
+ """New Hampshire/Newick Format"""
+ file_ext = "nhx"
+
+ MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True )
+
+ def __init__(self, **kwd):
+ """Initialize foobar datatype"""
+ Text.__init__(self, **kwd)
+
+ def init_meta( self, dataset, copy_from=None ):
+ Text.init_meta( self, dataset, copy_from=copy_from )
+
+
+ def sniff( self, filename ):
+ """ Returning false as the newick format is too general and cannot be sniffed."""
+ return False
+
+
+class Nexus( Text ):
+ """Nexus format as used By Paup, Mr Bayes, etc"""
+ file_ext = "nex"
+
+ MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True )
+
+ def __init__(self, **kwd):
+ """Initialize foobar datatype"""
+ Text.__init__(self, **kwd)
+
+ def init_meta( self, dataset, copy_from=None ):
+ Text.init_meta( self, dataset, copy_from=copy_from )
+
+
+ def sniff( self, filename ):
+ """All Nexus Files Simply puts a '#NEXUS' in its first line"""
+ f = open(filename, "r")
+ firstline = f.readline().upper()
+ f.close()
+
+ if "#NEXUS" in firstline:
+ return True
+ else:
+ return False
+

# ------------- Utility methods --------------

diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049baffb65374729d51b89 lib/galaxy/datatypes/xml.py
--- a/lib/galaxy/datatypes/xml.py
+++ b/lib/galaxy/datatypes/xml.py
@@ -76,3 +76,24 @@
dataset.blurb = 'file purged from disk'
def sniff( self, filename ):
return False
+
+class Phyloxml( GenericXml ):
+ """Format for defining phyloxml data http://www.phyloxml.org/"""
+ file_ext = "phyloxml"
+ def set_peek( self, dataset, is_multi_byte=False ):
+ """Set the peek and blurb text"""
+ if not dataset.dataset.purged:
+ dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+ dataset.blurb = 'Phyloxml data'
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+
+ def sniff( self, filename ):
+ """"Checking for keyword - 'phyloxml' always in lowercase in the first few lines"""
+ f = open(filename, "r")
+ firstlines = "".join(f.readlines(5))
+ f.close()
+ if "phyloxml" in firstlines:
+ return True
+ return False
\ No newline at end of file
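The Newick, Nexus and Phyloxml sniffers added above all reduce to a cheap peek at the first line or two of the file. A rough standalone sketch of the same checks, outside of Galaxy's datatype classes (function names are illustrative):

def looks_like_nexus(filename):
    # Nexus files are expected to start with '#NEXUS' on the first line.
    with open(filename, "r") as f:
        return "#NEXUS" in f.readline().upper()

def looks_like_phyloxml(filename):
    # phyloXML files mention the lowercase keyword 'phyloxml' near the top.
    with open(filename, "r") as f:
        head = "".join(f.readline() for _ in range(5))
    return "phyloxml" in head

Newick itself is deliberately not sniffed, since the format is too general to detect reliably.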
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049baffb65374729d51b89 lib/galaxy/visualization/phyloviz/__init__.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/__init__.py
@@ -0,0 +1,1 @@
+__author__ = 'Tomithy'
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049baffb65374729d51b89 lib/galaxy/visualization/phyloviz/baseparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/baseparser.py
@@ -0,0 +1,125 @@
+import json
+
+class Node(object):
+ """Node class of PhyloTree, which represents a CLAUDE in a phylogenetic tree"""
+ def __init__(self, nodeName, **kwargs):
+ """Creates a node and adds in the typical annotations"""
+ self.name, self.id = nodeName, kwargs.get("id", 0)
+ self.depth = kwargs.get("depth", 0)
+ self.children = []
+
+ self.isInternal = kwargs.get("isInternal", 0)
+ self.length, self.bootstrap = kwargs.get("length", 0), kwargs.get("bootstrap", None)
+ self.events = kwargs.get("events", "")
+
+ # clean up boot strap values
+ if self.bootstrap == -1:
+ self.bootstrap = None
+
+ def addChildNode(self, child):
+ """Adds a child node to the current node"""
+ if isinstance(child, Node):
+ self.children.append(child)
+ else:
+ self.children += child
+
+
+ def __str__(self):
+ return self.name + " id:" + str(self.id) + ", depth: " + str(self.depth)
+
+
+ def toJson(self):
+ """Converts the data in the node to a dict representation of json"""
+ thisJson = {
+ "name" : self.name,
+ "id" : self.id,
+ "depth" : self.depth,
+ "dist" : self.length
+ }
+ thisJson = self.addChildrenToJson(thisJson)
+ thisJson = self.addMiscToJson(thisJson)
+ return thisJson
+
+ def addChildrenToJson(self, jsonDict):
+ """Needs a special method to addChildren, such that the key does not appear in the Jsondict when the children is empty
+ this requirement is due to the layout algorithm used by d3 layout for hiding subtree """
+ if len(self.children) > 0:
+ children = [ node.toJson() for node in self.children]
+ jsonDict["children"] = children
+ return jsonDict
+
+
+ def addMiscToJson(self, jsonDict):
+ """Adds other misc attributes to json if they are present"""
+ if not self.events == "":
+ jsonDict["events"] = self.events
+ if not self.bootstrap == None:
+ jsonDict["bootstrap"] = self.bootstrap
+ return jsonDict
+
+
+
+class PhyloTree(object):
+ """Standardized python based class to represent the phylogenetic tree parsed from different
+ phylogenetic file formats."""
+
+ def __init__(self):
+ self.root, self.rootAttr = None, {}
+ self.nodes = {}
+ self.title = None
+ self.id = 1
+
+ def addAttributesToRoot(self, attrDict):
+ """Adds attributes to root, but first we put it in a temp store and bind it with root when .toJson is called"""
+ for key, value in attrDict.items():
+ self.rootAttr[key] = value
+
+ def makeNode(self, nodeName, **kwargs):
+ """Called to make a node within PhyloTree, arbitrary kwargs can be passed to annotate nodes
+ Tracks the number of nodes via internally incremented id"""
+ kwargs["id"] = self.id
+ self.id += 1
+ return Node(nodeName, **kwargs)
+
+ def addRoot(self, root):
+ """Creates a root for phyloTree"""
+ assert isinstance(root, Node)
+ root.parent = None
+ self.root = root
+
+ def generateJsonableDict(self):
+ """Changes itself into a dictonary by recurssively calling the tojson on all its nodes. Think of it
+ as a dict in an array of dict in an array of dict and so on..."""
+ jsonTree = ""
+ if self.root:
+ assert isinstance(self.root, Node)
+ jsonTree = self.root.toJson()
+ for key, value in self.rootAttr.items():
+ # transfer temporary stored attr to root
+ jsonTree[key] = value
+ else:
+ raise Exception("Root is not assigned!")
+ return jsonTree
+
+
+
+class Base_Parser(object):
+ """Base parsers contain all the methods to handle phylogeny tree creation and
+ converting the data to json that all parsers should have"""
+
+ def __init__(self):
+ self.phyloTrees = []
+
+ def parseFile(self, filePath):
+ """Base method that all phylogeny file parser should have"""
+ raise Exception("Base method for phylogeny file parsers is not implemented")
+
+ def toJson(self, jsonDict):
+ """Convenience method to get a json string from a python json dict"""
+ return json.dumps(jsonDict)
+
+ def _writeJsonToFile(self, filepath, json):
+ """Writes the file out to the system"""
+ f = open(filepath, "w")
+ f.writelines(json)
+ f.close()
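To see what the base classes above produce, a small usage sketch (assuming it is run next to baseparser.py, or with the import adjusted to galaxy.visualization.phyloviz.baseparser):

from baseparser import PhyloTree, Base_Parser

tree = PhyloTree()
root = tree.makeNode("root", depth=0, isInternal=True)
root.addChildNode(tree.makeNode("A", depth=1, length=0.1))
root.addChildNode(tree.makeNode("B", depth=1, length=0.2))
tree.addRoot(root)

# generateJsonableDict() returns nested dicts; Base_Parser.toJson() serializes them.
print(Base_Parser().toJson(tree.generateJsonableDict()))

The output is the nested name/id/depth/dist/children structure that the d3 layout on the client side consumes.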
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049baffb65374729d51b89 lib/galaxy/visualization/phyloviz/newickparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/newickparser.py
@@ -0,0 +1,185 @@
+from baseparser import Base_Parser, PhyloTree
+import re
+
+class Newick_Parser(Base_Parser):
+ """For parsing trees stored in the newick format (.nhx)
+ It is necessarily more complex because this parser is later extended by Nexus for parsing newick as well.."""
+
+
+ def __init__(self):
+ super(Newick_Parser, self).__init__()
+
+
+ def parseFile(self, filePath):
+ """Parses a newick file to obtain the string inside. Returns: jsonableDict"""
+ with open(filePath, "r") as newickFile:
+ newickString = newickFile.read()
+ newickString = newickString.replace("\n", "").replace("\r", "")
+ return [self.parseData(newickString)], "Success"
+
+
+ def parseData(self, newickString):
+ """To be called on a newickString directly to parse it. Returns: jsonableDict"""
+ return self._parseNewickToJson(newickString)
+
+
+ def _parseNewickToJson(self, newickString, treeName=None, nameMap=None):
+ """parses a newick representation of a tree into a PhyloTree data structure,
+ which can be easily converted to json"""
+ self.phyloTree = PhyloTree()
+ newickString = self.cleanNewickString(newickString)
+ if nameMap:
+ newickString = self._mapName(newickString, nameMap)
+
+ self.phyloTree.root = self.parseNode(newickString, 0)
+ if nameMap:
+ self.phyloTree.addAttributesToRoot({"treeName": treeName})
+
+ return self.phyloTree.generateJsonableDict()
+
+
+ def cleanNewickString(self, rawNewick):
+ """removing semi colon, and illegal json characters (\,',") and white spaces"""
+ return re.sub(r'\s|;|\"|\'|\\', '', rawNewick)
+
+
+ def _makeNodesFromString(self, string, depth):
+ """elements separated by comma could be empty"""
+
+ if string.find("(") != -1:
+ raise Exception("Tree is not well form, location: " + string)
+
+ childrenString = string.split(",")
+ childrenNodes = []
+
+ for childString in childrenString:
+ if len(childString) == 0:
+ continue
+ nodeInfo = childString.split(":")
+ name, length, bootstrap = "", None, -1
+ if len(nodeInfo) == 2: # has length info
+ length = nodeInfo[1]
+ # checking for bootstap values
+ name = nodeInfo[0]
+ try: # Nexus may bootstrap in names position
+ name = float(name)
+ if 0<= name <= 1:
+ bootstrap = name
+ elif 1 <= name <= 100:
+ bootstrap = name / 100
+ name = ""
+ except ValueError:
+ name = nodeInfo[0]
+ else:
+ name = nodeInfo[0] # string only contains name
+ node = self.phyloTree.makeNode(name, length=length, depth=depth, bootstrap= bootstrap)
+ childrenNodes += [node]
+ return childrenNodes
+
+
+
+ def _mapName(self, newickString, nameMap):
+ """
+ Necessary to replace names of terms inside nexus representation
+ Also, its here because Mailaud's doesnt deal with id_strings outside of quotes(" ")
+ """
+ newString = ""
+ start = 0
+ end = 0
+
+ for i in xrange(len(newickString)):
+ if newickString[i] == "(" or newickString[i] == ",":
+ if re.match(r"[,(]", newickString[i+1:]):
+ continue
+ else:
+ end = i + 1
+ # i now refers to the starting position of the term to be replaced,
+ # we will next find j which is the ending pos of the term
+ for j in xrange(i+1, len(newickString)):
+ enclosingSymbol = newickString[j] # the immediate symbol after a common or left bracket which denotes the end of a term
+ if enclosingSymbol == ")" or enclosingSymbol == ":" or enclosingSymbol == ",":
+ termToReplace = newickString[end:j]
+
+ newString += newickString[start : end] + nameMap[termToReplace] #+ "'" "'" +
+ start = j
+ break
+
+ newString += newickString[start:]
+ return newString
+
+
+ def parseNode(self, string, depth):
+ """ Recursive method for parsing newick string, works by stripping down the string into substring
+ of newick contained with brackers, which is used to call itself.
+ Eg ... ( A, B, (D, E)C, F, G ) ...
+ We will make the preceeding nodes first A, B, then the internal node C, its children D, E,
+ and finally the succeeding nodes F, G"""
+
+ # Base case where there is only an empty string
+ if string == "":
+ return
+ # Base case there its only an internal claude
+ if string.find("(") == -1:
+ return self._makeNodesFromString(string, depth)
+
+ nodes, children = [], [] # nodes refer to the nodes on this level, children refers to the child of the
+ start = 0
+ lenOfPreceedingInternalNodeString = 0
+ bracketStack = []
+
+ for j in xrange(len(string)):
+ if string[j] == "(": #finding the positions of all the open brackets
+ bracketStack.append(j)
+ continue
+ if string[j] == ")": #finding the positions of all the closed brackets to extract claude
+ i = bracketStack.pop()
+
+ if len(bracketStack) == 0: # is child of current node
+
+ InternalNode = None
+
+ #First flat call to make nodes of the same depth but from the preceeding string.
+ startSubstring = string[start + lenOfPreceedingInternalNodeString: i]
+ preceedingNodes = self._makeNodesFromString(startSubstring, depth)
+ nodes += preceedingNodes
+
+ # Then We will try to see if the substring has any internal nodes first, make it then make nodes preceeding it and succeeding it.
+ if j + 1 < len(string):
+ stringRightOfBracket = string[j+1:] # Eg. '(b:0.4,a:0.3)c:0.3, stringRightOfBracket = c:0.3
+ match = re.search(r"[\)\,\(]", stringRightOfBracket)
+ if match:
+ indexOfNextSymbol = match.start()
+ stringRepOfInternalNode = stringRightOfBracket[:indexOfNextSymbol]
+ internalNodes = self._makeNodesFromString( stringRepOfInternalNode, depth)
+ if len(internalNodes) > 0:
+ InternalNode = internalNodes[0]
+ lenOfPreceedingInternalNodeString = len(stringRepOfInternalNode)
+ else: # sometimes the node can be the last element of a string
+ InternalNode = self._makeNodesFromString(string[j+1:], depth)[0]
+ lenOfPreceedingInternalNodeString = len(string) - j
+ if InternalNode == None: #creating a generic node if it is unnamed
+ InternalNode = self.phyloTree.makeNode( "", depth=depth, isInternal=True ) #"internal-" + str(depth)
+ lenOfPreceedingInternalNodeString = 0
+
+ # recussive call to make the internal claude
+ childSubString = string[ i + 1 : j ]
+ InternalNode.addChildNode(self.parseNode(childSubString, depth + 1))
+
+ nodes.append(InternalNode) # we append the internal node later to preserve order
+
+ start = j + 1
+ continue
+
+ if depth == 0: # if its the root node, we do nothing about it and return
+ return nodes[0]
+
+ # Adding last most set of children
+ endString = string[start:]
+ if string[start-1] == ")": # if the symbol belongs to an internal node which is created previously, then we remove it from the string left to parse
+ match = re.search(r"[\)\,\(]", endString)
+ if match:
+ endOfNodeName = start + match.start() + 1
+ endString = string[endOfNodeName:]
+ nodes += self._makeNodesFromString(endString, depth)
+
+ return nodes
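A quick way to exercise the recursive parser above (same caveat about the import path; the tree string is just an example matching the "( A, B, (D, E)C, F, G )" pattern from the docstring):

from newickparser import Newick_Parser

parser = Newick_Parser()
# parseData() takes a bare newick string; parseFile() reads one from disk instead.
json_dict = parser.parseData("(A:0.1,B:0.2,(D:0.3,E:0.4)C:0.5)F;")
print(json_dict["name"])             # 'F', the named root
print(len(json_dict["children"]))    # 3 top-level children: A, B and C

Note the module uses xrange, so like the rest of this code it targets the Python 2 interpreter Galaxy runs on.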
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049baffb65374729d51b89 lib/galaxy/visualization/phyloviz/nexusparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/nexusparser.py
@@ -0,0 +1,107 @@
+from newickparser import Newick_Parser
+import re
+
+MAX_READLINES = 200000
+
+
+class Nexus_Parser(Newick_Parser):
+
+ def __init__(self):
+ super(Nexus_Parser, self).__init__()
+
+ def parseFile(self, filePath):
+ """passes a file and extracts its Nexus content."""
+ return self.parseNexus(filePath)
+
+
+ def parseNexus(self, filename):
+ """ Nexus data is stored in blocks between a line starting with begin and another line starting with end;
+ Commends inside square brackets are to be ignored,
+ For more information: http://wiki.christophchamp.com/index.php/NEXUS_file_format
+ Nexus can store multiple trees
+ """
+
+ with open( filename, "rt") as nex_file:
+ nexlines = nex_file.readlines()
+
+ rowCount = 0
+ inTreeBlock = False # sentinel to check if we are in a tree block
+ intranslateBlock = False # sentinel to check if we are in the translate region of the tree. Stores synonyms of the labellings
+ self.inCommentBlock = False
+ self.nameMapping = None # stores mapping representation used in nexus format
+ treeNames = []
+
+ for line in nexlines:
+ line = line.replace(";\n", "")
+ lline = line.lower()
+
+ if rowCount > MAX_READLINES or (not nex_file) :
+ break
+ rowCount +=1
+ # We are only interested in the tree block.
+ if "begin" in lline and "tree" in lline and not inTreeBlock:
+ inTreeBlock = True
+ continue
+ if inTreeBlock and "end" in lline[:3]:
+ inTreeBlock, currPhyloTree = False, None
+ continue
+
+ if inTreeBlock:
+
+ if "title" in lline: # Adding title to the tree
+ titleLoc = lline.find("title")
+ title = line[titleLoc + 5:].replace(" ", "")
+
+ continue
+
+ if "translate" in lline:
+ intranslateBlock = True
+ self.nameMapping = {}
+ continue
+
+ if intranslateBlock:
+ mappingLine = self.splitLinebyWhitespaces(line)
+ key, value = mappingLine[1], mappingLine[2].replace(",", "").replace("'","") #replacing illegal json characters
+ self.nameMapping[key] = value
+
+ # Extracting newick Trees
+ if "tree" in lline:
+ intranslateBlock = False
+
+ treeLineCols = self.splitLinebyWhitespaces(line)
+ treeName, newick = treeLineCols[2], treeLineCols[-1]
+
+ if newick == "": # Empty lines can be found in tree blocks
+ continue
+
+ currPhyloTree = self._parseNewickToJson(newick, treeName, nameMap=self.nameMapping)
+
+ self.phyloTrees.append(currPhyloTree)
+ treeIndex = len(self.phyloTrees) - 1
+ treeNames.append( (treeName, treeIndex) ) # appending name of tree, and its index
+ continue
+
+ return self.phyloTrees, treeNames
+
+
+ def splitLinebyWhitespaces(self, line):
+ """replace tabs and write spaces to a single write space, so we can properly split it."""
+ return re.split(r"\s+", line)
+
+
+ def checkComments(self, line):
+ """Check to see if the line/lines is a comment."""
+ if not self.inCommentBlock:
+ if "[" in line:
+ if "]" not in line:
+ self.inCommentBlock = True
+ else:
+ return "Nextline" # need to move on to the nextline after getting out of comment
+ else :
+ if "]" in line:
+ if line.rfind("[") > line.rfind("]"):
+ pass # a comment block is closed but another is open.
+ else:
+ self.inCommentBlock = False
+ return "Nextline" # need to move on to the nextline after getting out of comment
+ return ""
\ No newline at end of file
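Since Nexus wraps newick trees in begin/end blocks with an optional translate table, a tiny end-to-end check of the parser above could look like the following (the scratch file name and taxon labels are made up):

from nexusparser import Nexus_Parser

nexus_text = (
    "#NEXUS\n"
    "begin trees;\n"
    "    translate\n"
    "        1 Apple,\n"
    "        2 Beet;\n"
    "    tree tree1 = (1:0.2,2:0.3);\n"
    "end;\n"
)
with open("tiny.nex", "w") as f:   # scratch file
    f.write(nexus_text)

trees, tree_names = Nexus_Parser().parseFile("tiny.nex")
print(tree_names)                         # [('tree1', 0)]: one tree, index 0
print(trees[0]["children"][0]["name"])    # 'Apple', resolved via the translate table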
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049baffb65374729d51b89 lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
@@ -0,0 +1,35 @@
+from newickparser import Newick_Parser
+from nexusparser import Nexus_Parser
+from phyloxmlparser import Phyloxml_Parser
+
+class Phyloviz_DataProvider(object):
+
+ def __init__(self):
+ pass
+
+ def parseFile(self, filepath, fileExt):
+ """returns [trees], meta
+ Trees are actually an array of JsonDicts. It's usually one tree, except in the case of Nexus
+ """
+ jsonDicts, meta = [], {}
+ try:
+ if fileExt == "nhx": # parses newick files
+ newickParser = Newick_Parser()
+ jsonDicts, parseMsg = newickParser.parseFile(filepath)
+ elif fileExt == "phyloxml": # parses phyloXML files
+ phyloxmlParser = Phyloxml_Parser()
+ jsonDicts, parseMsg = phyloxmlParser.parseFile(filepath)
+ elif fileExt == "nex": # parses nexus files
+ nexusParser = Nexus_Parser()
+ jsonDicts, parseMsg = nexusParser.parseFile(filepath)
+ meta["trees"] = parseMsg
+ else:
+ raise Exception("File type is not supported")
+
+ meta["msg"] = parseMsg
+
+ except Exception:
+ jsonDicts, meta["msg"] = [], "Parse failed"
+
+ return jsonDicts, meta
+
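The data provider is the piece the new controller calls into: given a dataset path and its extension it returns the parsed trees plus a metadata dict. A rough usage sketch (the path is a placeholder):

from galaxy.visualization.phyloviz.phyloviz_dataprovider import Phyloviz_DataProvider

provider = Phyloviz_DataProvider()
trees, meta = provider.parseFile("some_tree.nhx", "nhx")   # placeholder newick dataset
if meta["msg"] == "Parse failed":
    print("file could not be parsed")
else:
    print(trees[0])    # JSON-able dict for the first (and, for newick, only) tree

For "nex" input, meta also carries a "trees" entry listing (name, index) pairs, which appears to be what drives the Nexus tree selector in the UI code further below.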
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049baffb65374729d51b89 lib/galaxy/visualization/phyloviz/phyloxmlparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/phyloxmlparser.py
@@ -0,0 +1,145 @@
+from baseparser import Base_Parser, PhyloTree, Node
+from lxml import etree
+
+class Phyloxml_Parser(Base_Parser):
+ """Parses a phyloxml file into a json file that will be passed to PhyloViz for display"""
+
+ def __init__(self):
+ super(Phyloxml_Parser, self).__init__()
+ self.phyloTree = PhyloTree()
+ self.tagsOfInterest = {
+ "clade": "",
+ "name" : "name",
+ "branch_length" : "length",
+ "confidence" : "bootstrap",
+ "events" : "events"
+ }
+
+ def parseFile(self, filePath):
+ """passes a file and extracts its Phylogeny Tree content."""
+ phyloXmlFile = open(filePath, "r")
+
+ xmlTree = etree.parse(phyloXmlFile)
+ xmlRoot = xmlTree.getroot()[0]
+ self.nameSpaceIndex = xmlRoot.tag.rfind("}") + 1 # used later by the clean tag method to remove the name space in every element.tag
+
+ phyloRoot = None
+ for child in xmlRoot:
+ childTag = self.cleanTag(child.tag)
+ if childTag == "clade":
+ phyloRoot = child
+ elif childTag == "name":
+ self.phyloTree.title = child.text
+
+ self.phyloTree.root = self.parseNode(phyloRoot, 0)
+ jsonDict = self.phyloTree.generateJsonableDict()
+ return [jsonDict], "Success"
+
+
+ def parseNode(self, node, depth):
+ """Parses any node within a phyloxml tree and looks out for claude, which signals the creation of
+ nodes - internal OR leaf"""
+ assert isinstance(node, etree._Element)
+
+ tag = self.cleanTag(node.tag)
+ if not tag == "clade":
+ return None
+ hasInnerClade = False
+
+ # peeking once for parent and once for child to check if the node is internal
+ for child in node:
+ childTag = self.cleanTag(child.tag)
+ if childTag == "clade":
+ hasInnerClade = True
+ break
+
+ if hasInnerClade: # this node is an internal node
+ currentNode = self._makeInternalNode(node, depth= depth)
+ for child in node:
+ child = self.parseNode(child, depth + 1)
+ if isinstance(child, Node):
+ currentNode.addChildNode(child)
+
+ else: # this node is a leaf node
+ currentNode = self._makeLeafNode(node, depth=depth+1)
+
+ return currentNode
+
+
+ def _makeLeafNode(self, leafNode, depth = 0 ):
+ """Makes leaf nodes by calling Phylotree methods"""
+ node = {}
+ for child in leafNode:
+ childTag = self.cleanTag(child.tag)
+ if childTag in self.tagsOfInterest:
+ key = self.tagsOfInterest[childTag] # need to map phyloxml terms to ours
+ node[key] = child.text
+
+ node["depth"] = depth
+ return self.phyloTree.makeNode(self._getNodeName(leafNode), **node)
+
+ def _getNodeName(self, node, depth=-1):
+ """Gets the name of a claude. It handles the case where a taxonomy node is involved"""
+
+ def getTagFromTaxonomyNode(node):
+ """Returns the name of a taxonomy node. A taxonomy node have to be treated differently as the name
+ is embedded one level deeper"""
+ phyloxmlTaxoNames = {
+ "common_name" : "",
+ "scientific_name" : "",
+ "code" : ""
+ }
+ for child in node:
+ childTag = self.cleanTag(child.tag)
+ if childTag in phyloxmlTaxoNames:
+ return child.text
+ return ""
+
+ nodeName = ""
+ for child in node:
+ childTag = self.cleanTag(child.tag)
+ if childTag == "name" :
+ nodeName = child.text
+ break
+ elif childTag == "taxonomy":
+ nodeName = getTagFromTaxonomyNode(child)
+ break
+
+ return nodeName
+
+
+ def _makeInternalNode(self, internalNode, depth=0):
+ """ Makes an internal node from an element object that is guranteed to be a parent node.
+ Gets the value of interests like events and appends it to a custom node object that will be passed to PhyloTree to make nodes
+ """
+ node = {}
+ for child in internalNode:
+ childTag = self.cleanTag(child.tag)
+ if childTag == "clade":
+ continue
+ elif childTag in self.tagsOfInterest:
+ if childTag == "events": # events is nested 1 more level deeper than others
+ key, text = "events", self.cleanTag(child[0].tag)
+ else:
+ key = self.tagsOfInterest[childTag]
+ text = child.text
+ node[key] = text
+
+
+ return self.phyloTree.makeNode(self._getNodeName(internalNode, depth), **node)
+
+
+ def cleanTag(self, tagString):
+ return tagString[self.nameSpaceIndex:]
+
+
+if __name__=="__main__":
+
+ # Files tested against
+ parser = Phyloxml_Parser()
+ filepath = "../data/" +"apaf.xml"
+ # filepath = "../data/" +"12_multiple_supports.xml"
+
+ # filepath = "../data/" +"bcl_2.xml"
+ # filepath = "../data/" +"reducedXml.xml"
+ parser.parseFile(filepath)
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049baffb65374729d51b89 lib/galaxy/web/controllers/phyloviz.py
--- /dev/null
+++ b/lib/galaxy/web/controllers/phyloviz.py
@@ -0,0 +1,97 @@
+import pkg_resources
+pkg_resources.require( "bx-python" )
+
+from galaxy.util.json import to_json_string, from_json_string
+from galaxy.web.base.controller import *
+from galaxy.visualization.phyloviz.phyloviz_dataprovider import Phyloviz_DataProvider
+
+
+class PhyloVizController( BaseUIController, UsesVisualizationMixin, UsesHistoryDatasetAssociationMixin, SharableMixin ):
+ """
+ Controller for phyloViz browser interface.
+ """
+ def __init__(self, app ):
+ BaseUIController.__init__( self, app )
+
+ @web.expose
+ @web.require_login()
+ def index( self, trans, dataset_id = None, **kwargs ):
+ """
+ The index method is called using phyloviz/ with a dataset id passed in.
+ The relevant data set is then retrieved via get_json_from_datasetId which interfaces with the parser
+ The json representation of the phylogenetic tree along with the config is then written in the .mako template and passed back to the user
+ """
+ json, config = self.get_json_from_datasetId(trans, dataset_id)
+ config["saved_visualization"] = False
+ return trans.fill_template( "visualization/phyloviz.mako", data = json, config=config)
+
+
+ @web.expose
+ def visualization(self, trans, id):
+ """
+ Called using a viz_id (id) to retrieved stored visualization data (in json format) and all the viz_config
+ """
+ viz = self.get_visualization(trans, id)
+ config = self.get_visualization_config(trans, viz)
+ config["saved_visualization"] = True
+ data = config["root"]
+
+ return trans.fill_template( "visualization/phyloviz.mako", data = data, config=config)
+
+
+ @web.expose
+ @web.json
+ def load_visualization_json(self, trans, viz_id):
+ """
+ Though not used in current implementation, this provides user with a convenient method to retrieve the viz_data & viz_config via json.
+ """
+ viz = self.get_visualization(trans, viz_id)
+ viz_config = self.get_visualization_config(trans, viz)
+ viz_config["saved_visualization"] = True
+ return {
+ "data" : viz_config["root"],
+ "config" : viz_config
+ }
+
+
+ @web.expose
+ @web.json
+ def getJsonData(self, trans, dataset_id, treeIndex=0):
+ """
+ Method to retrieve data asynchronously via json format. Retriving from here rather than
+ making a direct datasets/ call allows for some processing and event capturing
+ """
+ treeIndex = int(treeIndex)
+ json, config = self.get_json_from_datasetId(trans, dataset_id, treeIndex)
+ packedJson = {
+ "data" : json,
+ "config" : config
+ }
+
+ return packedJson
+
+
+ def get_json_from_datasetId(self, trans, dataset_id, treeIndex=0):
+ """
+ For interfacing phyloviz controllers with phyloviz visualization data provider (parsers)
+ """
+ dataset = self.get_dataset(trans, dataset_id)
+ fileExt, filepath = dataset.ext, dataset.file_name # .name stores the name of the dataset from the orginal upload
+ json, config = "", {} # config contains properties of the tree and file
+
+ if fileExt == "json":
+ something, json = self.get_data(dataset)
+ else:
+ try:
+ pd = Phyloviz_DataProvider()
+ json, config = pd.parseFile(filepath, fileExt)
+ json = json[treeIndex]
+ except Exception:
+ pass
+
+ config["title"] = dataset.display_name()
+ config["ext"] = fileExt
+ config["dataset_id"] = dataset_id
+ config["treeIndex"] = treeIndex
+
+ return json, config
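Because getJsonData is exposed with @web.expose/@web.json, the tree JSON can be fetched with a plain GET once Galaxy is running. A rough client-side sketch (host, port and dataset_id below are placeholders, not values from this commit):

import json
import urllib2   # the codebase targets Python 2

url = ("http://localhost:8080/phyloviz/getJsonData"
       "?dataset_id=<encoded-dataset-id>&treeIndex=0")
packed = json.loads(urllib2.urlopen(url).read())
print(packed["config"]["title"])   # dataset display name
print(packed["data"]["name"])      # root node of the requested tree

The same {"data": ..., "config": ...} payload shape is what reloadViz in phyloviz.js consumes when switching between Nexus trees.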
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049baffb65374729d51b89 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py
+++ b/lib/galaxy/web/controllers/visualization.py
@@ -16,6 +16,10 @@
action = "paramamonster"
elif item.type == "circster":
action = "circster"
+ elif item.type == "phyloviz":
+ # Support phyloviz
+ controller = "phyloviz"
+ action = "visualization"
return dict( controller=controller, action=action, id=item.id )

# Grid definition
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049baffb65374729d51b89 static/scripts/viz/phyloviz.js
--- /dev/null
+++ b/static/scripts/viz/phyloviz.js
@@ -0,0 +1,955 @@
+var UserMenuBase =3D Backbone.View.extend({
+ /**
+ * Base class of any menus that takes in user interaction. Contains ch=
ecking methods.
+ */
+
+ className: 'UserMenuBase',
+
+ isAcceptableValue : function ($inputKey, min, max) {
+ /**
+ * Check if an input value is a number and falls within max min.
+ */
+ var self =3D this,
+ value =3D $inputKey.val(),
+ fieldName =3D $inputKey.attr("displayLabel") || $inputKey.attr=
("id").replace("phyloViz", "");
+
+ function isNumeric(n) {
+ return !isNaN(parseFloat(n)) && isFinite(n);
+ }
+
+ if (!isNumeric(value)){
+ alert(fieldName + " is not a number!");
+ return false;
+ }
+
+ if ( value > max){
+ alert(fieldName + " is too large.");
+ return false;
+ } else if ( value < min) {
+ alert(fieldName + " is too small.");
+ return false;
+ }
+ return true;
+ },
+
+ hasIllegalJsonCharacters : function($inputKey) {
+ /**
+ * Check if any user string inputs has illegal characters that jso=
n cannot accept
+ */
+ if ($inputKey.val().search(/"|'|\\/) !=3D=3D -1){
+ alert("Named fields cannot contain these illegal characters: d=
ouble quote(\"), single guote(\'), or back slash(\\). ");
+ return true;
+ }
+ return false;
+ }
+});
+
+
+function PhyloTreeLayout() {
+ /**
+ * -- Custom Layout call for phyloViz to suit the needs of a phylogene=
tic tree.
+ * -- Specifically: 1) Nodes have a display display of (=3D evo dist X=
depth separation) from their parent
+ * 2) Nodes must appear in other after they have expa=
nd and contracted
+ */
+
+ var self =3D this,
+ hierarchy =3D d3.layout.hierarchy().sort(null).value(null),
+ height =3D 360, // ! represents both the layout angle and the heig=
ht of the layout, in px
+ layoutMode =3D "Linear",
+ leafHeight =3D 18, // height of each individual leaf node
+ depthSeparation =3D 200, // separation between nodes of different =
depth, in px
+ leafIndex =3D 0, // change to recurssive call
+ defaultDist =3D 0.5, // tree defaults to 0.5 dist if no dist is sp=
ecified
+ maxTextWidth =3D 50; // maximum length of the text labels
+
+
+ self.leafHeight =3D function(inputLeafHeight){
+ if (typeof inputLeafHeight =3D=3D=3D "undefined"){ return leafHeig=
ht; }
+ else { leafHeight =3D inputLeafHeight; return self;}
+ };
+
+ self.layoutMode =3D function(mode){
+ if (typeof mode =3D=3D=3D "undefined"){ return layoutMode; }
+ else { layoutMode =3D mode; return self;}
+ };
+
+ self.layoutAngle =3D function(angle) { // changes the layout angle =
of the display, which is really changing the height
+ if (typeof angle =3D=3D=3D "undefined"){ return height; }
+ if (isNaN(angle) || angle < 0 || angle > 360) { return self; } // =
to use default if the user puts in strange values
+ else { height =3D angle; return self;}
+ };
+
+ self.separation =3D function(dist){ // changes the dist between the =
nodes of different depth
+ if (typeof dist =3D=3D=3D "undefined"){ return depthSeparation; }
+ else { depthSeparation =3D dist; return self;}
+ };
+
+ self.links =3D function (nodes) { // uses d3 native method to gene=
rate links. Done.
+ return d3.layout.tree().links(nodes);
+ };
+
+ // -- Custom method for laying out phylogeny tree in a linear fashion
+ self.nodes =3D function (d, i) {
+ var _nodes =3D hierarchy.call(self, d, i), // self is to f=
ind the depth of all the nodes, assumes root is passed in
+ nodes =3D [],
+ maxDepth =3D 0,
+ numLeaves =3D 0;
+
+ // changing from hierarchy's custom format for data to usable form=
at
+ _nodes.forEach(function (_node){
+ var node =3D _node.data;
+ node.depth =3D _node.depth;
+ maxDepth =3D node.depth > maxDepth ? node.depth : maxDepth; /=
/finding max depth of tree
+ nodes.push(node);
+ });
+ // counting the number of leaf nodes and assigning max depth to no=
des that do not have children to flush all the leave nodes
+ nodes.forEach(function(node){
+ if ( !node.children ) { //&& !node._children
+ numLeaves +=3D 1;
+ node.depth =3D maxDepth; // if a leaf has no child it woul=
d be assigned max depth
+ }
+ });
+
+ leafHeight =3D layoutMode =3D=3D=3D "Circular" ? height / numLeave=
s : leafHeight;
+ leafIndex =3D 0;
+ layout(nodes[0], maxDepth, leafHeight, null);
+
+ return nodes;
+ };
+
+
+ function layout (node, maxDepth, vertSeparation, parent) {
+ /**
+ * -- Function with side effect of adding x0, y0 to all child; tak=
e in the root as starting point
+ * assuming that the leave nodes would be sorted in presented ord=
er
+ * horizontal(y0) is calculated according to (=3D evo dis=
t X depth separation) from their parent
+ * vertical (x0) - if leave node: find its order in all o=
f the leave node =3D=3D=3D node.id, then multiply by verticalSeparation
+ * - if parent node: is place in the mid point al=
l of its children nodes
+ * -- The layout will first calculate the y0 field going towards t=
he leaves, and x0 when returning
+ */
+ var children =3D node.children,
+ sumChildVertSeparation =3D 0;
+
+ // calculation of node's dist from parents, going down.
+ var dist =3D node.dist || defaultDist;
+ dist =3D dist > 1 ? 1 : dist; // We constrain all dist to be l=
ess than one
+ node.dist =3D dist;
+ if (parent !=3D=3D null){
+ node.y0 =3D parent.y0 + dist * depthSeparation;
+ } else { //root node
+ node.y0 =3D maxTextWidth;
+ }
+
+
+ // if a node have no children, we will treat it as a leaf and star=
t laying it out first
+ if (!children) {
+ node.x0 =3D leafIndex++ * vertSeparation;
+ } else {
+ // if it has children, we will visit all its children and calc=
ulate its position from its children
+ children.forEach( function (child) {
+ child.parent =3D node;
+ sumChildVertSeparation +=3D layout(child, maxDepth, vertSe=
paration, node);
+ });
+ node.x0 =3D sumChildVertSeparation / children.length;
+ }
+
+ // adding properties to the newly created node
+ node.x =3D node.x0;
+ node.y =3D node.y0;
+ return node.x0;
+ }
+ return self;
+}
+
+
+/**
+ * -- PhyloTree Model --
+ */
+var PhyloTree =3D Visualization.extend({
+ defaults : {
+ layout: "Linear",
+ separation : 250, // px dist between nodes of different depth t=
o represent 1 evolutionary until
+ leafHeight: 18,
+ type : "phyloviz", // visualization type
+ title : "Title",
+ scaleFactor: 1,
+ translate: [0,0],
+ fontSize: 12, //fontSize of node label
+ selectedNode : null,
+ nodeAttrChangedTime : 0
+ },
+
+ root : {}, // Root has to be its own independent object because it is =
not part of the viz_config
+
+ toggle : function (d) {
+ /**
+ * Mechanism to expand or contract a single node. Expanded nodes h=
ave a children list, while for
+ * contracted nodes the list is stored in _children. Nodes with th=
eir children data stored in _children will not have their
+ * children rendered.
+ */
+ if(typeof d =3D=3D=3D "undefined") {return ;}
+ if (d.children ) {
+ d._children =3D d.children;
+ d.children =3D null;
+ } else {
+ d.children =3D d._children;
+ d._children =3D null;
+ }
+ },
+
+ toggleAll : function(d) {
+ /**
+ * Contracts the phylotree to a single node by repeatedly calling=
itself to place all the list
+ * of children under _children.
+ */
+ if (d.children && d.children.length !=3D=3D 0) {
+ d.children.forEach(this.toggleAll);
+ toggle(d);
+ }
+ },
+
+ getData : function (){
+ /**
+ * Return the data of the tree. Used for preserving state.
+ */
+ return this.root;
+ },
+
+ save: function() {
+ /**
+ * Overriding the default save mechanism to do some clean of circu=
lar reference of the
+ * phyloTree and to include phyloTree in the saved json
+ */
+ var root =3D this.root;
+ cleanTree(root);
+ this.set("root", root);
+
+ function cleanTree(node){
+ // we need to remove parent to delete circular reference
+ delete node.parent;
+
+ // removing unnecessary attributes
+ if (node._selected){ delete node._selected;}
+
+ node.children ? node.children.forEach(cleanTree) : 0;
+ node._children ? node._children.forEach(cleanTree) : 0;
+ }
+
+ var config =3D jQuery.extend(true, {}, this.attributes);
+ config["selectedNode"] =3D null;
+
+ show_message("Saving to Galaxy", "progress");
+
+ return $.ajax({
+ url: this.url(),
+ type: "POST",
+ dataType: "json",
+ data: {
+ vis_json: JSON.stringify(config)
+ },
+ success: function(res){
+ var viz_id =3D res.url.split("id=3D")[1].split("&")[0],
+ viz_url =3D "/phyloviz/visualization?id=3D" + viz_id;
+ window.history.pushState({}, "", viz_url + window.location=
.hash);
+ hide_modal();
+ }
+ });
+ }
+});
+
+
+
+/**
+ * -- Views --
+ */
+var PhylovizLayoutBase =3D Backbone.View.extend({
+ /**
+ * Stores the default variable for setting up the visualization
+ */
+ defaults : {
+ nodeRadius : 4.5 // radius of each node in the diagram
+ },
+
+
+ stdInit : function (options) {
+ /**
+ * Common initialization in layouts
+ */
+
+ var self =3D this;
+ self.model.on("change:separation change:leafHeight change:fontSize=
change:nodeAttrChangedTime", self.updateAndRender, self);
+
+ self.vis =3D options.vis;
+ self.i =3D 0;
+ self.maxDepth =3D -1; // stores the max depth of the tree
+
+ self.width =3D options.width;
+ self.height =3D options.height;
+ },
+
+
+ updateAndRender : function(source) {
+ /**
+ * Updates the visualization whenever there are changes in the ex=
pansion and contraction of nodes
+ * AND possibly when the tree is edited.
+ */
+ var vis =3D d3.select(".vis"),
+ self =3D this;
+ source =3D source || self.model.root;
+
+ self.renderNodes(source);
+ self.renderLinks(source);
+ self.addTooltips();
+ },
+
+
+ renderLinks : function(source) {
+ /**
+ * Renders the links for the visualization.
+ */
+ var self =3D this;
+ var diagonal =3D self.diagonal;
+ var duration =3D self.duration;
+ var layoutMode =3D self.layoutMode;
+ var link =3D self.vis.selectAll("g.completeLink")
+ .data(self.tree.links(self.nodes), function(d) { return d.targ=
et.id; });
+
+ var calcalateLinePos =3D function(d) {
+ d.pos0 =3D d.source.y0 + " " + d.source.x0; // position of t=
he source node <=3D> starting location of the line drawn
+ d.pos1 =3D d.source.y0 + " " + d.target.x0; // position where=
the line makes a right angle bend
+ d.pos2 =3D d.target.y0 + " " + d.target.x0; // point where=
the horizontal line becomes a dotted line
+ };
+
+ var linkEnter =3D link.enter().insert("svg:g","g.node")
+ .attr("class", "completeLink");
+
+
+ linkEnter.append("svg:path")
+ .attr("class", "link")
+ .attr("d", function(d) {
+ calcalateLinePos(d);
+ return "M " + d.pos0 + " L " + d.pos1;
+ });
+
+ var linkUpdate =3D link.transition().duration(500);
+
+ linkUpdate.select("path.link")
+ .attr("d", function(d) {
+ calcalateLinePos(d);
+ return "M " + d.pos0 + " L " + d.pos1 + " L " + d.pos2;
+ });
+
+ var linkExit =3D link.exit().remove();
+
+ },
+
+ // User Interaction methods below
+
+ selectNode : function(node){
+ /**
+ * Displays the information for editting
+ */
+ var self =3D this;
+ d3.selectAll("g.node")
+ .classed("selectedHighlight", function(d){
+ if (node.id =3D=3D=3D d.id){
+ if(node._selected) { // for de=3Dselecting node.
+ delete node._selected;
+ return false;
+ } else {
+ node._selected =3D true;
+ return true;
+ }
+ }
+ return false;
+ });
+
+ self.model.set("selectedNode", node);
+ $("#phyloVizSelectedNodeName").val(node.name);
+ $("#phyloVizSelectedNodeDist").val(node.dist);
+ $("#phyloVizSelectedNodeAnnotation").val(node.annotation || "");
+ },
+
+ addTooltips : function (){
+ /**
+ * Creates bootstrap tooltip for the visualization. Has to be cal=
led repeatedly due to newly generated
+ * enterNodes
+ */
+ $(".bs-tooltip").remove(); //clean up tooltip, just in case i=
ts listeners are removed by d3
+ $(".node")
+ .attr("data-original-title", function(){
+ var d =3D this.__data__,
+ annotation =3D d.annotation || "None" ;
+ return d ? (d.name ? d.name + "<br/>" : "") + "Dist: " + d=
.dist + " <br/>Annotation: " + annotation: "";
+ })
+ .tooltip({'placement':'top', 'trigger' : 'hover'});
+
+ }
+});
+
+
+
+
+var PhylovizLinearView =3D PhylovizLayoutBase.extend({
+ /**
+ * Linea layout class of Phyloviz, is responsible for rendering the no=
des
+ * calls PhyloTreeLayout to determine the positions of the nodes
+ */
+ initialize : function(options){
+ // Default values of linear layout
+ var self =3D this;
+ self.margins =3D options.margins;
+ self.layoutMode =3D "Linear";
+
+ self.stdInit(options);
+
+ self.layout();
+ self.updateAndRender(self.model.root);
+ },
+
+ layout : function() {
+ /**
+ * Creates the basic layout of a linear tree by precalculating fix=
ed values.
+ * One of calculations are also made here
+ */
+
+ var self =3D this;
+
+ self.tree =3D new PhyloTreeLayout().layoutMode("Linear");
+ self.diagonal =3D d3.svg.diagonal()
+ .projection(function(d) { return [d.y, d.x ]; });
+ },
+
+ renderNodes : function (source) {
+ /**
+ * Renders the nodes base on Linear layout.
+ */
+ var self =3D this,
+ fontSize =3D self.model.get("fontSize") + "px";
+
+ // assigning properties from models
+ self.tree.separation(self.model.get("separation")).leafHeight(self=
.model.get("leafHeight"));
+
+ var duration =3D 500,
+ nodes =3D self.tree.separation(self.model.get("separation")).n=
odes(self.model.root);
+
+ var node =3D self.vis.selectAll("g.node")
+ .data(nodes, function(d) { return d.name + d.id || (d.id =3D +=
+self.i); });
+
+ // These variables has to be passed into update links which are in=
the base methods
+ self.nodes =3D nodes;
+ self.duration =3D duration;
+
+ // ------- D3 ENTRY --------
+ // Enter any new nodes at the parent's previous position.
+ var nodeEnter =3D node.enter().append("svg:g")
+ .attr("class", "node")
+ .on("dblclick", function(){ d3.event.stopPropagation(); })
+ .on("click", function(d) {
+ if (d3.event.altKey) {
+ self.selectNode(d); // display info if alt is p=
ressed
+ } else {
+ if(d.children && d.children.length =3D=3D=3D 0){ retur=
n;} // there is no need to toggle leaves
+ self.model.toggle(d); // contract/expand nodes at da=
ta level
+ self.updateAndRender(d); // re-render the tree
+ }
+ });
+
+ nodeEnter.attr("transform", function(d) { return "translate(" + so=
urce.y0 + "," + source.x0 + ")"; });
+
+ nodeEnter.append("svg:circle")
+ .attr("r", 1e-6)
+ .style("fill", function(d) { return d._children ? "lightsteelb=
lue" : "#fff"; });
+
+ nodeEnter.append("svg:text")
+ .attr("class", "nodeLabel")
+ .attr("x", function(d) { return d.children || d._children ? -1=
0 : 10; })
+ .attr("dy", ".35em")
+ .attr("text-anchor", function(d) { return d.children || d._chi=
ldren ? "end" : "start"; })
+ .style("fill-opacity", 1e-6);
+
+ // ------- D3 TRANSITION --------
+ // Transition nodes to their new position.
+ var nodeUpdate =3D node.transition()
+ .duration(duration);
+
+ nodeUpdate.attr("transform", function(d) {
+ return "translate(" + d.y + "," + d.x + ")"; });
+
+ nodeUpdate.select("circle")
+ .attr("r", self.defaults.nodeRadius)
+ .style("fill", function(d) { return d._children ? "lightsteelb=
lue" : "#fff"; });
+
+ nodeUpdate.select("text")
+ .style("fill-opacity", 1)
+ .style("font-size", fontSize)
+ .text(function(d) { return d.name; });
+
+ // ------- D3 EXIT --------
+ // Transition exiting nodes to the parent's new position.
+ var nodeExit =3Dnode.exit().transition()
+ .duration(duration)
+ .remove();
+
+ nodeExit.select("circle")
+ .attr("r", 1e-6);
+
+ nodeExit.select("text")
+ .style("fill-opacity", 1e-6);
+
+ // Stash the old positions for transition.
+ nodes.forEach(function(d) {
+ d.x0 =3D d.x; // we need the x0, y0 for parents with children
+ d.y0 =3D d.y;
+ });
+ }
+
+});
+
+var PhylovizView =3D Backbone.View.extend({
+
+ className: 'phyloviz',
+
+ initialize: function(options) {
+ var self =3D this;
+ // -- Default values of the vis
+ self.MIN_SCALE =3D 0.05; //for zooming
+ self.MAX_SCALE =3D 5;
+ self.MAX_DISPLACEMENT =3D 500;
+ self.margins =3D [10, 60, 10, 80];
+
+ self.width =3D $("#PhyloViz").width();
+ self.height =3D $("#PhyloViz").height();
+ self.radius =3D self.width;
+ self.data =3D options.data;
+
+ // -- Events Phyloviz view responses to
+ $(window).resize(function(){
+ self.width =3D $("#PhyloViz").width();
+ self.height =3D $("#PhyloViz").height();
+ self.render();
+ });
+
+ // -- Create phyloTree model
+ self.phyloTree =3D new PhyloTree(options.config);
+ self.phyloTree.root =3D self.data;
+
+ // -- Set up UI functions of main view
+ self.zoomFunc =3D d3.behavior.zoom().scaleExtent([self.MIN_SCALE, =
self.MAX_SCALE]);
+ self.zoomFunc.translate(self.phyloTree.get("translate"));
+ self.zoomFunc.scale(self.phyloTree.get("scaleFactor"));
+
+ // -- set up header buttons, search and settings menu
+ self.navMenu =3D new HeaderButtons(self);
+ self.settingsMenu =3D new SettingsMenu({phyloTree : self.phyloTree=
});
+ self.nodeSelectionView =3D new NodeSelectionView({phyloTree : self=
.phyloTree});
+ self.search =3D new PhyloVizSearch();
+
+
+ setTimeout(function(){ // using settimeout to call the zoomAn=
dPan function according to the stored attributes in viz_config
+ self.zoomAndPan();
+ }, 1000);
+ },
+
+ render: function(){
+ // -- Creating helper function for vis. --
+ var self =3D this;
+ $("#PhyloViz").empty();
+
+ // -- Layout viz. --
+ self.mainSVG =3D d3.select("#PhyloViz").append("svg:svg")
+ .attr("width", self.width)
+ .attr("height", self.height)
+ .attr("pointer-events", "all")
+ .call(self.zoomFunc.on("zoom", function(){
+ self.zoomAndPan();
+ }));
+
+ self.boundingRect =3D self.mainSVG.append("svg:rect")
+ .attr("class", "boundingRect")
+ .attr("width", self.width)
+ .attr("height", self.height)
+ .attr("stroke", "black")
+ .attr("fill", "white");
+
+ self.vis =3D self.mainSVG
+ .append("svg:g")
+ .attr("class", "vis");
+
+ self.layoutOptions =3D {
+ model : self.phyloTree,
+ width : self.width,
+ height : self.height,
+ vis: self.vis,
+ margins: self.margins
+ };
+
+ // -- Creating Title
+ $("#title").text("Phylogenetic Tree from " + self.phyloTree.get("t=
itle") + ":");
+
+ // -- Create Linear view instance --
+ var linearView =3D new PhylovizLinearView(self.layoutOptions)
+ },
+
+ zoomAndPan : function(event){
+ /**
+ * Function to zoom and pan the svg element which the entire tree =
is contained within
+ * Uses d3.zoom events, and extend them to allow manual updates an=
d keeping states in model
+ */
+ if (typeof event !=3D=3D "undefined") {
+ var zoomParams =3D event.zoom,
+ translateParams =3D event.translate;
+ }
+
+ var self =3D this,
+ scaleFactor =3D self.zoomFunc.scale(),
+ translationCoor =3D self.zoomFunc.translate(),
+ zoomStatement =3D "",
+ translateStatement =3D "";
+
+ // Do manual scaling.
+ switch (zoomParams) {
+ case "reset":
+ scaleFactor =3D 1.0;
+ translationCoor =3D [0,0]; break;
+ case "+":
+ scaleFactor *=3D 1.1; break;
+ case "-":
+ scaleFactor *=3D 0.9; break;
+ default:
+ if (typeof zoomParams =3D=3D=3D "number") {
+ scaleFactor =3D zoomParams;
+ } else if (d3.event !=3D=3D null) {
+ scaleFactor =3D d3.event.scale;
+ }
+ }
+ if (scaleFactor < self.MIN_SCALE || scaleFactor > self.MAX_SCALE) =
{ return;}
+ self.zoomFunc.scale(scaleFactor); //update scale Factor
+ zoomStatement =3D "translate(" + self.margins[3] + "," + self.mar=
gins[0] + ")" +
+ " scale(" + scaleFactor + ")";
+
+ // Do manual translation.
+ if( d3.event !=3D=3D null) {
+ translateStatement =3D "translate(" + d3.event.translate + ")";
+ } else {
+ if(typeof translateParams !=3D=3D "undefined") {
+ var x =3D translateParams.split(",")[0];
+ var y =3D translateParams.split(",")[1];
+ if (!isNaN(x) && !isNaN(y)){
+ translationCoor =3D [translationCoor[0] + parseFloat(x=
), translationCoor[1] + parseFloat(y)];
+ }
+ }
+ self.zoomFunc.translate(translationCoor); // update zoomFunc
+ translateStatement =3D "translate(" + translationCoor + ")";
+ }
+
+ self.phyloTree.set("scaleFactor", scaleFactor);
+ self.phyloTree.set("translate", translationCoor);
+ self.vis.attr("transform", translateStatement + zoomStatement); //=
refers to the view that we are actually zooming
+ },
+
+
+ reloadViz : function() {
+ /**
+ * Primes the Ajax URL to load another Nexus tree
+ */
+ var self =3D this,
+ treeIndex =3D $("#phylovizNexSelector :selected").val(),
+ dataset_id =3D self.phyloTree.get("dataset_id"),
+ url =3D "phyloviz/getJsonData?dataset_id=3D" + dataset_id + "&=
treeIndex=3D" + String(treeIndex);
+ $.getJSON(url, function(packedJson){
+ window.initPhyloViz(packedJson.data, packedJson.config);
+ });
+ }
+});
+
+
+var HeaderButtons =3D Backbone.View.extend({
+
+ initialize : function(phylovizView){
+ var self =3D this;
+ self.phylovizView =3D phylovizView;
+
+ // Clean up code - if the class initialized more than once
+ $("#panelHeaderRightBtns").empty();
+ $("#phyloVizNavBtns").empty();
+ $("#phylovizNexSelector").off();
+
+ self.initNavBtns();
+ self.initRightHeaderBtns();
+
+ // Initial a tree selector in the case of nexus
+ $("#phylovizNexSelector").off().on("change", function() {self.phy=
lovizView.reloadViz();} );
+
+ },
+
+ initRightHeaderBtns : function(){
+ var self =3D this;
+
+ rightMenu =3D create_icon_buttons_menu([
+ { icon_class: 'gear', title: 'PhyloViz Settings', on_click: fu=
nction(){
+ $("#SettingsMenu").show();
+ self.settingsMenu.updateUI();
+ } },
+ { icon_class: 'disk', title: 'Save visualization', on_click: f=
unction() {
+ var nexSelected =3D $("#phylovizNexSelector option:selecte=
d").text();
+ if(nexSelected) {
+ self.phylovizView.phyloTree.set("title", nexSelected);
+ }
+ self.phylovizView.phyloTree.save();
+ } },
+ { icon_class: 'chevron-expand', title: 'Search / Edit Nodes', =
on_click: function() {
+ $("#nodeSelectionView").show();
+ } },
+ { icon_class: 'information', title: 'Phyloviz Help', on_click:=
function() {
+ window.open('http://wiki.g2.bx.psu.edu/Learn/Visualization=
/PhylogeneticTree');
+ // https://docs.google.com/document/d/1AXFoJgEpxr21H3LICRs=
3EyMe1B1X_KFPouzIgrCz3zk/edit
+ } }
+ ],
+ {
+ tooltip_config: { placement: 'bottom' }
+ });
+ $("#panelHeaderRightBtns").append(rightMenu.$el);
+ },
+
+ initNavBtns: function() {
+ var self =3D this,
+ navMenu =3D create_icon_buttons_menu([
+ { icon_class: 'zoom-in', title: 'Zoom in', on_click: funct=
ion() {
+ self.phylovizView.zoomAndPan({ zoom : "+"});
+ } },
+ { icon_class: 'zoom-out', title: 'Zoom out', on_click: fun=
ction() {
+ self.phylovizView.zoomAndPan({ zoom : "-"});
+ } },
+ { icon_class: 'arrow-circle', title: 'Reset Zoom/Pan', on_=
click: function() {
+ self.phylovizView.zoomAndPan({ zoom : "reset"});
+ } }
+ ],
+ {
+ tooltip_config: { placement: 'bottom' }
+ });
+ $("#phyloVizNavBtns").append(navMenu.$el);
+ }
+});
+
+
+var SettingsMenu =3D UserMenuBase.extend({
+
+ className: 'Settings',
+
+ initialize: function(options){
+ // settings needs to directly interact with the phyloviz model so =
it will get access to it.
+ var self =3D this;
+ self.phyloTree =3D options.phyloTree;
+ self.el =3D $("#SettingsMenu");
+ self.inputs =3D {
+ separation : $("#phyloVizTreeSeparation"),
+ leafHeight : $("#phyloVizTreeLeafHeight"),
+ fontSize : $("#phyloVizTreeFontSize")
+ };
+
+ //init all buttons of settings
+ $("#settingsCloseBtn").off().on("click", function() { self.el.hide=
(); });
+ $("#phylovizResetSettingsBtn").off().on("click", function() { self=
.resetToDefaults(); });
+ $("#phylovizApplySettingsBtn").off().on("click", function() { self=
.apply(); });
+ },
+
+ apply : function(){
+ /**
+ * Applying user values to phylotree model.
+ */
+ var self =3D this;
+ if (!self.isAcceptableValue(self.inputs["separation"], 50, 2500) ||
+ !self.isAcceptableValue(self.inputs["leafHeight"], 5, 30) ||
+ !self.isAcceptableValue(self.inputs["fontSize"], 5, 20)){
+ return;
+ }
+ $.each(self.inputs, function(key, $input){
+ self.phyloTree.set(key, $input.val());
+ });
+ },
+ updateUI : function(){
+ /**
+ * Called to update the values input to that stored in the model
+ */
+ var self =3D this;
+ $.each(self.inputs, function(key, $input){
+ $input.val(self.phyloTree.get(key));
+ });
+ },
+ resetToDefaults : function(){
+ /**
+ * Resets the value of the phyloTree model to its default
+ */
+ $(".bs-tooltip").remove(); // just in case the tool tip was n=
ot removed
+ var self =3D this;
+ $.each(self.phyloTree.defaults, function(key, value) {
+ self.phyloTree.set(key, value);
+ });
+ self.updateUI();
+ },
+
+ render: function(){
+
+ }
+
+});
+
+
+var NodeSelectionView = UserMenuBase.extend({
+    /**
+     * View for inspecting node properties and editing them
+     */
+    className: 'Settings',
+
+    initialize : function (options){
+        var self = this;
+        self.el = $("#nodeSelectionView");
+        self.phyloTree = options.phyloTree;
+
+        self.UI = {
+            enableEdit : $('#phylovizEditNodesCheck'),
+            saveChanges : $('#phylovizNodeSaveChanges'),
+            cancelChanges : $("#phylovizNodeCancelChanges"),
+            name : $("#phyloVizSelectedNodeName"),
+            dist : $("#phyloVizSelectedNodeDist"),
+            annotation : $("#phyloVizSelectedNodeAnnotation")
+        };
+
+        self.valuesOfConcern = {
+            name : null,
+            dist : null,
+            annotation : null
+        }; // temporarily stores the values in case the user changes their mind
+
+        // init UI buttons
+        $("#nodeSelCloseBtn").off().on("click", function() { self.el.hide(); });
+        self.UI.saveChanges.off().on("click", function(){ self.updateNodes(); });
+        self.UI.cancelChanges.off().on("click", function(){ self.cancelChanges(); });
+
+        (function ($) {
+            // extending jquery fxn for enabling and disabling nodes.
+            $.fn.enable = function (isEnabled) {
+                return $(this).each(function () {
+                    if(isEnabled){
+                        $(this).removeAttr('disabled');
+                    } else {
+                        $(this).attr('disabled', 'disabled');
+                    }
+                });
+            };
+        })(jQuery);
+
+        self.UI.enableEdit.off().on("click", function () {
+            self.toggleUI();
+        });
+    },
+
+    toggleUI : function(){
+        /**
+         * Turns the child elements on and off.
+         */
+        var self = this,
+            checked = self.UI.enableEdit.is(':checked');
+
+        !checked ? self.cancelChanges() : "";
+
+        $.each(self.valuesOfConcern, function(key, value) {
+            self.UI[key].enable(checked);
+        });
+        if(checked){
+            self.UI.saveChanges.show();
+            self.UI.cancelChanges.show();
+        } else {
+            self.UI.saveChanges.hide();
+            self.UI.cancelChanges.hide();
+        }
+
+    },
+
+    cancelChanges : function() {
+        /**
+         * Reverts to the previous values in case the user changes their mind.
+         */
+        var self = this,
+            node = self.phyloTree.get("selectedNode");
+        if (node){
+            $.each(self.valuesOfConcern, function(key, value) {
+                self.UI[key].val(node[key]);
+            });
+        }
+    },
+
+    updateNodes : function (){
+        /**
+         * Writes the user-specified values into the underlying tree data.
+         */
+        var self = this,
+            node = self.phyloTree.get("selectedNode");
+        if (node){
+            if (!self.isAcceptableValue(self.UI.dist, 0, 1) ||
+                self.hasIllegalJsonCharacters(self.UI.name) ||
+                self.hasIllegalJsonCharacters(self.UI.annotation) ) {
+                return;
+            }
+            $.each(self.valuesOfConcern, function(key, value) {
+                node[key] = self.UI[key].val();
+            });
+            self.phyloTree.set("nodeAttrChangedTime", new Date());
+        } else {
+            alert("No node selected");
+        }
+    }
+
+
+});
+
+
+
+var PhyloVizSearch = UserMenuBase.extend({
+    /**
+     * Initializes the search panel in phyloviz and handles its user interaction.
+     * It allows the user to search the entire tree based on some qualifier, like dist <= val.
+     */
+    initialize : function () {
+        var self = this;
+
+        $("#phyloVizSearchBtn").on("click", function(){
+            var searchTerm = $("#phyloVizSearchTerm"),
+                searchConditionVal = $("#phyloVizSearchCondition").val().split("-"),
+                attr = searchConditionVal[0],
+                condition = searchConditionVal[1];
+            self.hasIllegalJsonCharacters(searchTerm);
+
+            if (attr === "dist"){
+                self.isAcceptableValue(searchTerm, 0, 1);
+            }
+            self.searchTree(attr, condition, searchTerm.val());
+        });
+    },
+
+    searchTree : function (attr, condition, val){
+        /**
+         * Searches the entire tree and highlights the nodes that match the condition in green.
+         */
+        d3.selectAll("g.node")
+            .classed("searchHighlight", function(d){
+                var attrVal = d[attr];
+                if (typeof attrVal !== "undefined" && attrVal !== null){
+                    if (attr === "dist"){
+                        switch (condition) {
+                            case "greaterEqual":
+                                return attrVal >= +val;
+                            case "lesserEqual":
+                                return attrVal <= +val;
+                            default:
+                                return;
+                        }
+
+                    } else if (attr === "name" || attr === "annotation") {
+                        return attrVal.toLowerCase().indexOf(val.toLowerCase()) !== -1;
+                    }
+                }
+            });
+    }
+});
\ No newline at end of file
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049ba=
ffb65374729d51b89 templates/root/history.mako
--- a/templates/root/history.mako
+++ b/templates/root/history.mako
@@ -272,6 +272,17 @@
}
=20
init_trackster_links();
+
+ function init_phyloviz_links() {
+ // PhyloViz links
+ // Add to trackster browser functionality
+ $(".phyloviz-add").live("click", function() {
+            var dataset = this,
+                dataset_jquery = $(this);
+            window.parent.location = dataset_jquery.attr("new-url");
+ });
+ }
+ init_phyloviz_links();
=20
// History rename functionality.
async_save_text("history-name-container", "history-name", "${h.url_for=
( controller=3D"/history", action=3D"rename_async", id=3Dtrans.security.enc=
ode_id(history.id) )}", "new_name", 18);
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049ba=
ffb65374729d51b89 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -29,6 +29,9 @@
## Render the dataset `data` as history item, using `hid` as the displayed=
id
<%def name=3D"render_dataset( data, hid, show_deleted_on_refresh =3D False=
, for_editing =3D True, display_structured =3D False )"><%
+
+ from galaxy.datatypes.xml import Phyloxml
+ from galaxy.datatypes.data import Newick, Nexus
dataset_id =3D trans.security.encode_id( data.id )
=20
if data.state in ['no state','',None]:
@@ -230,6 +233,14 @@
action-url=3D"${h.url_for( controller=3D't=
racks', action=3D'browser', dataset_id=3Ddataset_id)}"
new-url=3D"${h.url_for( controller=3D'trac=
ks', action=3D'index', dataset_id=3Ddataset_id, default_dbkey=3Ddata.dbkey)=
}" title=3D"View in Trackster"></a>
%endif
+ <%
+                                isPhylogenyData = isinstance(data.datatype, (Phyloxml, Nexus, Newick))
+                            %>
+                            %if isPhylogenyData:
+                                <a href="javascript:void(0)" class="icon-button chart_curve phyloviz-add"
+                                    action-url="${h.url_for( controller='phyloviz', action='-', dataset_id=dataset_id)}"
+                                    new-url="${h.url_for( controller='phyloviz', action='index', dataset_id=dataset_id)}" title="View in Phyloviz"></a>
+ %endif
%if trans.user:
%if not display_structured:
<div style=3D"float: right">
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049ba=
ffb65374729d51b89 templates/visualization/phyloviz.mako
--- /dev/null
+++ b/templates/visualization/phyloviz.mako
@@ -0,0 +1,320 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+##
+<%def name="init()">
+    <%
+        self.has_left_panel=False
+        self.has_right_panel=False
+        self.active_view="visualization"
+        self.message_box_visible=False
+ %>
+</%def>
+
+<%def name=3D"stylesheets()">
+ ${parent.stylesheets()}
+ <style>
+
+ .node circle {
+ cursor: pointer;
+ fill: #fff;
+ stroke: steelblue;
+ stroke-width: 1.5px;
+ }
+
+ .node.searchHighlight circle {
+ stroke-width: 3px;
+ stroke: #7adc26;
+ }
+
+ .node.selectedHighlight circle {
+ stroke-width: 3px;
+ stroke: #dc143c;
+ }
+
+ path.link {
+ fill: none;
+ stroke: #B5BBFF;
+ stroke-width: 4.0px;
+ }
+
+
+ div #phyloVizNavContainer{
+ text-align: center;
+ width: 100%;
+ height: 0px;
+ }
+
+ div #phyloVizNav{
+ font-weight: bold;
+ display: inline-block;
+ background: transparent;
+ top: -2em;
+ position: relative;
+ }
+
+ div .navControl{
+ float: left;
+ }
+
+ div#FloatingMenu {
+ left: 0;
+ top: 15%;
+ width:20%;
+ z-index:100;
+ padding: 5px;
+
+ }
+
+ div#SettingsMenu {
+ width: 25%;
+ top: 350px;
+
+ }
+
+ div#nodeSelectionView {
+ width: 25%;
+ top:70px;
+ }
+
+ .Panel {
+ right: 0%;
+ z-index: 101;
+ position: fixed;
+
+ ## Borrowed from galaxy modal_dialogues
+ background-color: white;
+ border: 1px solid #999;
+ border: 1px solid rgba(0, 0, 0, 0.3);
+ -webkit-border-radius: 6px;
+ -moz-border-radius: 6px;
+ border-radius: 6px;
+ -webkit-border-radius: 6px;
+ -moz-border-radius: 6px;
+ border-radius: 6px;
+ -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+ -webkit-background-clip: padding-box;
+ -moz-background-clip: padding-box;
+ background-clip: padding-box;
+ -webkit-background-clip: padding-box;
+ -moz-background-clip: padding-box;
+ background-clip: padding-box;
+ }
+
+ span.PhylovizCloseBtn{
+ cursor: pointer;
+ float : right;
+ }
+
+ #PhyloViz{
+ width: 100%;
+ height: 95%;
+ }
+
+ h2.PhyloVizMenuTitle{
+ color: white;
+ }
+
+ ## Settings Menu
+ .SettingMenuRows{
+ margin: 2px 0 2px 0;
+ }
+
+
+ ## Helper Styles
+ .PhyloVizFloatLeft{
+ float : left;
+ }
+        .icon-button.zoom-in,.icon-button.zoom-out{display:inline-block;height:16px;width:16px;margin-bottom:-3px;cursor:pointer;}
+        .icon-button.zoom-out{background:transparent url(../images/fugue/magnifier-zoom-out.png) center center no-repeat;}
+        .icon-button.zoom-in{margin-left:10px;background:transparent url(../images/fugue/magnifier-zoom.png) center center no-repeat;}
+
+ </style>
+</%def>
+
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "galaxy.panels", "libs/d3", "mvc/data", "viz/visualization", "viz/phyloviz")}
+</%def>
+
+
+
+<%def name=3D"center_panel()">
+
+ <div class=3D"unified-panel-header" unselectable=3D"on">
+ <div class=3D"unified-panel-header-inner">
+ <div style=3D"float:left;" id=3D"title"></div>
+ <div style=3D"float:right;" id=3D"panelHeaderRightBtns"></div>
+ </div>
+ <div style=3D"clear: both"></div>
+ </div>
+
+
+ <div id=3D"phyloVizNavContainer">
+ <div id=3D"phyloVizNav">
+            %if config["ext"] == "nex" and not config["saved_visualization"]:
+                <div id = "phylovizNexInfo" class="navControl">
+                    <p>Select a tree to view:
+                        <select id="phylovizNexSelector">
+                        % for tree, index in config["trees"]:
+                            <option value="${index}">${tree}</option>
+                        % endfor
+                        </select>
+ </p>
+ </div>
+ %endif
+ <div id=3D"phyloVizNavBtns" class=3D"navControl">
+ </div>
+ <div class=3D"navControl">
+ <p> | Alt+click to select nodes</p>
+ </div>
+
+
+ </div>
+
+ </div>
+
+ ## Node Selection Menu
+ <div id=3D"nodeSelectionView" class=3D"Panel">
+ <div class=3D"modal-header">
+ <h3 class=3D"PhyloVizMenuTitle">Search / Edit Nodes :
+ <span class=3D"PhylovizCloseBtn" id=3D"nodeSelCloseBtn"> X=
</span>
+ </h3>
+ </div>
+
+ <div class=3D"modal-body">
+
+ <div class=3D"SettingMenuRows">
+ Search for nodes with:
+ <select id=3D"phyloVizSearchCondition" style=3D"width: 55%=
">
+ <option value=3D"name-containing">Name (containing)</o=
ption>
+ <option value=3D"annotation-containing">Annotation (co=
ntaining)</option>
+ <option value=3D"dist-greaterEqual">Distance (>=3D)</o=
ption>
+ <option value=3D"dist-lesserEqual">Distance (<=3D)</op=
tion>
+ </select>
+ <input type=3D"text" id=3D"phyloVizSearchTerm" value=3D"N=
one" size=3D"15" displayLabel=3D"Distance">
+
+ <div class=3D"SettingMenuRows" style=3D"text-align: center=
;">
+ <button id=3D"phyloVizSearchBtn" > Search! </button>
+ </div>
+ </div>
+
+ <br/>
+
+ <div class=3D"SettingMenuRows">
+ Name: <input type=3D"text" id=3D"phyloVizSelectedNodeName"=
value=3D"None" size=3D"15" disabled=3D"disabled" >
+ </div>
+ <div class=3D"SettingMenuRows">
+ Dist: <input type=3D"text" id=3D"phyloVizSelectedNodeDist"=
value=3D"None" size=3D"15" disabled=3D"disabled" displayLabel=3D"Distance">
+ </div>
+ <div class=3D"SettingMenuRows">
+ Annotation:
+ <textarea id=3D"phyloVizSelectedNodeAnnotation" disabled=
=3D"disabled" ></textarea>
+ </div>
+ <div class=3D"SettingMenuRows">
+ Edit: <input type=3D"checkbox" id=3D"phylovizEditNodesChec=
k" value=3D"You can put custom annotations here and it will be saved">
+ <button id=3D"phylovizNodeSaveChanges" style=3D"display: n=
one;"> Save edits</button>
+ <button id=3D"phylovizNodeCancelChanges" style=3D"display:=
none;"> Cancel</button>
+ </div>
+ </div>
+ </div>
+
+ ## Settings Menus
+ <div id=3D"SettingsMenu" class=3D"Panel">
+ <div class=3D"modal-header">
+ <h3 class=3D"PhyloVizMenuTitle">Phyloviz Settings:
+ <span class=3D"PhylovizCloseBtn" id=3D"settingsCloseBtn"> =
X </span>
+ </h3>
+ </div>
+ <div class=3D"modal-body">
+ <div class=3D"SettingMenuRows">
+ Phylogenetic Spacing (px per unit): <input id=3D"phyloVizT=
reeSeparation" type=3D"text" value=3D"250" size=3D"10" displayLabel=3D"Phyl=
ogenetic Separation"> (50-2500)
+ </div>
+ <div class=3D"SettingMenuRows">
+ Vertical Spacing (px): <input type=3D"text" id=3D"phyloViz=
TreeLeafHeight" value=3D"18" size=3D"10" displayLabel=3D"Vertical Spacing">=
(5-30)
+ </div>
+ <div class=3D"SettingMenuRows">
+ Font Size (px): <input type=3D"text" id=3D"phyloVizTreeFon=
tSize" value=3D"12" size=3D"4" displayLabel=3D"Font Size"> (5-20)
+ </div>
+
+ </div>
+ <div class=3D"modal-footer">
+ <button id=3D"phylovizResetSettingsBtn" class=3D"PhyloVizFloat=
Left" > Reset </button>
+ <button id=3D"phylovizApplySettingsBtn" class=3D"PhyloVizFloat=
Right" > Apply </button>
+ </div>
+ </div>
+
+
+
+
+
+
+ <div class=3D"Panel" id=3D"FloatingMenu" style=3D"display: None;">
+
+ <h2>PhyloViz (<a onclick=3D"displayHelp()" href=3D"javascript:void=
(0);">?</a>)</h2>
+ <div style=3D"display: none;">
+ <h2>Summary of Interactions and Functions:</h2>
+ <div class=3D"hint">1. Expansion of Nodes: click or option-cli=
ck to expand or collapse</div>
+ <div class=3D"hint">2. Zooming and translation: mousewheel, bu=
ttons, click and drag, double click. Reset</div>
+ <div class=3D"hint">3. Tooltip: Displays "Name and Size" on mo=
useOver on nodes</div>
+ <div class=3D"hint">4. Minimap: Currently displays an exact bu=
t scaled down replicate of the tree, orange bounding box is correct for lin=
ear only<br/>
+ Can be switched on or off</div>
+ <div class=3D"hint">5. Changing Layouts: Able to change betwee=
n circular and linear layouts.</div>
+
+ </div>
+
+ <h5>Scaling & Rotation:</h5>
+ <button id=3D"phylovizZoomInBtn" class=3D"" > + </button>
+ <button id=3D"phylovizZoomOutBtn" class=3D"" > - </button>
+
+
+ <h5>Translation:</h5>
+ <button id=3D"phylovizTranslateUpBtn" > Up </button>
+ <button id=3D"phylovizTranslateDownBtn" > Down </button>
+ <br/>
+ <button id=3D"phylovizTranslateLeftBtn" > Left </button>
+ <button id=3D"phylovizTranslateRightBtn" > Right </button>
+
+
+
+ <h5>Others:</h5>
+ <button id=3D"phylovizResetBtn" > Reset Zoom/Translate </button>
+ <button id=3D"phylovizSaveBtn" > Save vizualization </button>
+ <button id=3D"phylovizOpenSettingsBtn" > Settings </button>
+ </div>
+
+ <div id=3D"PhyloViz" >
+ </div>
+
+ <script type=3D"text/javascript">
+
+ function initPhyloViz(data, config) {
+ var phyloviz;
+
+ // -- Initialization code |-->
+            phyloviz = new PhylovizView({
+ data: data,
+ layout : "Linear",
+ config : config
+ });
+
+ // -- Render viz. --
+ phyloviz.render();
+
+ }
+
+        $(function firstVizLoad(){       // called when the viz is loaded for the first time
+            var config = JSON.parse( '${ h.to_json_string( config )}');
+            var data = JSON.parse('${h.to_json_string(data)}');
+            initPhyloViz(data, config);
+ });
+
+ </script>
+
+</%def>
+
+
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049ba=
ffb65374729d51b89 test-data/visualization/phyloviz/1_nexus.nex
--- /dev/null
+++ b/test-data/visualization/phyloviz/1_nexus.nex
@@ -0,0 +1,87 @@
+#NEXUS
+
+[!This data set was downloaded from TreeBASE, a relational database of phy=
logenetic knowledge. TreeBASE has been supported by the NSF, Harvard Univer=
sity, Yale University, SDSC and UC Davis. Please do not remove this acknowl=
edgment from the Nexus file.
+
+
+Generated on June 12, 2012; 23:00 GMT
+
+TreeBASE (cc) 1994-2008
+
+Study reference:
+Olariaga I., Grebenc T., Salcedo I., & Mart=C3=ADn M.P. 2012. Two new spec=
ies of Hydnum
+with ovoid basidiospores: H. ovoideisporum and H. vesterholtii. Mycologia,=
.
+
+TreeBASE Study URI: http://purl.org/phylo/treebase/phylows/study/TB2:S128=
31]
+
+BEGIN TREES;
+ TITLE Hydnum_ITS_result;
+ LINK TAXA =3D Taxa1;
+ TRANSLATE
+ 1 Hydnum_aff_ellipsosporum_RUFHYD1_AJ535304,
+ 2 Hydnum_albidum_ALB_AY817135,
+ 3 Hydnum_albidum_ALBHYD1_AJ534974,
+ 4 Hydnum_albomagnum_ALM_DQ218305,
+ 5 Hydnum_ellipsosporum_ELL_AY817138,
+ 6 Hydnum_ellipsosporum_RUFHYD8_AJ547882,
+ 7 Hydnum_ovoidisporum_12317BIOFungi,
+ 8 Hydnum_ovoidisporum_12683BIOFungi,
+ 9 Hydnum_ovoidisporum_12902BIOFungi,
+ 10 Hydnum_ovoidisporum_14130BIOFungi,
+ 11 Hydnum_repandum_RE1_REP1_AJ889978,
+ 12 Hydnum_repandum_RE1_REP2_AJ889949,
+ 13 Hydnum_repandum_RE1_REP3_AY817136,
+ 14 Hydnum_repandum_RE1_REP6_UDB000025,
+ 15 Hydnum_repandum_RE1_REP7_UDB000096,
+ 16 Hydnum_repandum_RE1_REP8_UDB001479,
+ 17 Hydnum_repandum_RE1_REPHYD10_AJ547888,
+ 18 Hydnum_repandum_RE1_REPHYD11_AJ547886,
+ 19 Hydnum_repandum_RE1_REPHYD1_AJ547871,
+ 20 Hydnum_repandum_RE1_REPHYD3_AJ547874,
+ 21 Hydnum_repandum_RE1_REPHYD4_AJ547876,
+ 22 Hydnum_repandum_RE1_REPHYD5_AJ547875,
+ 23 Hydnum_repandum_RE1_REPHYD6_AJ547877,
+ 24 Hydnum_repandum_RE1_REPHYD7_AJ547878,
+ 25 Hydnum_repandum_RE1_REPHYD8_AJ547881,
+ 26 Hydnum_repandum_RE1_REPHYD9_AJ547883,
+ 27 Hydnum_repandum_RE1_RUFHYD10_AJ547866,
+ 28 Hydnum_repandum_RE1_RUFHYD11_AJ547889,
+ 29 Hydnum_repandum_RE1_RUFHYD9_AJ535305,
+ 30 Hydnum_rufescens_RU1_RUFHYD5_AJ547869,
+ 31 Hydnum_rufescens_RU1_RUFHYD6_AJ547884,
+ 32 Hydnum_rufescens_RU1_RUFHYD7_AJ547870,
+ 33 Hydnum_rufescens_RU2_REP5_DQ367902,
+ 34 Hydnum_rufescens_RU2_RUFHYD2_AJ535301,
+ 35 Hydnum_rufescens_RU3_12901BIOFungi,
+ 36 Hydnum_rufescens_RU3_REP4_DQ218306,
+ 37 Hydnum_rufescens_RU3_RUFHYD3_AJ535303,
+ 38 Hydnum_rufescens_RU3_RUFHYD4_AJ535302,
+ 39 Hydnum_rufescens_RU4_RUFHYD12_AJ839969,
+ 40 Hydnum_rufescens_RU4_RUFHYD16_AJ547868,
+ 41 Hydnum_rufescens_RU4_RUFHYD17_AJ547885,
+ 42 Hydnum_rufescens_RU4_UMB1_DQ367903,
+ 43 Hydnum_rufescens_RU5_12760BIOFungi,
+ 44 Hydnum_rufescens_RU5_ALBHYD2_AJ534975,
+ 45 Hydnum_rufescens_RU5_RUF2_DQ658890,
+ 46 Hydnum_rufescens_RU5_RUF4_UDB001465,
+ 47 Hydnum_rufescens_RU5_RUF5_UDB002423,
+ 48 Hydnum_rufescens_RU5_RUFHYD14_AJ547872,
+ 49 Hydnum_rufescens_RU6_RUF1_AY817137,
+ 50 Hydnum_rufescens_RU6_RUFHYD15_AJ547867,
+ 51 Hydnum_rufescens_wrong_taxonomy_RUF3_AM087246,
+ 52 Hydnum_umbilicatum_UMBHYD1_AJ534972,
+ 53 Hydnum_umbilicatum_UMBHYD2_AJ534973,
+ 54 Hydnum_vesterholtii_10429BIOFungi,
+ 55 Hydnum_vesterholtii_10452BIOFungi,
+ 56 Hydnum_vesterholtii_12330BIOFungi,
+ 57 Hydnum_vesterholtii_12904BIOFungi,
+ 58 Hydnum_vesterholtii_REPHYD12A_AJ547879,
+ 59 Hydnum_vesterholtii_REPHYD12C_AJ783968,
+ 60 Hydnum_vesterholtii_REPHYD13_AJ547887,
+ 61 Sistotrema_muscicola_AJ606040,
+ 62 Sistotrema_alboluteum_AJ606042;
+ TREE Fig._2 =3D [&R] ((62:100.0,(51:100.0,61:100.0):93.269997):49.66=
,((4:100.0,(2:100.0,3:100.0):100.0):60.639999,(((56:100.0,58:100.0,59:100.0=
):84.639999,(54:100.0,55:100.0,57:100.0,60:100.0):98.330002):92.5,(((30:100=
.0,31:100.0,32:100.0):100.0,(11:100.0,12:100.0,13:100.0,14:100.0,15:100.0,1=
6:100.0,17:100.0,18:100.0,19:100.0,20:100.0,21:100.0,22:100.0,23:100.0,24:1=
00.0,25:100.0,26:100.0):99.93):68.690002,(((33:100.0,34:100.0):49.8050005,(=
35:100.0,36:100.0,37:100.0,38:100.0):99.989998):49.8050005,((7:100.0,8:100.=
0,9:100.0,10:100.0):100.0,(42:100.0,(39:100.0,40:100.0,41:100.0):98.449997)=
:86.790001,((52:100.0,53:100.0):99.93,(1:100.0,(5:97.47999949999999,6:100.0=
):97.47999949999999):100.0):53.310001,(27:100.0,(28:100.0,29:100.0,49:100.0=
,50:100.0):47.404999):47.404999,(43:100.0,44:100.0,45:100.0,46:100.0,47:100=
.0,48:100.0):99.459999):29.245001):29.245001):51.580002):61.540001):49.66);
+ TREE PAUP_1 =3D [&R] ((62:100.0,(51:100.0,61:100.0):93.269997):49.66=
,((4:100.0,(3:100.0,2:100.0):100.0):60.639999,(((58:100.0,59:100.0,56:100.0=
):84.639999,(60:100.0,54:100.0,55:100.0,57:100.0):98.330002):92.5,(((30:100=
.0,31:100.0,32:100.0):100.0,(19:100.0,20:100.0,21:100.0,22:100.0,23:100.0,2=
4:100.0,25:100.0,26:100.0,17:100.0,18:100.0,11:100.0,12:100.0,13:100.0,14:1=
00.0,15:100.0,16:100.0):99.93):68.690002,((34:100.0,33:100.0):99.610001,(37=
:100.0,38:100.0,35:100.0,36:100.0):99.989998,(42:100.0,(39:100.0,41:100.0,4=
0:100.0):98.449997):86.790001,(8:100.0,7:100.0,9:100.0,10:100.0):100.0,((52=
:100.0,53:100.0):99.93,(1:100.0,(5:100.0,6:100.0):94.959999):100.0):53.3100=
01,(29:100.0,27:100.0,28:100.0,50:100.0,49:100.0):94.809998,(44:100.0,43:10=
0.0,48:100.0,45:100.0,46:100.0,47:100.0):99.459999):58.490002):51.580002):6=
1.540001):49.66);
+
+
+
+END;
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049ba=
ffb65374729d51b89 test-data/visualization/phyloviz/2_nexus.nex
--- /dev/null
+++ b/test-data/visualization/phyloviz/2_nexus.nex
@@ -0,0 +1,96 @@
+#NEXUS
+
+[!This data set was downloaded from TreeBASE, a relational database of phy=
logenetic knowledge. TreeBASE has been supported by the NSF, Harvard Univer=
sity, Yale University, SDSC and UC Davis. Please do not remove this acknowl=
edgment from the Nexus file.
+
+
+Generated on August 18, 2012; 12:14 GMT
+
+TreeBASE (cc) 1994-2008
+
+Study reference:
+Naish D., Dyke G., Cau A., & Escuilli=C3=A9 F. 2012. A gigantic bird from =
the Upper Cretaceous
+of Central Asia. Biology Letters, 8(1): 97-100.
+
+TreeBASE Study URI: http://purl.org/phylo/treebase/phylows/study/TB2:S130=
08]
+
+BEGIN TREES;
+ TITLE Imported_trees;
+ LINK TAXA =3D Taxa1;
+ TRANSLATE
+ 1 Herrerasaurus,
+ 2 Tawa,
+ 3 Allosaurus,
+ 4 Alvarezsaurus,
+ 5 Anchiornis,
+ 6 Archaeopteryx,
+ 7 Archaeorhynchus,
+ 8 Avimimus,
+ 9 Baryonyx,
+ 10 Beipiaosaurus,
+ 11 Caenagnathus,
+ 12 Caudipteryx,
+ 13 Ceratosaurus,
+ 14 Chirostenotes,
+ 15 Citipati,
+ 16 Compsognathus,
+ 17 Confuciusornis,
+ 18 Dilong,
+ 19 Dilophosaurus,
+ 20 Epidendrosaurus,
+ 21 Epidexipteryx,
+ 22 Erlicosaurus,
+ 23 Eustreptospondylus,
+ 24 Gallimimus,
+ 25 Garudimimus,
+ 26 Gobipteryx,
+ 27 Guanlong,
+ 28 Haplocheirus,
+ 29 Harpymimus,
+ 30 Hebeiornis,
+ 31 Hongshanornis,
+ 32 Huoshanornis,
+ 33 Iberomesornis,
+ 34 Ichthyornis,
+ 35 Incisivosaurus,
+ 36 Jeholornis,
+ 37 Limusaurus,
+ 38 Longicrusavis,
+ 39 Longipteryx,
+ 40 Longirostravis,
+ 41 Majungasaurus,
+ 42 Masiakasaurus,
+ 43 Monolophosaurus,
+ 44 Mononykus,
+ 45 Neornithes,
+ 46 Ornitholestes,
+ 47 Ornithomimus,
+ 48 Patagonykus,
+ 49 Patagopteryx,
+ 50 Pelecanimimus,
+ 51 Pengornis,
+ 52 Protarchaeopteryx,
+ 53 Protopteryx,
+ 54 Rinchenia,
+ 55 Sapeornis,
+ 56 Segnosaurus,
+ 57 Shenzhousaurus,
+ 58 Shuvuuia,
+ 59 Sinornithosaurus,
+ 60 Sinosauropteryx,
+ 61 Sinovenator,
+ 62 Sinraptor,
+ 63 Syntarsus_kayentakatae,
+ 64 Troodon,
+ 65 Tyrannosaurus,
+ 66 Velociraptor,
+ 67 Yanornis,
+ 68 Yixianornis,
+ 69 Zhongjianornis,
+ 70 Zhongornis,
+ 71 Zuolong,
+ 72 Samrukia;
+ TREE Figure_1A =3D [&R] (1,(2,(((((43,(3,62)),(71,((46,((((28,(4,(48=
,(44,58)))),((((5,(61,(64,(59,66)))),(6,((36,(55,(69,(((7,34,45,49,72,(31,3=
8),(67,68)),(33,((32,((26,30),(39,40))),(51,53)))),(17,70))))),(20,21)))),(=
(11,(12,(8,(14,(15,54))))),(35,52))),(10,(22,56)))),(50,(57,(29,(25,(24,47)=
))))),(16,60))),(27,(18,65))))),(9,23)),(13,(41,(37,42)))),(19,63))));
+
+
+
+END;
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049ba=
ffb65374729d51b89 test-data/visualization/phyloviz/3_phyloxml.xml
--- /dev/null
+++ b/test-data/visualization/phyloviz/3_phyloxml.xml
@@ -0,0 +1,257 @@
+<?xml version=3D"1.0" encoding=3D"UTF-8"?>
+<phyloxml xmlns:xsi=3D"http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation=3D"http://www.phyloxml.org http://www.phyloxml.org/1=
.10/phyloxml.xsd"
+ xmlns=3D"http://www.phyloxml.org">
+ <phylogeny rooted=3D"true">
+ <clade>
+ <clade>
+ <branch_length>0.18105</branch_length>
+ <confidence type=3D"unknown">89.0</confidence>
+ <clade>
+ <branch_length>0.07466</branch_length>
+ <confidence type=3D"unknown">32.0</confidence>
+ <clade>
+ <branch_length>0.26168</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <branch_length>0.22058</branch_length>
+ <confidence type=3D"unknown">89.0</confidence>
+ <clade>
+ <branch_length>0.28901</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <branch_length>0.06584</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <branch_length>0.02309</branch_length>
+ <confidence type=3D"unknown">43.0</confidenc=
e>
+ <clade>
+ <branch_length>0.0746</branch_length>
+ <confidence type=3D"unknown">100.0</confi=
dence>
+ <clade>
+ <branch_length>0.02365</branch_length>
+ <confidence type=3D"unknown">88.0</con=
fidence>
+ <clade>
+ <name>22_MOUSE</name>
+ <branch_length>0.05998</branch_leng=
th>
+ <taxonomy>
+ <code>MOUSE</code>
+ </taxonomy>
+ </clade>
+ <clade>
+ <name>Apaf-1_HUMAN</name>
+ <branch_length>0.01825</branch_leng=
th>
+ <taxonomy>
+ <code>HUMAN</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ <clade>
+ <name>12_CANFA</name>
+ <branch_length>0.04683</branch_length>
+ <taxonomy>
+ <code>CANFA</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ <clade>
+ <name>11_CHICK</name>
+ <branch_length>0.15226</branch_length>
+ <taxonomy>
+ <code>CHICK</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ <clade>
+ <name>16_XENLA</name>
+ <branch_length>0.4409</branch_length>
+ <taxonomy>
+ <code>XENLA</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ <clade>
+ <branch_length>0.17031</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <branch_length>0.10929</branch_length>
+ <confidence type=3D"unknown">100.0</confiden=
ce>
+ <clade>
+ <name>14_FUGRU</name>
+ <branch_length>0.02255</branch_length>
+ <taxonomy>
+ <code>FUGRU</code>
+ </taxonomy>
+ </clade>
+ <clade>
+ <name>15_TETNG</name>
+ <branch_length>0.09478</branch_length>
+ <taxonomy>
+ <code>TETNG</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ <clade>
+ <name>17_BRARE</name>
+ <branch_length>0.1811</branch_length>
+ <taxonomy>
+ <code>BRARE</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ </clade>
+ <clade>
+ <branch_length>0.01594</branch_length>
+ <confidence type=3D"unknown">53.0</confidence>
+ <clade>
+ <branch_length>0.10709</branch_length>
+ <confidence type=3D"unknown">68.0</confidence>
+ <clade>
+ <name>1_BRAFL</name>
+ <branch_length>0.26131</branch_length>
+ <taxonomy>
+ <code>BRAFL</code>
+ </taxonomy>
+ </clade>
+ <clade>
+ <name>18_NEMVE</name>
+ <branch_length>0.38014</branch_length>
+ <taxonomy>
+ <code>NEMVE</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ <clade>
+ <name>23_STRPU</name>
+ <branch_length>0.48179</branch_length>
+ <taxonomy>
+ <code>STRPU</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ </clade>
+ <clade>
+ <branch_length>0.34475</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <name>26_STRPU</name>
+ <branch_length>0.36374</branch_length>
+ <taxonomy>
+ <code>STRPU</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"1319">
+ <domain from=3D"18" to=3D"98" confidence=3D"=
3.4E-5">Death</domain>
+ <domain from=3D"189" to=3D"481" confidence=
=3D"1.8E-10">NB-ARC</domain>
+ <domain from=3D"630" to=3D"668" confidence=
=3D"8.2E-5">WD40</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ <clade>
+ <name>25_STRPU</name>
+ <branch_length>0.33137</branch_length>
+ <taxonomy>
+ <code>STRPU</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"1947">
+ <domain from=3D"143" to=3D"227" confidence=
=3D"7.4E-5">Death</domain>
+ <domain from=3D"227" to=3D"550" confidence=
=3D"2.0E-13">NB-ARC</domain>
+ <domain from=3D"697" to=3D"736" confidence=
=3D"7.9E-4">WD40</domain>
+ <domain from=3D"745" to=3D"785" confidence=
=3D"1.5">WD40</domain>
+ <domain from=3D"1741" to=3D"1836" confidence=
=3D"2.0">Adeno_VII</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ </clade>
+ </clade>
+ <clade>
+ <branch_length>1.31498</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <name>CED4_CAEEL</name>
+ <branch_length>0.13241</branch_length>
+ <taxonomy>
+ <code>CAEEL</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"714">
+ <domain from=3D"7" to=3D"90" confidence=3D"9.2E=
-14">CARD</domain>
+ <domain from=3D"116" to=3D"442" confidence=3D"5=
.8E-151">NB-ARC</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ <clade>
+ <name>31_CAEBR</name>
+ <branch_length>0.04777</branch_length>
+ <taxonomy>
+ <code>CAEBR</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"554">
+ <domain from=3D"1" to=3D"75" confidence=3D"0.00=
46">CARD</domain>
+ <domain from=3D"101" to=3D"427" confidence=3D"2=
.1E-123">NB-ARC</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ </clade>
+ </clade>
+ <clade>
+ <branch_length>0.13172</branch_length>
+ <confidence type=3D"unknown">45.0</confidence>
+ <clade>
+ <branch_length>0.24915</branch_length>
+ <confidence type=3D"unknown">95.0</confidence>
+ <clade>
+ <branch_length>0.76898</branch_length>
+ <confidence type=3D"unknown">100.0</confidence>
+ <clade>
+ <name>28_DROPS</name>
+ <branch_length>0.1732</branch_length>
+ <taxonomy>
+ <code>DROPS</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"535">
+ <domain from=3D"112" to=3D"399" confidence=
=3D"1.4E-5">NB-ARC</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ <clade>
+ <name>Dark_DROME</name>
+ <branch_length>0.18863</branch_length>
+ <taxonomy>
+ <code>DROME</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"1421">
+ <domain from=3D"108" to=3D"397" confidence=
=3D"2.1E-5">NB-ARC</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ </clade>
+ <clade>
+ <name>29_AEDAE</name>
+ <branch_length>0.86398</branch_length>
+ <taxonomy>
+ <code>AEDAE</code>
+ </taxonomy>
+ <sequence>
+ <domain_architecture length=3D"423">
+ <domain from=3D"109" to=3D"421" confidence=3D"9=
.3E-6">NB-ARC</domain>
+ </domain_architecture>
+ </sequence>
+ </clade>
+ </clade>
+ <clade>
+ <name>30_TRICA</name>
+ <branch_length>0.97698</branch_length>
+ <taxonomy>
+ <code>TRICA</code>
+ </taxonomy>
+ </clade>
+ </clade>
+ </clade>
+ </clade>
+ </phylogeny>
+</phyloxml>
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049ba=
ffb65374729d51b89 test-data/visualization/phyloviz/4_newick.nhx
--- /dev/null
+++ b/test-data/visualization/phyloviz/4_newick.nhx
@@ -0,0 +1,33 @@
+(((BGIOSIBCA028421_ORYSA:0.423485[&&NHX:S=3DORYSA:O=3DBGIOSIBCA028421.1:G=
=3DBGIOSIBCA028421],
+At5g41150_ARATH:0.273135[&&NHX:S=3DARATH:O=3DAt5g41150.1:G=3DAt5g41150]
+):0.690991[&&NHX:S=3DMagnoliophyta:D=3DN:B=3D100],
+(rad16_SCHPO:0.718598[&&NHX:S=3DSCHPO:O=3DSPCC970.01:G=3DSPCC970.01],
+RAD1_YEAST:1.05456[&&NHX:S=3DYEAST:O=3DYPL022W.1:G=3DYPL022W]
+):0.344838[&&NHX:S=3DAscomycota:D=3DN:B=3D100]
+):0.103849[&&NHX:S=3DEukaryota:D=3DN:B=3D61],
+((((((((ERCC4_HUMAN:0.067531[&&NHX:S=3DHUMAN:O=3DENST00000311895.3:G=3DENS=
G00000175595],
+Ercc4_MOUSE:0.17422[&&NHX:S=3DMOUSE:O=3DENSMUST00000023206.5:G=3DENSMUSG00=
000022545]
+):0.065513[&&NHX:S=3DEuarchontoglires:D=3DN:B=3D100],
+ENSMODT00000006086_MONDO:0.104633[&&NHX:S=3DMONDO:O=3DENSMODT00000006086.2=
:G=3DENSMODG00000004840]
+):0.083764[&&NHX:S=3DTheria:D=3DN:B=3D100],
+Q5ZJP8_CHICK:0.153132[&&NHX:S=3DCHICK:O=3DENSGALT00000004716.2:G=3DENSGALG=
00000002981]
+):0.057998[&&NHX:S=3DAmniota:D=3DN:B=3D100],
+ENSXETT00000024054_XENTR:0.288632[&&NHX:S=3DXENTR:O=3DENSXETT00000024054.2=
:G=3DENSXETG00000010991]
+):0.075713[&&NHX:S=3DTetrapoda:D=3DN:B=3D100],
+(zgc-63468_BRARE:0.2218[&&NHX:S=3DBRARE:O=3DENSDART00000015780.4:G=3DENSDA=
RG00000014161],
+NEWSINFRUT00000137921_FUGRU:0.220441[&&NHX:S=3DFUGRU:O=3DNEWSINFRUT0000013=
7921.3:G=3DNEWSINFRUG00000130312]
+):0.170605[&&NHX:S=3DClupeocephala:D=3DN:B=3D100]
+):0.238713[&&NHX:S=3DEuteleostomi:D=3DN:B=3D100],
+ENSCINT00000011737_CIOIN:0.623567[&&NHX:S=3DCIOIN:O=3DENSCINT00000011737.2=
:G=3DENSCING00000005673]
+):0.07499[&&NHX:S=3DChordata:D=3DN:B=3D100],
+(Sm00.scaff00195.0600_SCHMA:0.784609[&&NHX:S=3DSCHMA:O=3DSm00.scaff00195.0=
600:G=3DSm00.scaff00195.0600],
+(CBG03141_CAEBR:0.093703[&&NHX:S=3DCAEBR:O=3DCBG03141:G=3DCBG03141],
+NP_496498_CAEEL:0.212236[&&NHX:S=3DCAEEL:O=3DC47D12.8.1:G=3DC47D12.8]
+):1.47416[&&NHX:S=3DCaenorhabditis:D=3DN:B=3D94]
+):0.26906[&&NHX:S=3DBilateria:D=3DN:B=3D97]
+):0.071406[&&NHX:S=3DBilateria:D=3DN:B=3D1],
+(mei-9-RA_DROME:0.170289[&&NHX:S=3DDROME:O=3DCG3697-RA.3:G=3DCG3697],
+GA17620-PA_DROPS:0.154817[&&NHX:S=3DDROPS:O=3DGA17620-PA:G=3DGA17620]
+):0.818474[&&NHX:S=3DSophophora:D=3DN:B=3D100]
+):0
+)[&&NHX:S=3DEukaryota:D=3DN];
\ No newline at end of file
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r f6d9557b6d77bde1c8049ba=
ffb65374729d51b89 test-data/visualization/phyloviz/5_newick.nhx
--- /dev/null
+++ b/test-data/visualization/phyloviz/5_newick.nhx
@@ -0,0 +1,1 @@
+(CAE_ELE_PORCN:0.303421 ,((((DRO_PER_PORCN:0.001000 ,DRO_PSE_PORCN:0.00100=
0 )67:0.141994 ,(DRO_ANA_PORCN:0.111899 ,(DRO_ERE_PORCN:0.030516 ,(DRO_MEL_=
PORCN:0.021127 ,DRO_SEC_PORCN:0.021127 )38:0.030516 )35:0.111899 )18:0.1419=
94 )16:0.162611 ,(DRO_WIL_PORCN:0.152225 ,(DRO_VIR_PORCN:0.085057 ,DRO_MOJ_=
PORCN:0.085057 )24:0.152225 )15:0.162611 )13:0.295081 ,(ANO_GAM_PORCN:0.287=
545 ,((CIO_INT_PORCN:0.100686 ,CIO_SAV_PORCN:0.100686 )19:0.275542 ,((LOA_L=
OA_PORCN:0.036278 ,BRU_MAL_PORCN:0.036278 )29:0.272631 ,(((((DAN_RER_PORCN:=
0.086499 ,((TAK_RUB_PORCN:0.032609 ,TET_NIG_PORCN:0.032609 )32:0.048864 ,(G=
AD_MOR_PORCN:0.039387 ,(ORY_LAT_PORCN:0.031729 ,(GAS_ACU_PORCN:0.021882 ,OR=
E_NIL_PORCN:0.021882 )37:0.031729 )34:0.039387 )28:0.048864 )27:0.086499 )2=
3:0.119618 ,(LAT_CHA_PORCN:0.099348 ,((XEN_LAE_PORCN:0.033333 ,XEN_TRO_PORC=
N:0.033333 )31:0.091250 ,(ANO_CAR_PORCN:0.086538 ,((MON_DOM_PORCN:0.014100 =
,(MAC_EUG_PORCN:0.005423 ,SAR_HAR_PORCN:0.005423 )57:0.014100 )42:0.062862 =
,(ORN_ANA_PORCN:0.057974 ,(GOR_GOR_PORCN:0.033876 ,(FEL_CAT_PORCN:0.022851 =
,(PRO_CAP_PORCN:0.019716 ,(CAV_POR_PORCN:0.018599 ,(ERI_EUR_PORCN:0.015518 =
,((DIP_ORD_PORCN:0.007231 ,(MUS_MUS_PORCN:0.001085 ,(RAT_NOR_PORCN:0.001000=
,CRI_GRI_PORCN:0.001000 )69:0.001085 )64:0.007231 )53:0.012954 ,(DAS_NOV_P=
ORCN:0.011362 ,(LOX_AFR_PORCN:0.010575 ,(CAL_JAC_PORCN:0.010332 ,(OCH_PRI_P=
ORCN:0.010063 ,(MIC_MUR_PORCN:0.009123 ,(SUS_SCR_PORCN:0.008880 ,(MYO_LUC_P=
ORCN:0.008460 ,((CAN_FAM_PORCN:0.005423 ,AIL_MEL_PORCN:0.005423 )58:0.00809=
3 ,((PTE_VAM_PORCN:0.006508 ,BOS_TAU_PORCN:0.006508 )55:0.007494 ,((SPE_TRI=
_PORCN:0.003254 ,TUP_BEL_PORCN:0.003254 )61:0.006929 ,((OTO_GAR_PORCN:0.001=
085 ,(ORY_CUN_PORCN:0.001000 ,TUR_TRU_PORCN:0.001000 )68:0.001085 )65:0.005=
965 ,(EQU_CAB_PORCN:0.003688 ,(MAC_MUL_PORCN:0.002711 ,(PAN_TRO_PORCN:0.001=
446 ,(HOM_SAP_PORCN:0.001085 ,(PON_ABE_PORCN:0.001000 ,NOM_LEU_PORCN:0.0010=
00 )70:0.001085 )66:0.001446 )63:0.002711 )62:0.003688 )60:0.005965 )56:0.0=
06929 )54:0.007494 )52:0.008093 )51:0.008460 )50:0.008880 )49:0.009123 )48:=
0.010063 )47:0.010332 )46:0.010575 )45:0.011362 )44:0.012954 )43:0.015518 )=
41:0.018599 )40:0.019716 )39:0.022851 )36:0.033876 )30:0.057974 )26:0.06286=
2 )25:0.086538 )22:0.091250 )21:0.099348 )20:0.119618 )17:0.214465 ,(BRA_FL=
O_PORCN:0.189220 ,SAC_KOW_PORCN:0.189220 )12:0.214465 )11:0.257058 ,(NEM_VE=
C_PORCN:0.246631 ,AMP_QUE_PORCN:0.246631 )9:0.257058 )8:0.266904 ,(TRI_CAS_=
PORCN:0.259494 ,(PED_HUM_PORCN:0.227009 ,(NAS_VIT_PORCN:0.160241 ,(API_MEL_=
PORCN:0.031851 ,(BOM_TER_PORCN:0.004808 ,BOM_IMP_PORCN:0.004808 )59:0.03185=
1 )33:0.160241 )14:0.227009 )10:0.259494 )7:0.266904 )6:0.272631 )5:0.27554=
2 )4:0.287545 )3:0.295081 )2:0.303421 )1:0.0001;
https://bitbucket.org/galaxy/galaxy-central/changeset/75a03bacdc7a/
changeset: 75a03bacdc7a
user: jgoecks
date: 2012-08-27 20:08:55
summary: Merged in Tomithy/galaxy-central-phyloviz-2 (pull request #65)
affected #: 19 files
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -719,7 +719,49 @@
pass
=20
class Newick( Text ):
- pass
+    """New Hampshire/Newick format"""
+    file_ext = "nhx"
+
+    MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True )
+
+    def __init__(self, **kwd):
+        """Initialize Newick datatype"""
+        Text.__init__(self, **kwd)
+
+    def init_meta( self, dataset, copy_from=None ):
+        Text.init_meta( self, dataset, copy_from=copy_from )
+
+
+    def sniff( self, filename ):
+        """Returns False: the newick format is too general to be sniffed reliably."""
+        return False
+
+
+class Nexus( Text ):
+    """Nexus format as used by PAUP, MrBayes, etc."""
+    file_ext = "nex"
+
+    MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True )
+
+    def __init__(self, **kwd):
+        """Initialize Nexus datatype"""
+        Text.__init__(self, **kwd)
+
+    def init_meta( self, dataset, copy_from=None ):
+        Text.init_meta( self, dataset, copy_from=copy_from )
+
+
+    def sniff( self, filename ):
+        """Every Nexus file simply puts '#NEXUS' on its first line"""
+        f = open(filename, "r")
+        firstline = f.readline().upper()
+        f.close()
+
+        if "#NEXUS" in firstline:
+            return True
+        else:
+            return False
+
=20
# ------------- Utility methods --------------
=20
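[Editor's note] The two sniffers above behave very differently on purpose: Nexus files carry a reliable
'#NEXUS' magic string on their first line, while bare newick strings have no marker at all, so Newick.sniff
simply returns False and the datatype has to be assigned explicitly. A minimal, self-contained illustration
of the Nexus check (the helper name and temporary file below are invented for this sketch, not part of the
changeset):

    import tempfile

    def looks_like_nexus(path):
        # Mirrors Nexus.sniff above: only the first line is inspected, case-insensitively.
        with open(path, "r") as f:
            return "#NEXUS" in f.readline().upper()

    tmp = tempfile.NamedTemporaryFile("w", suffix=".nex", delete=False)
    tmp.write("#NEXUS\nBEGIN TREES;\nEND;\n")
    tmp.close()
    print(looks_like_nexus(tmp.name))    # True; a plain "(A,B,(C,D));" newick file would give False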
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 lib/galaxy/datatypes/xml.py
--- a/lib/galaxy/datatypes/xml.py
+++ b/lib/galaxy/datatypes/xml.py
@@ -76,3 +76,24 @@
dataset.blurb =3D 'file purged from disk'
def sniff( self, filename ):
return False
+
+class Phyloxml( GenericXml ):
+    """Format for defining phyloxml data http://www.phyloxml.org/"""
+    file_ext = "phyloxml"
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'Phyloxml data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        """Checks for the keyword 'phyloxml', always in lowercase, in the first few lines"""
+        f = open(filename, "r")
+        firstlines = "".join(f.readlines(5))
+        f.close()
+        if "phyloxml" in firstlines:
+            return True
+        return False
\ No newline at end of file
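[Editor's note] Note on the sniff above: f.readlines(5) passes a size hint, so Python reads whole lines
totalling roughly that many bytes (possibly rounded up to an internal buffer size) rather than the entire
document, which keeps the check cheap on large phyloXML files. A rough standalone equivalent of the same
keyword test (the helper name and file path are illustrative only):

    def looks_like_phyloxml(path):
        # Same idea as Phyloxml.sniff above: scan only the opening chunk of the file
        # for the lowercase "phyloxml" keyword instead of parsing the XML.
        with open(path, "r") as f:
            head = "".join(f.readlines(5))
        return "phyloxml" in head

    # e.g. against the test file added later in this changeset:
    # looks_like_phyloxml("test-data/visualization/phyloviz/3_phyloxml.xml")  -> True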
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 lib/galaxy/visualization/phyloviz/__init__.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/__init__.py
@@ -0,0 +1,1 @@
+__author__ =3D 'Tomithy'
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 lib/galaxy/visualization/phyloviz/baseparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/baseparser.py
@@ -0,0 +1,125 @@
+import json
+
+class Node(object):
+    """Node class of PhyloTree, which represents a clade in a phylogenetic tree"""
+ def __init__(self, nodeName, **kwargs):
+ """Creates a node and adds in the typical annotations"""
+ self.name, self.id =3D nodeName, kwargs.get("id", 0)
+ self.depth =3D kwargs.get("depth", 0)
+ self.children =3D []
+
+ self.isInternal =3D kwargs.get("isInternal", 0)
+ self.length, self.bootstrap =3D kwargs.get("length", 0), kwargs.ge=
t("bootstrap", None)
+ self.events =3D kwargs.get("events", "")
+
+ # clean up boot strap values
+ if self.bootstrap =3D=3D -1:
+ self.bootstrap =3D None
+
+ def addChildNode(self, child):
+ """Adds a child node to the current node"""
+ if isinstance(child, Node):
+ self.children.append(child)
+ else:
+ self.children +=3D child
+
+
+ def __str__(self):
+ return self.name + " id:" + str(self.id) + ", depth: " + str(self.=
depth)
+
+
+ def toJson(self):
+ """Converts the data in the node to a dict representation of json"=
""
+ thisJson =3D {
+ "name" : self.name,
+ "id" : self.id,
+ "depth" : self.depth,
+ "dist" : self.length
+ }
+ thisJson =3D self.addChildrenToJson(thisJson)
+ thisJson =3D self.addMiscToJson(thisJson)
+ return thisJson
+
+ def addChildrenToJson(self, jsonDict):
+        """Adds children to the json dict only when the node actually has children, so that the key does
+        not appear in the json dict when the children list is empty; this requirement comes from the d3
+        layout algorithm used for hiding subtrees."""
+ if len(self.children) > 0:
+ children =3D [ node.toJson() for node in self.children]
+ jsonDict["children"] =3D children
+ return jsonDict
+
+
+ def addMiscToJson(self, jsonDict):
+ """Adds other misc attributes to json if they are present"""
+ if not self.events =3D=3D "":
+ jsonDict["events"] =3D self.events
+ if not self.bootstrap =3D=3D None:
+ jsonDict["bootstrap"] =3D self.bootstrap
+ return jsonDict
+
+
+
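[Editor's note] To make the serialized shape concrete, here is a small, hypothetical round trip through the
Node class above (names and values are invented; in real use nodes come from PhyloTree.makeNode so ids are
assigned automatically). addChildrenToJson only emits the "children" key for internal nodes, which is what
the d3 layout on the client relies on:

    root = Node("root", id=0, depth=0)
    root.addChildNode(Node("A", id=1, depth=1, length=0.1))
    root.addChildNode(Node("B", id=2, depth=1, length=0.2))

    print(root.toJson())
    # roughly: {"name": "root", "id": 0, "depth": 0, "dist": 0,
    #           "children": [{"name": "A", "id": 1, "depth": 1, "dist": 0.1},
    #                        {"name": "B", "id": 2, "depth": 1, "dist": 0.2}]}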
+class PhyloTree(object):
+ """Standardized python based class to represent the phylogenetic tree =
parsed from different
+ phylogenetic file formats."""
+
+ def __init__(self):
+ self.root, self.rootAttr =3D None, {}
+ self.nodes =3D {}
+ self.title =3D None
+ self.id =3D 1
+
+ def addAttributesToRoot(self, attrDict):
+ """Adds attributes to root, but first we put it in a temp store an=
d bind it with root when .toJson is called"""
+ for key, value in attrDict.items():
+ self.rootAttr[key] =3D value
+
+ def makeNode(self, nodeName, **kwargs):
+ """Called to make a node within PhyloTree, arbitrary kwargs can be=
passed to annotate nodes
+ Tracks the number of nodes via internally incremented id"""
+ kwargs["id"] =3D self.id
+ self.id +=3D 1
+ return Node(nodeName, **kwargs)
+
+ def addRoot(self, root):
+ """Creates a root for phyloTree"""
+ assert isinstance(root, Node)
+ root.parent =3D None
+ self.root =3D root
+
+ def generateJsonableDict(self):
+        """Changes itself into a dictionary by recursively calling toJson on all its nodes. Think of it
+        as a dict in an array of dicts in an array of dicts, and so on..."""
+ jsonTree =3D ""
+ if self.root:
+ assert isinstance(self.root, Node)
+ jsonTree =3D self.root.toJson()
+ for key, value in self.rootAttr.items():
+ # transfer temporary stored attr to root
+ jsonTree[key] =3D value
+ else:
+ raise Exception("Root is not assigned!")
+ return jsonTree
+
+
+
+class Base_Parser(object):
+ """Base parsers contain all the methods to handle phylogeny tree creat=
ion and
+ converting the data to json that all parsers should have"""
+
+ def __init__(self):
+ self.phyloTrees =3D []
+
+ def parseFile(self, filePath):
+ """Base method that all phylogeny file parser should have"""
+ raise Exception("Base method for phylogeny file parsers is not imp=
lemented")
+
+ def toJson(self, jsonDict):
+ """Convenience method to get a json string from a python json dict=
"""
+ return json.dumps(jsonDict)
+
+ def _writeJsonToFile(self, filepath, json):
+ """Writes the file out to the system"""
+ f =3D open(filepath, "w")
+ f.writelines(json)
+ f.close()
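[Editor's note] For orientation, the way the concrete parsers further down in this changeset are expected to
use these classes is roughly the following (a sketch, not code from the changeset; the tree contents and
variable names are illustrative):

    tree = PhyloTree()
    root = tree.makeNode("root", depth=0, isInternal=True)
    root.addChildNode(tree.makeNode("A", depth=1, length=0.1))
    tree.addRoot(root)
    tree.addAttributesToRoot({"treeName": "example"})

    parser = Base_Parser()          # concrete parsers subclass this and implement parseFile()
    json_string = parser.toJson(tree.generateJsonableDict())    # nested dict -> json string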
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 lib/galaxy/visualization/phyloviz/newickparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/newickparser.py
@@ -0,0 +1,185 @@
+from baseparser import Base_Parser, PhyloTree
+import re
+
+class Newick_Parser(Base_Parser):
+ """For parsing trees stored in the newick format (.nhx)
+    It is necessarily more complex because this parser is later extended by the Nexus parser, which also parses newick."""
+
+
+ def __init__(self):
+ super(Newick_Parser, self).__init__()
+
+
+ def parseFile(self, filePath):
+ """Parses a newick file to obtain the string inside. Returns: json=
ableDict"""
+ with open(filePath, "r") as newickFile:
+ newickString =3D newickFile.read()
+ newickString =3D newickString.replace("\n", "").replace("\r", =
"")
+ return [self.parseData(newickString)], "Success"
+
+
+ def parseData(self, newickString):
+ """To be called on a newickString directly to parse it. Returns: j=
sonableDict"""
+ return self._parseNewickToJson(newickString)
+
+
+ def _parseNewickToJson(self, newickString, treeName=3DNone, nameMap=3D=
None):
+ """parses a newick representation of a tree into a PhyloTree data =
structure,
+ which can be easily converted to json"""
+ self.phyloTree =3D PhyloTree()
+ newickString =3D self.cleanNewickString(newickString)
+ if nameMap:
+ newickString =3D self._mapName(newickString, nameMap)
+
+ self.phyloTree.root =3D self.parseNode(newickString, 0)
+ if nameMap:
+ self.phyloTree.addAttributesToRoot({"treeName": treeName})
+
+ return self.phyloTree.generateJsonableDict()
+
+
+ def cleanNewickString(self, rawNewick):
+        """Removes semicolons, illegal json characters (\, ', ") and whitespace"""
+ return re.sub(r'\s|;|\"|\'|\\', '', rawNewick)
+
+
+ def _makeNodesFromString(self, string, depth):
+ """elements separated by comma could be empty"""
+
+ if string.find("(") !=3D -1:
+ raise Exception("Tree is not well form, location: " + string)
+
+ childrenString =3D string.split(",")
+ childrenNodes =3D []
+
+ for childString in childrenString:
+ if len(childString) =3D=3D 0:
+ continue
+ nodeInfo =3D childString.split(":")
+ name, length, bootstrap =3D "", None, -1
+ if len(nodeInfo) =3D=3D 2: # has length info
+ length =3D nodeInfo[1]
+ # checking for bootstap values
+ name =3D nodeInfo[0]
+ try: # Nexus may bootstrap in names position
+ name =3D float(name)
+ if 0<=3D name <=3D 1:
+ bootstrap =3D name
+ elif 1 <=3D name <=3D 100:
+ bootstrap =3D name / 100
+ name =3D ""
+ except ValueError:
+ name =3D nodeInfo[0]
+ else:
+ name =3D nodeInfo[0] # string only contains name
+ node =3D self.phyloTree.makeNode(name, length=3Dlength, depth=
=3Ddepth, bootstrap=3D bootstrap)
+ childrenNodes +=3D [node]
+ return childrenNodes
+
+
+
+ def _mapName(self, newickString, nameMap):
+ """
+ Necessary to replace names of terms inside nexus representation
+ Also, its here because Mailaud's doesnt deal with id_strings outsi=
de of quotes(" ")
+ """
+ newString =3D ""
+ start =3D 0
+ end =3D 0
+
+ for i in xrange(len(newickString)):
+ if newickString[i] =3D=3D "(" or newickString[i] =3D=3D ",":
+ if re.match(r"[,(]", newickString[i+1:]):
+ continue
+ else:
+ end =3D i + 1
+ # i now refers to the starting position of the term to=
be replaced,
+ # we will next find j which is the ending pos of the t=
erm
+ for j in xrange(i+1, len(newickString)):
+ enclosingSymbol =3D newickString[j] # the immedi=
ate symbol after a common or left bracket which denotes the end of a term
+ if enclosingSymbol =3D=3D ")" or enclosingSymbol =
=3D=3D ":" or enclosingSymbol =3D=3D ",":
+ termToReplace =3D newickString[end:j]
+
+ newString +=3D newickString[start : end] + na=
meMap[termToReplace] #+ "'" "'" +
+ start =3D j
+ break
+
+ newString +=3D newickString[start:]
+ return newString
+
+
+ def parseNode(self, string, depth):
+        """ Recursive method for parsing a newick string; works by stripping the string down into the
+        substrings of newick contained within brackets, which are used to call itself.
+        Eg ... ( A, B, (D, E)C, F, G ) ...
+        We first make the preceding nodes A, B, then the internal node C with its children D, E,
+        and finally the succeeding nodes F, G"""
+
+ # Base case where there is only an empty string
+ if string =3D=3D "":
+ return
+ # Base case there its only an internal claude
+ if string.find("(") =3D=3D -1:
+ return self._makeNodesFromString(string, depth)
+
+ nodes, children =3D [], [] # nodes refer to the nodes on this=
level, children refers to the child of the
+ start =3D 0
+ lenOfPreceedingInternalNodeString =3D 0
+ bracketStack =3D []
+
+ for j in xrange(len(string)):
+ if string[j] =3D=3D "(": #finding the positions of all the =
open brackets
+ bracketStack.append(j)
+ continue
+ if string[j] =3D=3D ")": #finding the positions of all the =
closed brackets to extract claude
+ i =3D bracketStack.pop()
+
+ if len(bracketStack) =3D=3D 0: # is child of current node
+
+ InternalNode =3D None
+
+ #First flat call to make nodes of the same depth but f=
rom the preceeding string.
+ startSubstring =3D string[start + lenOfPreceedingInter=
nalNodeString: i]
+ preceedingNodes =3D self._makeNodesFromString(startSu=
bstring, depth)
+ nodes +=3D preceedingNodes
+
+ # Then We will try to see if the substring has any int=
ernal nodes first, make it then make nodes preceeding it and succeeding it.
+ if j + 1 < len(string):
+ stringRightOfBracket =3D string[j+1:] # Eg. '=
(b:0.4,a:0.3)c:0.3, stringRightOfBracket =3D c:0.3
+ match =3D re.search(r"[\)\,\(]", stringRightOfBrac=
ket)
+ if match:
+ indexOfNextSymbol =3D match.start()
+ stringRepOfInternalNode =3D stringRightOfBrack=
et[:indexOfNextSymbol]
+ internalNodes =3D self._makeNodesFromString( s=
tringRepOfInternalNode, depth)
+ if len(internalNodes) > 0:
+ InternalNode =3D internalNodes[0]
+ lenOfPreceedingInternalNodeString =3D len(stri=
ngRepOfInternalNode)
+ else: # sometimes the node can be the last eleme=
nt of a string
+ InternalNode =3D self._makeNodesFromString(str=
ing[j+1:], depth)[0]
+ lenOfPreceedingInternalNodeString =3D len(stri=
ng) - j
+ if InternalNode =3D=3D None: #creating a generic=
node if it is unnamed
+ InternalNode =3D self.phyloTree.makeNode( "", dept=
h=3Ddepth, isInternal=3DTrue ) #"internal-" + str(depth)
+ lenOfPreceedingInternalNodeString =3D 0
+
+ # recussive call to make the internal claude
+ childSubString =3D string[ i + 1 : j ]
+ InternalNode.addChildNode(self.parseNode(childSubStrin=
g, depth + 1))
+
+ nodes.append(InternalNode) # we append the internal n=
ode later to preserve order
+
+ start =3D j + 1
+ continue
+
+ if depth =3D=3D 0: # if its the root node, we do nothing about =
it and return
+ return nodes[0]
+
+ # Adding last most set of children
+ endString =3D string[start:]
+        if string[start-1] == ")":   # if the symbol belongs to an internal node created previously, remove it from the string left to parse
+ match =3D re.search(r"[\)\,\(]", endString)
+ if match:
+ endOfNodeName =3D start + match.start() + 1
+ endString =3D string[endOfNodeName:]
+ nodes +=3D self._makeNodesFromString(endString, depth)
+
+ return nodes
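[Editor's note] As a quick sanity check of the recursive strategy described in the parseNode docstring, the
parser can be driven directly on a short string (a sketch; the newick string is invented and the comment
only summarises the resulting structure):

    parser = Newick_Parser()
    jsonable = parser.parseData("(A:0.1,(D:0.3,E:0.4)C:0.5,B:0.2)root;")
    # jsonable is a nested dict: the "root" node carries a "children" list with A, the internal
    # node C (which in turn holds D and E) and B; branch lengths are stored under "dist".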
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 lib/galaxy/visualization/phyloviz/nexusparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/nexusparser.py
@@ -0,0 +1,107 @@
+from newickparser import Newick_Parser
+import re
+
+MAX_READLINES =3D 200000
+
+
+class Nexus_Parser(Newick_Parser):
+
+ def __init__(self):
+ super(Nexus_Parser, self).__init__()
+
+ def parseFile(self, filePath):
+ """passes a file and extracts its Nexus content."""
+ return self.parseNexus(filePath)
+
+
+ def parseNexus(self, filename):
+        """ Nexus data is stored in blocks between a line starting with begin and another line starting with end;
+        Comments inside square brackets are to be ignored.
+        For more information: http://wiki.christophchamp.com/index.php/NEXUS_file_format
+        Nexus can store multiple trees
+ """
+
+ with open( filename, "rt") as nex_file:
+ nexlines =3D nex_file.readlines()
+
+ rowCount =3D 0
+ inTreeBlock =3D False # sentinel to check if we are in a t=
ree block
+ intranslateBlock =3D False # sentinel to check if we are in the=
translate region of the tree. Stores synonyms of the labellings
+ self.inCommentBlock =3D False
+ self.nameMapping =3D None # stores mapping representation us=
ed in nexus format
+ treeNames =3D []
+
+ for line in nexlines:
+ line =3D line.replace(";\n", "")
+ lline =3D line.lower()
+
+ if rowCount > MAX_READLINES or (not nex_file) :
+ break
+ rowCount +=3D1
+ # We are only interested in the tree block.
+ if "begin" in lline and "tree" in lline and not inTreeBlock:
+ inTreeBlock =3D True
+ continue
+ if inTreeBlock and "end" in lline[:3]:
+ inTreeBlock, currPhyloTree =3D False, None
+ continue
+
+ if inTreeBlock:
+
+ if "title" in lline: # Adding title to the tree
+ titleLoc =3D lline.find("title")
+ title =3D line[titleLoc + 5:].replace(" ", "")
+
+ continue
+
+ if "translate" in lline:
+ intranslateBlock =3D True
+ self.nameMapping =3D {}
+ continue
+
+ if intranslateBlock:
+ mappingLine =3D self.splitLinebyWhitespaces(line)
+ key, value =3D mappingLine[1], mappingLine[2].replace(=
",", "").replace("'","") #replacing illegal json characters
+ self.nameMapping[key] =3D value
+
+ # Extracting newick Trees
+ if "tree" in lline:
+ intranslateBlock =3D False
+
+ treeLineCols =3D self.splitLinebyWhitespaces(line)
+ treeName, newick =3D treeLineCols[2], treeLineCols[-1]
+
+ if newick =3D=3D "": # Empty lines can be found in =
tree blocks
+ continue
+
+ currPhyloTree =3D self._parseNewickToJson(newick, tree=
Name, nameMap=3Dself.nameMapping)
+
+ self.phyloTrees.append(currPhyloTree)
+ treeIndex =3D len(self.phyloTrees) - 1
+ treeNames.append( (treeName, treeIndex) ) # appendi=
ng name of tree, and its index
+ continue
+
+ return self.phyloTrees, treeNames
+
+
+ def splitLinebyWhitespaces(self, line):
+        """Splits the line on tabs and white spaces, so we can properly extract its columns."""
+ return re.split(r"\s+", line)
+
+
+ def checkComments(self, line):
+ """Check to see if the line/lines is a comment."""
+ if not self.inCommentBlock:
+ if "[" in line:
+ if "]" not in line:
+ self.inCommentBlock =3D True
+ else:
+ return "Nextline" # need to move on to the nextline =
after getting out of comment
+ else :
+ if "]" in line:
+ if line.rfind("[") > line.rfind("]"):
+ pass # a comment block is closed but an=
other is open.
+ else:
+ self.inCommentBlock =3D False
+ return "Nextline" # need to move on to the nextline =
after getting out of comment
+ return ""
\ No newline at end of file
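[Editor's note] Compared with the plain newick case, the Nexus parser returns two things, because a single
file can hold several trees; a sketch of the call site (the path points at the test file added later in this
changeset and is only illustrative):

    parser = Nexus_Parser()
    trees, tree_names = parser.parseFile("test-data/visualization/phyloviz/1_nexus.nex")
    # trees      -> one jsonable dict per TREE statement in the trees block
    # tree_names -> [(name, index), ...], which appears to be what the phylovizNexSelector
    #               drop-down in phyloviz.mako iterates over via config["trees"]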
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
@@ -0,0 +1,35 @@
+from newickparser import Newick_Parser
+from nexusparser import Nexus_Parser
+from phyloxmlparser import Phyloxml_Parser
+
+class Phyloviz_DataProvider(object):
+
+ def __init__(self):
+ pass
+
+ def parseFile(self, filepath, fileExt):
+        """Returns ([trees], meta).
+        Trees are actually an array of json dicts. It's usually one tree, except in the case of Nexus.
+        """
+        jsonDicts, meta = [], {}
+        try:
+            if fileExt == "nhx":    # parses newick files
+                newickParser = Newick_Parser()
+                jsonDicts, parseMsg = newickParser.parseFile(filepath)
+            elif fileExt == "phyloxml":    # parses phyloXML files
+                phyloxmlParser = Phyloxml_Parser()
+                jsonDicts, parseMsg = phyloxmlParser.parseFile(filepath)
+            elif fileExt == "nex":    # parses nexus files
+                nexusParser = Nexus_Parser()
+                jsonDicts, parseMsg = nexusParser.parseFile(filepath)
+                meta["trees"] = parseMsg
+            else:
+                raise Exception("File type is not supported")
+
+            meta["msg"] = parseMsg
+
+        except Exception:
+            jsonDicts, meta["msg"] = [], "Parse failed"
+
+        return jsonDicts, meta
+
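[Editor's note] The data provider is the single entry point the controller goes through; a minimal sketch of
the intended call (the dataset path and extension are illustrative):

    provider = Phyloviz_DataProvider()
    json_dicts, meta = provider.parseFile("some_dataset.nhx", "nhx")
    if meta["msg"] == "Parse failed":
        pass    # surface the error to the user
    else:
        tree = json_dicts[0]    # newick and phyloxml datasets carry a single tree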
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 lib/galaxy/visualization/phyloviz/phyloxmlparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/phyloviz/phyloxmlparser.py
@@ -0,0 +1,145 @@
+from baseparser import Base_Parser, PhyloTree, Node
+from lxml import etree
+
+class Phyloxml_Parser(Base_Parser):
+    """Parses a phyloxml file into a json representation that will be passed to PhyloViz for display"""
+
+ def __init__(self):
+ super(Phyloxml_Parser, self).__init__()
+ self.phyloTree =3D PhyloTree()
+ self.tagsOfInterest =3D {
+ "clade": "",
+ "name" : "name",
+ "branch_length" : "length",
+ "confidence" : "bootstrap",
+ "events" : "events"
+ }
+
+ def parseFile(self, filePath):
+ """passes a file and extracts its Phylogeny Tree content."""
+ phyloXmlFile =3D open(filePath, "r")
+
+ xmlTree =3D etree.parse(phyloXmlFile)
+ xmlRoot =3D xmlTree.getroot()[0]
+ self.nameSpaceIndex =3D xmlRoot.tag.rfind("}") + 1 # used later by=
the clean tag method to remove the name space in every element.tag
+
+ phyloRoot =3D None
+ for child in xmlRoot:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag =3D=3D "clade":
+ phyloRoot =3D child
+ elif childTag =3D=3D "name":
+ self.phyloTree.title =3D child.text
+
+ self.phyloTree.root =3D self.parseNode(phyloRoot, 0)
+ jsonDict =3D self.phyloTree.generateJsonableDict()
+ return [jsonDict], "Success"
+
+
+ def parseNode(self, node, depth):
+        """Parses any node within a phyloxml tree and looks out for clades, which signal the creation of
+        nodes - internal OR leaf"""
+ assert isinstance(node, etree._Element)
+
+ tag =3D self.cleanTag(node.tag)
+ if not tag =3D=3D "clade":
+ return None
+ hasInnerClade =3D False
+
+ # peeking once for parent and once for child to check if the node =
is internal
+ for child in node:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag =3D=3D "clade":
+ hasInnerClade =3D True
+ break
+
+ if hasInnerClade: # this node is an internal node
+ currentNode =3D self._makeInternalNode(node, depth=3D depth)
+ for child in node:
+ child =3D self.parseNode(child, depth + 1)
+ if isinstance(child, Node):
+ currentNode.addChildNode(child)
+
+ else: # this node is a leaf node
+ currentNode =3D self._makeLeafNode(node, depth=3Ddepth+1)
+
+ return currentNode
+
+
+ def _makeLeafNode(self, leafNode, depth =3D 0 ):
+ """Makes leaf nodes by calling Phylotree methods"""
+ node =3D {}
+ for child in leafNode:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag in self.tagsOfInterest:
+ key =3D self.tagsOfInterest[childTag] # need to map phy=
loxml terms to ours
+ node[key] =3D child.text
+
+ node["depth"] =3D depth
+ return self.phyloTree.makeNode(self._getNodeName(leafNode), **node)
+
+    def _getNodeName(self, node, depth=-1):
+        """Gets the name of a clade. It handles the case where a taxonomy node is involved"""
+
+        def getTagFromTaxonomyNode(node):
+            """Returns the name of a taxonomy node. A taxonomy node has to be treated differently, as the name
+            is embedded one level deeper"""
+ phyloxmlTaxoNames =3D {
+ "common_name" : "",
+ "scientific_name" : "",
+ "code" : ""
+ }
+ for child in node:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag in phyloxmlTaxoNames:
+ return child.text
+ return ""
+
+ nodeName =3D ""
+ for child in node:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag =3D=3D "name" :
+ nodeName =3D child.text
+ break
+ elif childTag =3D=3D "taxonomy":
+ nodeName =3D getTagFromTaxonomyNode(child)
+ break
+
+ return nodeName
+
+
+    def _makeInternalNode(self, internalNode, depth=0):
+        """ Makes an internal node from an element object that is guaranteed to be a parent node.
+        Gets the values of interest, like events, and appends them to a custom node object that will be passed to PhyloTree to make nodes
+        """
+ node =3D {}
+ for child in internalNode:
+ childTag =3D self.cleanTag(child.tag)
+ if childTag =3D=3D "clade":
+ continue
+ elif childTag in self.tagsOfInterest:
+ if childTag =3D=3D "events": # events is nested 1 more =
level deeper than others
+ key, text =3D "events", self.cleanTag(child[0].tag)
+ else:
+ key =3D self.tagsOfInterest[childTag]
+ text =3D child.text
+ node[key] =3D text
+
+
+ return self.phyloTree.makeNode(self._getNodeName(internalNode, dep=
th), **node)
+
+
+ def cleanTag(self, tagString):
+ return tagString[self.nameSpaceIndex:]
+
+
+if __name__=3D=3D"__main__":
+
+ # Files tested against
+ parser =3D Phyloxml_Parser()
+ filepath =3D "../data/" +"apaf.xml"
+ # filepath =3D "../data/" +"12_multiple_supports.xml"
+
+ # filepath =3D "../data/" +"bcl_2.xml"
+ # filepath =3D "../data/" +"reducedXml.xml"
+ parser.parseFile(filepath)
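
For context, a minimal sketch of how this parser is meant to be driven (not part of the committed code; it assumes lxml is installed, that Galaxy's lib/ directory is on sys.path, and it reuses one of the test files added later in this changeset):

    from galaxy.visualization.phyloviz.phyloxmlparser import Phyloxml_Parser

    parser = Phyloxml_Parser()
    # parseFile returns a one-element list of jsonable tree dicts plus a status string
    json_dicts, message = parser.parseFile("test-data/visualization/phyloviz/3_phyloxml.xml")
    tree = json_dicts[0]
    print(message)
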
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 lib/galaxy/web/controllers/phyloviz.py
--- /dev/null
+++ b/lib/galaxy/web/controllers/phyloviz.py
@@ -0,0 +1,97 @@
+import pkg_resources
+pkg_resources.require( "bx-python" )
+
+from galaxy.util.json import to_json_string, from_json_string
+from galaxy.web.base.controller import *
+from galaxy.visualization.phyloviz.phyloviz_dataprovider import Phyloviz_D=
ataProvider
+
+
+class PhyloVizController( BaseUIController, UsesVisualizationMixin, UsesHi=
storyDatasetAssociationMixin, SharableMixin ):
+ """
+ Controller for phyloViz browser interface.
+ """
+ def __init__(self, app ):
+ BaseUIController.__init__( self, app )
+
+ @web.expose
+ @web.require_login()
+ def index( self, trans, dataset_id =3D None, **kwargs ):
+ """
+ The index method is called using phyloviz/ with a dataset id passe=
d in.
+ The relevant data set is then retrieved via get_json_from_datasetI=
d which interfaces with the parser
+ The json representation of the phylogenetic tree along with the co=
nfig is then written in the .mako template and passed back to the user
+ """
+ json, config =3D self.get_json_from_datasetId(trans, dataset_id)
+ config["saved_visualization"] =3D False
+ return trans.fill_template( "visualization/phyloviz.mako", data =
=3D json, config=3Dconfig)
+
+
+ @web.expose
+ def visualization(self, trans, id):
+ """
+ Called using a viz_id (id) to retrieved stored visualization data =
(in json format) and all the viz_config
+ """
+ viz =3D self.get_visualization(trans, id)
+ config =3D self.get_visualization_config(trans, viz)
+ config["saved_visualization"] =3D True
+ data =3D config["root"]
+
+ return trans.fill_template( "visualization/phyloviz.mako", data =
=3D data, config=3Dconfig)
+
+
+ @web.expose
+ @web.json
+ def load_visualization_json(self, trans, viz_id):
+ """
+ Though not used in current implementation, this provides user with=
a convenient method to retrieve the viz_data & viz_config via json.
+ """
+ viz =3D self.get_visualization(trans, viz_id)
+ viz_config =3D self.get_visualization_config(trans, viz)
+ viz_config["saved_visualization"] =3D True
+ return {
+ "data" : viz_config["root"],
+ "config" : viz_config
+ }
+
+
+ @web.expose
+ @web.json
+ def getJsonData(self, trans, dataset_id, treeIndex=3D0):
+ """
+ Method to retrieve data asynchronously via json format. Retriving =
from here rather than
+ making a direct datasets/ call allows for some processing and even=
t capturing
+ """
+ treeIndex =3D int(treeIndex)
+ json, config =3D self.get_json_from_datasetId(trans, dataset_id, t=
reeIndex)
+ packedJson =3D {
+ "data" : json,
+ "config" : config
+ }
+
+ return packedJson
+
+
+ def get_json_from_datasetId(self, trans, dataset_id, treeIndex=3D0):
+ """
+ For interfacing phyloviz controllers with phyloviz visualization d=
ata provider (parsers)
+ """
+ dataset =3D self.get_dataset(trans, dataset_id)
+        fileExt, filepath = dataset.ext, dataset.file_name       # .name stores the name of the dataset from the original upload
+        json, config = "", {}                        # config contains properties of the tree and file
+
+ if fileExt =3D=3D "json":
+ something, json =3D self.get_data(dataset)
+ else:
+ try:
+ pd =3D Phyloviz_DataProvider()
+ json, config =3D pd.parseFile(filepath, fileExt)
+ json =3D json[treeIndex]
+ except Exception:
+ pass
+
+ config["title"] =3D dataset.display_name()
+ config["ext"] =3D fileExt
+ config["dataset_id"] =3D dataset_id
+ config["treeIndex"] =3D treeIndex
+
+ return json, config
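
Since getJsonData is exposed with @web.json, the tree can also be fetched over HTTP. A rough sketch of such a request (assuming a Galaxy server at localhost:8080 and a valid, security-encoded dataset id; the placeholder id and the requests library are not part of this changeset, and any required authentication is omitted):

    import requests

    base = "http://localhost:8080"
    dataset_id = "ENCODED_DATASET_ID"   # hypothetical placeholder
    resp = requests.get(base + "/phyloviz/getJsonData",
                        params={"dataset_id": dataset_id, "treeIndex": 0})
    packed = resp.json()
    # the controller returns {"data": <tree json>, "config": {...}} as built above
    print(packed["config"]["title"], packed["config"]["ext"])
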
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py
+++ b/lib/galaxy/web/controllers/visualization.py
@@ -16,6 +16,10 @@
action =3D "paramamonster"
elif item.type =3D=3D "circster":
action =3D "circster"
+ elif item.type =3D=3D "phyloviz":
+ # Support phyloviz
+ controller =3D "phyloviz"
+ action =3D "visualization"
return dict( controller=3Dcontroller, action=3Daction, id=3Ditem.i=
d )
=20
# Grid definition
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 static/scripts/viz/phyloviz.js
--- /dev/null
+++ b/static/scripts/viz/phyloviz.js
@@ -0,0 +1,955 @@
+var UserMenuBase =3D Backbone.View.extend({
+ /**
+     * Base class of any menus that take in user interaction. Contains checking methods.
+ */
+
+ className: 'UserMenuBase',
+
+ isAcceptableValue : function ($inputKey, min, max) {
+ /**
+         * Check if an input value is a number and falls between min and max.
+ */
+ var self =3D this,
+ value =3D $inputKey.val(),
+ fieldName =3D $inputKey.attr("displayLabel") || $inputKey.attr=
("id").replace("phyloViz", "");
+
+ function isNumeric(n) {
+ return !isNaN(parseFloat(n)) && isFinite(n);
+ }
+
+ if (!isNumeric(value)){
+ alert(fieldName + " is not a number!");
+ return false;
+ }
+
+ if ( value > max){
+ alert(fieldName + " is too large.");
+ return false;
+ } else if ( value < min) {
+ alert(fieldName + " is too small.");
+ return false;
+ }
+ return true;
+ },
+
+ hasIllegalJsonCharacters : function($inputKey) {
+ /**
+         * Check if any user string input has illegal characters that json cannot accept
+         */
+        if ($inputKey.val().search(/"|'|\\/) !== -1){
+            alert("Named fields cannot contain these illegal characters: double quote(\"), single quote(\'), or backslash(\\). ");
+ return true;
+ }
+ return false;
+ }
+});
+
+
+function PhyloTreeLayout() {
+    /**
+     * -- Custom Layout call for phyloViz to suit the needs of a phylogenetic tree.
+     * -- Specifically:  1) Nodes have a displacement of (= evo dist X depth separation) from their parent
+     *                   2) Nodes must appear in order after they have expanded and contracted
+     */
+
+    var self = this,
+        hierarchy = d3.layout.hierarchy().sort(null).value(null),
+        height = 360, // ! represents both the layout angle and the height of the layout, in px
+        layoutMode = "Linear",
+        leafHeight = 18, // height of each individual leaf node
+        depthSeparation = 200, // separation between nodes of different depth, in px
+        leafIndex = 0, // change to recursive call
+        defaultDist = 0.5, // tree defaults to 0.5 dist if no dist is specified
+        maxTextWidth = 50; // maximum length of the text labels
+
+
+ self.leafHeight =3D function(inputLeafHeight){
+ if (typeof inputLeafHeight =3D=3D=3D "undefined"){ return leafHeig=
ht; }
+ else { leafHeight =3D inputLeafHeight; return self;}
+ };
+
+ self.layoutMode =3D function(mode){
+ if (typeof mode =3D=3D=3D "undefined"){ return layoutMode; }
+ else { layoutMode =3D mode; return self;}
+ };
+
+ self.layoutAngle =3D function(angle) { // changes the layout angle =
of the display, which is really changing the height
+ if (typeof angle =3D=3D=3D "undefined"){ return height; }
+ if (isNaN(angle) || angle < 0 || angle > 360) { return self; } // =
to use default if the user puts in strange values
+ else { height =3D angle; return self;}
+ };
+
+ self.separation =3D function(dist){ // changes the dist between the =
nodes of different depth
+ if (typeof dist =3D=3D=3D "undefined"){ return depthSeparation; }
+ else { depthSeparation =3D dist; return self;}
+ };
+
+ self.links =3D function (nodes) { // uses d3 native method to gene=
rate links. Done.
+ return d3.layout.tree().links(nodes);
+ };
+
+ // -- Custom method for laying out phylogeny tree in a linear fashion
+ self.nodes =3D function (d, i) {
+ var _nodes =3D hierarchy.call(self, d, i), // self is to f=
ind the depth of all the nodes, assumes root is passed in
+ nodes =3D [],
+ maxDepth =3D 0,
+ numLeaves =3D 0;
+
+ // changing from hierarchy's custom format for data to usable form=
at
+ _nodes.forEach(function (_node){
+ var node =3D _node.data;
+ node.depth =3D _node.depth;
+ maxDepth =3D node.depth > maxDepth ? node.depth : maxDepth; /=
/finding max depth of tree
+ nodes.push(node);
+ });
+        // counting the number of leaf nodes and assigning max depth to nodes that do not have children, to flush all the leaf nodes
+ nodes.forEach(function(node){
+ if ( !node.children ) { //&& !node._children
+ numLeaves +=3D 1;
+                node.depth = maxDepth;   // a node with no children is flushed to the max depth
+ }
+ });
+
+ leafHeight =3D layoutMode =3D=3D=3D "Circular" ? height / numLeave=
s : leafHeight;
+ leafIndex =3D 0;
+ layout(nodes[0], maxDepth, leafHeight, null);
+
+ return nodes;
+ };
+
+
+ function layout (node, maxDepth, vertSeparation, parent) {
+        /**
+         * -- Function with the side effect of adding x0, y0 to all children; takes in the root as the starting point,
+         *  assuming that the leaf nodes are sorted in the presented order
+         *          horizontal (y0) is calculated according to (= evo dist X depth separation) from their parent
+         *          vertical (x0) - if leaf node: find its order among all of the leaf nodes (=== node.id), then multiply by verticalSeparation
+         *                        - if parent node: it is placed at the midpoint of all of its children nodes
+         * -- The layout will first calculate the y0 field going towards the leaves, and x0 when returning
+         */
+ var children =3D node.children,
+ sumChildVertSeparation =3D 0;
+
+ // calculation of node's dist from parents, going down.
+ var dist =3D node.dist || defaultDist;
+        dist = dist > 1 ? 1 : dist;      // We constrain all dist to be at most one
+ node.dist =3D dist;
+ if (parent !=3D=3D null){
+ node.y0 =3D parent.y0 + dist * depthSeparation;
+ } else { //root node
+ node.y0 =3D maxTextWidth;
+ }
+
+
+        // if a node has no children, we will treat it as a leaf and start laying it out first
+ if (!children) {
+ node.x0 =3D leafIndex++ * vertSeparation;
+ } else {
+            // if it has children, we will visit all its children and calculate its position from them
+ children.forEach( function (child) {
+ child.parent =3D node;
+ sumChildVertSeparation +=3D layout(child, maxDepth, vertSe=
paration, node);
+ });
+ node.x0 =3D sumChildVertSeparation / children.length;
+ }
+
+ // adding properties to the newly created node
+ node.x =3D node.x0;
+ node.y =3D node.y0;
+ return node.x0;
+ }
+ return self;
+}
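
As a cross-check of the layout rules described in the comments above, here is a small Python restatement of the same x0/y0 computation (a sketch only, assuming each node is a plain dict with optional "dist" and "children" keys; it is not part of the committed code):

    # mirrors layout(): y0 grows by dist * depth_separation going down,
    # leaves take successive vertical slots, parents sit at the midpoint of their children
    def layout(node, parent=None, leaf_state=None,
               depth_separation=200, leaf_height=18, default_dist=0.5, max_text_width=50):
        if leaf_state is None:
            leaf_state = {"index": 0}                   # stands in for the closed-over leafIndex
        dist = min(node.get("dist", default_dist), 1)   # dist is capped at 1
        node["y0"] = max_text_width if parent is None else parent["y0"] + dist * depth_separation
        children = node.get("children")
        if not children:                                # leaf: next free vertical slot
            node["x0"] = leaf_state["index"] * leaf_height
            leaf_state["index"] += 1
        else:                                           # internal node: midpoint of its children
            xs = [layout(c, node, leaf_state, depth_separation, leaf_height,
                         default_dist, max_text_width) for c in children]
            node["x0"] = sum(xs) / float(len(children))
        return node["x0"]

    root = {"children": [{"dist": 0.2}, {"dist": 0.9, "children": [{}, {}]}]}
    layout(root)   # adds x0/y0 in place, much like the d3 version above
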
+
+
+/**
+ * -- PhyloTree Model --
+ */
+var PhyloTree =3D Visualization.extend({
+ defaults : {
+ layout: "Linear",
+        separation : 250,    // px dist between nodes of different depth to represent 1 evolutionary unit
+ leafHeight: 18,
+ type : "phyloviz", // visualization type
+ title : "Title",
+ scaleFactor: 1,
+ translate: [0,0],
+ fontSize: 12, //fontSize of node label
+ selectedNode : null,
+ nodeAttrChangedTime : 0
+ },
+
+ root : {}, // Root has to be its own independent object because it is =
not part of the viz_config
+
+ toggle : function (d) {
+ /**
+ * Mechanism to expand or contract a single node. Expanded nodes h=
ave a children list, while for
+ * contracted nodes the list is stored in _children. Nodes with th=
eir children data stored in _children will not have their
+ * children rendered.
+ */
+ if(typeof d =3D=3D=3D "undefined") {return ;}
+ if (d.children ) {
+ d._children =3D d.children;
+ d.children =3D null;
+ } else {
+ d.children =3D d._children;
+ d._children =3D null;
+ }
+ },
+
+    toggleAll : function(d) {
+        /**
+         *  Contracts the phylotree to a single node by repeatedly calling itself to place all the lists
+         *  of children under _children.
+         */
+        if (d.children && d.children.length !== 0) {
+            d.children.forEach(this.toggleAll, this);   // pass `this` so the recursive call keeps its context
+            this.toggle(d);
+        }
+    },
+
+ getData : function (){
+ /**
+ * Return the data of the tree. Used for preserving state.
+ */
+ return this.root;
+ },
+
+ save: function() {
+ /**
+         *  Overriding the default save mechanism to do some cleaning of circular references in the
+         *  phyloTree and to include the phyloTree in the saved json
+ */
+ var root =3D this.root;
+ cleanTree(root);
+ this.set("root", root);
+
+ function cleanTree(node){
+ // we need to remove parent to delete circular reference
+ delete node.parent;
+
+ // removing unnecessary attributes
+ if (node._selected){ delete node._selected;}
+
+ node.children ? node.children.forEach(cleanTree) : 0;
+ node._children ? node._children.forEach(cleanTree) : 0;
+ }
+
+ var config =3D jQuery.extend(true, {}, this.attributes);
+ config["selectedNode"] =3D null;
+
+ show_message("Saving to Galaxy", "progress");
+
+ return $.ajax({
+ url: this.url(),
+ type: "POST",
+ dataType: "json",
+ data: {
+ vis_json: JSON.stringify(config)
+ },
+ success: function(res){
+ var viz_id =3D res.url.split("id=3D")[1].split("&")[0],
+ viz_url =3D "/phyloviz/visualization?id=3D" + viz_id;
+ window.history.pushState({}, "", viz_url + window.location=
.hash);
+ hide_modal();
+ }
+ });
+ }
+});
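
The save() override above has to strip the parent back-references that the layout step adds, since JSON.stringify cannot serialize a cyclic structure. The same cleanup, restated in Python for clarity (a sketch assuming nodes are plain dicts; not part of the committed code):

    def clean_tree(node):
        node.pop("parent", None)      # drop the back-reference that creates the cycle
        node.pop("_selected", None)   # transient UI state is not worth persisting
        for child in (node.get("children") or []) + (node.get("_children") or []):
            clean_tree(child)
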
+
+
+
+/**
+ * -- Views --
+ */
+var PhylovizLayoutBase =3D Backbone.View.extend({
+ /**
+ * Stores the default variable for setting up the visualization
+ */
+ defaults : {
+ nodeRadius : 4.5 // radius of each node in the diagram
+ },
+
+
+ stdInit : function (options) {
+ /**
+ * Common initialization in layouts
+ */
+
+ var self =3D this;
+ self.model.on("change:separation change:leafHeight change:fontSize=
change:nodeAttrChangedTime", self.updateAndRender, self);
+
+ self.vis =3D options.vis;
+ self.i =3D 0;
+ self.maxDepth =3D -1; // stores the max depth of the tree
+
+ self.width =3D options.width;
+ self.height =3D options.height;
+ },
+
+
+ updateAndRender : function(source) {
+ /**
+ * Updates the visualization whenever there are changes in the ex=
pansion and contraction of nodes
+ * AND possibly when the tree is edited.
+ */
+ var vis =3D d3.select(".vis"),
+ self =3D this;
+ source =3D source || self.model.root;
+
+ self.renderNodes(source);
+ self.renderLinks(source);
+ self.addTooltips();
+ },
+
+
+ renderLinks : function(source) {
+ /**
+ * Renders the links for the visualization.
+ */
+ var self =3D this;
+ var diagonal =3D self.diagonal;
+ var duration =3D self.duration;
+ var layoutMode =3D self.layoutMode;
+ var link =3D self.vis.selectAll("g.completeLink")
+ .data(self.tree.links(self.nodes), function(d) { return d.targ=
et.id; });
+
+        var calculateLinePos = function(d) {
+            d.pos0 = d.source.y0 + " " + d.source.x0;   // position of the source node <=> starting location of the line drawn
+            d.pos1 = d.source.y0 + " " + d.target.x0;  // position where the line makes a right angle bend
+            d.pos2 = d.target.y0 + " " + d.target.x0;   // point where the horizontal line becomes a dotted line
+        };
+
+ var linkEnter =3D link.enter().insert("svg:g","g.node")
+ .attr("class", "completeLink");
+
+
+ linkEnter.append("svg:path")
+ .attr("class", "link")
+ .attr("d", function(d) {
+                calculateLinePos(d);
+ return "M " + d.pos0 + " L " + d.pos1;
+ });
+
+ var linkUpdate =3D link.transition().duration(500);
+
+ linkUpdate.select("path.link")
+ .attr("d", function(d) {
+                calculateLinePos(d);
+ return "M " + d.pos0 + " L " + d.pos1 + " L " + d.pos2;
+ });
+
+ var linkExit =3D link.exit().remove();
+
+ },
+
+ // User Interaction methods below
+
+ selectNode : function(node){
+ /**
+         *  Displays the information for editing
+ */
+ var self =3D this;
+ d3.selectAll("g.node")
+ .classed("selectedHighlight", function(d){
+ if (node.id =3D=3D=3D d.id){
+                    if(node._selected) { // for de-selecting a node.
+ delete node._selected;
+ return false;
+ } else {
+ node._selected =3D true;
+ return true;
+ }
+ }
+ return false;
+ });
+
+ self.model.set("selectedNode", node);
+ $("#phyloVizSelectedNodeName").val(node.name);
+ $("#phyloVizSelectedNodeDist").val(node.dist);
+ $("#phyloVizSelectedNodeAnnotation").val(node.annotation || "");
+ },
+
+ addTooltips : function (){
+ /**
+ * Creates bootstrap tooltip for the visualization. Has to be cal=
led repeatedly due to newly generated
+ * enterNodes
+ */
+ $(".bs-tooltip").remove(); //clean up tooltip, just in case i=
ts listeners are removed by d3
+ $(".node")
+ .attr("data-original-title", function(){
+ var d =3D this.__data__,
+ annotation =3D d.annotation || "None" ;
+ return d ? (d.name ? d.name + "<br/>" : "") + "Dist: " + d=
.dist + " <br/>Annotation: " + annotation: "";
+ })
+ .tooltip({'placement':'top', 'trigger' : 'hover'});
+
+ }
+});
+
+
+
+
+var PhylovizLinearView =3D PhylovizLayoutBase.extend({
+ /**
+     *  Linear layout class of Phyloviz; it is responsible for rendering the nodes and
+     *  calls PhyloTreeLayout to determine the positions of the nodes
+ */
+ initialize : function(options){
+ // Default values of linear layout
+ var self =3D this;
+ self.margins =3D options.margins;
+ self.layoutMode =3D "Linear";
+
+ self.stdInit(options);
+
+ self.layout();
+ self.updateAndRender(self.model.root);
+ },
+
+ layout : function() {
+ /**
+         *  Creates the basic layout of a linear tree by precalculating fixed values.
+         *  One-off calculations are also made here
+ */
+
+ var self =3D this;
+
+ self.tree =3D new PhyloTreeLayout().layoutMode("Linear");
+ self.diagonal =3D d3.svg.diagonal()
+ .projection(function(d) { return [d.y, d.x ]; });
+ },
+
+ renderNodes : function (source) {
+ /**
+         *  Renders the nodes based on the Linear layout.
+ */
+ var self =3D this,
+ fontSize =3D self.model.get("fontSize") + "px";
+
+ // assigning properties from models
+ self.tree.separation(self.model.get("separation")).leafHeight(self=
.model.get("leafHeight"));
+
+ var duration =3D 500,
+ nodes =3D self.tree.separation(self.model.get("separation")).n=
odes(self.model.root);
+
+ var node =3D self.vis.selectAll("g.node")
+ .data(nodes, function(d) { return d.name + d.id || (d.id =3D +=
+self.i); });
+
+        // These variables have to be passed into the link updates, which are in the base methods
+ self.nodes =3D nodes;
+ self.duration =3D duration;
+
+ // ------- D3 ENTRY --------
+ // Enter any new nodes at the parent's previous position.
+ var nodeEnter =3D node.enter().append("svg:g")
+ .attr("class", "node")
+ .on("dblclick", function(){ d3.event.stopPropagation(); })
+ .on("click", function(d) {
+ if (d3.event.altKey) {
+ self.selectNode(d); // display info if alt is p=
ressed
+ } else {
+ if(d.children && d.children.length =3D=3D=3D 0){ retur=
n;} // there is no need to toggle leaves
+ self.model.toggle(d); // contract/expand nodes at da=
ta level
+ self.updateAndRender(d); // re-render the tree
+ }
+ });
+
+ nodeEnter.attr("transform", function(d) { return "translate(" + so=
urce.y0 + "," + source.x0 + ")"; });
+
+ nodeEnter.append("svg:circle")
+ .attr("r", 1e-6)
+ .style("fill", function(d) { return d._children ? "lightsteelb=
lue" : "#fff"; });
+
+ nodeEnter.append("svg:text")
+ .attr("class", "nodeLabel")
+ .attr("x", function(d) { return d.children || d._children ? -1=
0 : 10; })
+ .attr("dy", ".35em")
+ .attr("text-anchor", function(d) { return d.children || d._chi=
ldren ? "end" : "start"; })
+ .style("fill-opacity", 1e-6);
+
+ // ------- D3 TRANSITION --------
+ // Transition nodes to their new position.
+ var nodeUpdate =3D node.transition()
+ .duration(duration);
+
+ nodeUpdate.attr("transform", function(d) {
+ return "translate(" + d.y + "," + d.x + ")"; });
+
+ nodeUpdate.select("circle")
+ .attr("r", self.defaults.nodeRadius)
+ .style("fill", function(d) { return d._children ? "lightsteelb=
lue" : "#fff"; });
+
+ nodeUpdate.select("text")
+ .style("fill-opacity", 1)
+ .style("font-size", fontSize)
+ .text(function(d) { return d.name; });
+
+ // ------- D3 EXIT --------
+ // Transition exiting nodes to the parent's new position.
+ var nodeExit =3Dnode.exit().transition()
+ .duration(duration)
+ .remove();
+
+ nodeExit.select("circle")
+ .attr("r", 1e-6);
+
+ nodeExit.select("text")
+ .style("fill-opacity", 1e-6);
+
+ // Stash the old positions for transition.
+ nodes.forEach(function(d) {
+ d.x0 =3D d.x; // we need the x0, y0 for parents with children
+ d.y0 =3D d.y;
+ });
+ }
+
+});
+
+var PhylovizView =3D Backbone.View.extend({
+
+ className: 'phyloviz',
+
+ initialize: function(options) {
+ var self =3D this;
+ // -- Default values of the vis
+ self.MIN_SCALE =3D 0.05; //for zooming
+ self.MAX_SCALE =3D 5;
+ self.MAX_DISPLACEMENT =3D 500;
+ self.margins =3D [10, 60, 10, 80];
+
+ self.width =3D $("#PhyloViz").width();
+ self.height =3D $("#PhyloViz").height();
+ self.radius =3D self.width;
+ self.data =3D options.data;
+
+ // -- Events Phyloviz view responses to
+ $(window).resize(function(){
+ self.width =3D $("#PhyloViz").width();
+ self.height =3D $("#PhyloViz").height();
+ self.render();
+ });
+
+ // -- Create phyloTree model
+ self.phyloTree =3D new PhyloTree(options.config);
+ self.phyloTree.root =3D self.data;
+
+ // -- Set up UI functions of main view
+ self.zoomFunc =3D d3.behavior.zoom().scaleExtent([self.MIN_SCALE, =
self.MAX_SCALE]);
+ self.zoomFunc.translate(self.phyloTree.get("translate"));
+ self.zoomFunc.scale(self.phyloTree.get("scaleFactor"));
+
+ // -- set up header buttons, search and settings menu
+ self.navMenu =3D new HeaderButtons(self);
+ self.settingsMenu =3D new SettingsMenu({phyloTree : self.phyloTree=
});
+ self.nodeSelectionView =3D new NodeSelectionView({phyloTree : self=
.phyloTree});
+ self.search =3D new PhyloVizSearch();
+
+
+ setTimeout(function(){ // using settimeout to call the zoomAn=
dPan function according to the stored attributes in viz_config
+ self.zoomAndPan();
+ }, 1000);
+ },
+
+ render: function(){
+ // -- Creating helper function for vis. --
+ var self =3D this;
+ $("#PhyloViz").empty();
+
+ // -- Layout viz. --
+ self.mainSVG =3D d3.select("#PhyloViz").append("svg:svg")
+ .attr("width", self.width)
+ .attr("height", self.height)
+ .attr("pointer-events", "all")
+ .call(self.zoomFunc.on("zoom", function(){
+ self.zoomAndPan();
+ }));
+
+ self.boundingRect =3D self.mainSVG.append("svg:rect")
+ .attr("class", "boundingRect")
+ .attr("width", self.width)
+ .attr("height", self.height)
+ .attr("stroke", "black")
+ .attr("fill", "white");
+
+ self.vis =3D self.mainSVG
+ .append("svg:g")
+ .attr("class", "vis");
+
+ self.layoutOptions =3D {
+ model : self.phyloTree,
+ width : self.width,
+ height : self.height,
+ vis: self.vis,
+ margins: self.margins
+ };
+
+ // -- Creating Title
+ $("#title").text("Phylogenetic Tree from " + self.phyloTree.get("t=
itle") + ":");
+
+ // -- Create Linear view instance --
+        var linearView = new PhylovizLinearView(self.layoutOptions);
+ },
+
+ zoomAndPan : function(event){
+ /**
+         * Function to zoom and pan the svg element within which the entire tree is contained
+         * Uses d3.zoom events, and extends them to allow manual updates and to keep state in the model
+ */
+ if (typeof event !=3D=3D "undefined") {
+ var zoomParams =3D event.zoom,
+ translateParams =3D event.translate;
+ }
+
+ var self =3D this,
+ scaleFactor =3D self.zoomFunc.scale(),
+ translationCoor =3D self.zoomFunc.translate(),
+ zoomStatement =3D "",
+ translateStatement =3D "";
+
+ // Do manual scaling.
+ switch (zoomParams) {
+ case "reset":
+ scaleFactor =3D 1.0;
+ translationCoor =3D [0,0]; break;
+ case "+":
+ scaleFactor *=3D 1.1; break;
+ case "-":
+ scaleFactor *=3D 0.9; break;
+ default:
+ if (typeof zoomParams =3D=3D=3D "number") {
+ scaleFactor =3D zoomParams;
+ } else if (d3.event !=3D=3D null) {
+ scaleFactor =3D d3.event.scale;
+ }
+ }
+ if (scaleFactor < self.MIN_SCALE || scaleFactor > self.MAX_SCALE) =
{ return;}
+ self.zoomFunc.scale(scaleFactor); //update scale Factor
+ zoomStatement =3D "translate(" + self.margins[3] + "," + self.mar=
gins[0] + ")" +
+ " scale(" + scaleFactor + ")";
+
+ // Do manual translation.
+ if( d3.event !=3D=3D null) {
+ translateStatement =3D "translate(" + d3.event.translate + ")";
+ } else {
+ if(typeof translateParams !=3D=3D "undefined") {
+ var x =3D translateParams.split(",")[0];
+ var y =3D translateParams.split(",")[1];
+ if (!isNaN(x) && !isNaN(y)){
+ translationCoor =3D [translationCoor[0] + parseFloat(x=
), translationCoor[1] + parseFloat(y)];
+ }
+ }
+ self.zoomFunc.translate(translationCoor); // update zoomFunc
+ translateStatement =3D "translate(" + translationCoor + ")";
+ }
+
+ self.phyloTree.set("scaleFactor", scaleFactor);
+ self.phyloTree.set("translate", translationCoor);
+ self.vis.attr("transform", translateStatement + zoomStatement); //=
refers to the view that we are actually zooming
+ },
+
+
+ reloadViz : function() {
+ /**
+ * Primes the Ajax URL to load another Nexus tree
+ */
+ var self =3D this,
+ treeIndex =3D $("#phylovizNexSelector :selected").val(),
+ dataset_id =3D self.phyloTree.get("dataset_id"),
+ url =3D "phyloviz/getJsonData?dataset_id=3D" + dataset_id + "&=
treeIndex=3D" + String(treeIndex);
+ $.getJSON(url, function(packedJson){
+ window.initPhyloViz(packedJson.data, packedJson.config);
+ });
+ }
+});
+
+
+var HeaderButtons =3D Backbone.View.extend({
+
+ initialize : function(phylovizView){
+ var self =3D this;
+ self.phylovizView =3D phylovizView;
+
+        // Clean-up code, in case the class is initialized more than once
+ $("#panelHeaderRightBtns").empty();
+ $("#phyloVizNavBtns").empty();
+ $("#phylovizNexSelector").off();
+
+ self.initNavBtns();
+ self.initRightHeaderBtns();
+
+        // Initialize a tree selector in the case of nexus
+ $("#phylovizNexSelector").off().on("change", function() {self.phy=
lovizView.reloadViz();} );
+
+ },
+
+ initRightHeaderBtns : function(){
+ var self =3D this;
+
+ rightMenu =3D create_icon_buttons_menu([
+ { icon_class: 'gear', title: 'PhyloViz Settings', on_click: fu=
nction(){
+ $("#SettingsMenu").show();
+ self.settingsMenu.updateUI();
+ } },
+ { icon_class: 'disk', title: 'Save visualization', on_click: f=
unction() {
+ var nexSelected =3D $("#phylovizNexSelector option:selecte=
d").text();
+ if(nexSelected) {
+ self.phylovizView.phyloTree.set("title", nexSelected);
+ }
+ self.phylovizView.phyloTree.save();
+ } },
+ { icon_class: 'chevron-expand', title: 'Search / Edit Nodes', =
on_click: function() {
+ $("#nodeSelectionView").show();
+ } },
+ { icon_class: 'information', title: 'Phyloviz Help', on_click:=
function() {
+ window.open('http://wiki.g2.bx.psu.edu/Learn/Visualization=
/PhylogeneticTree');
+ // https://docs.google.com/document/d/1AXFoJgEpxr21H3LICRs=
3EyMe1B1X_KFPouzIgrCz3zk/edit
+ } }
+ ],
+ {
+ tooltip_config: { placement: 'bottom' }
+ });
+ $("#panelHeaderRightBtns").append(rightMenu.$el);
+ },
+
+ initNavBtns: function() {
+ var self =3D this,
+ navMenu =3D create_icon_buttons_menu([
+ { icon_class: 'zoom-in', title: 'Zoom in', on_click: funct=
ion() {
+ self.phylovizView.zoomAndPan({ zoom : "+"});
+ } },
+ { icon_class: 'zoom-out', title: 'Zoom out', on_click: fun=
ction() {
+ self.phylovizView.zoomAndPan({ zoom : "-"});
+ } },
+ { icon_class: 'arrow-circle', title: 'Reset Zoom/Pan', on_=
click: function() {
+ self.phylovizView.zoomAndPan({ zoom : "reset"});
+ } }
+ ],
+ {
+ tooltip_config: { placement: 'bottom' }
+ });
+ $("#phyloVizNavBtns").append(navMenu.$el);
+ }
+});
+
+
+var SettingsMenu =3D UserMenuBase.extend({
+
+ className: 'Settings',
+
+ initialize: function(options){
+        // settings needs to interact directly with the phyloviz model, so it gets access to it.
+ var self =3D this;
+ self.phyloTree =3D options.phyloTree;
+ self.el =3D $("#SettingsMenu");
+ self.inputs =3D {
+ separation : $("#phyloVizTreeSeparation"),
+ leafHeight : $("#phyloVizTreeLeafHeight"),
+ fontSize : $("#phyloVizTreeFontSize")
+ };
+
+ //init all buttons of settings
+ $("#settingsCloseBtn").off().on("click", function() { self.el.hide=
(); });
+ $("#phylovizResetSettingsBtn").off().on("click", function() { self=
.resetToDefaults(); });
+ $("#phylovizApplySettingsBtn").off().on("click", function() { self=
.apply(); });
+ },
+
+ apply : function(){
+ /**
+ * Applying user values to phylotree model.
+ */
+ var self =3D this;
+ if (!self.isAcceptableValue(self.inputs["separation"], 50, 2500) ||
+ !self.isAcceptableValue(self.inputs["leafHeight"], 5, 30) ||
+ !self.isAcceptableValue(self.inputs["fontSize"], 5, 20)){
+ return;
+ }
+ $.each(self.inputs, function(key, $input){
+ self.phyloTree.set(key, $input.val());
+ });
+ },
+ updateUI : function(){
+ /**
+         * Called to update the input values to those stored in the model
+ */
+ var self =3D this;
+ $.each(self.inputs, function(key, $input){
+ $input.val(self.phyloTree.get(key));
+ });
+ },
+ resetToDefaults : function(){
+ /**
+         * Resets the values of the phyloTree model to their defaults
+         */
+        $(".bs-tooltip").remove();        // just in case the tooltip was not removed
+ var self =3D this;
+ $.each(self.phyloTree.defaults, function(key, value) {
+ self.phyloTree.set(key, value);
+ });
+ self.updateUI();
+ },
+
+ render: function(){
+
+ }
+
+});
+
+
+var NodeSelectionView =3D UserMenuBase.extend({
+ /**
+ * View for inspecting node properties and editing them
+ */
+ className: 'Settings',
+
+ initialize : function (options){
+ var self =3D this;
+ self.el =3D $("#nodeSelectionView");
+ self.phyloTree =3D options.phyloTree;
+
+ self.UI =3D {
+ enableEdit : $('#phylovizEditNodesCheck'),
+ saveChanges : $('#phylovizNodeSaveChanges'),
+ cancelChanges : $("#phylovizNodeCancelChanges"),
+ name : $("#phyloVizSelectedNodeName"),
+ dist : $("#phyloVizSelectedNodeDist"),
+ annotation : $("#phyloVizSelectedNodeAnnotation")
+ };
+
+ self.valuesOfConcern =3D {
+ name : null,
+ dist : null,
+ annotation : null
+        }; // temporarily stores the values in case the user changes their mind
+
+ //init UI buttons
+ $("#nodeSelCloseBtn").off().on("click", function() { self.el.hide(=
); });
+ self.UI.saveChanges.off().on("click", function(){ self.updateNodes=
(); });
+ self.UI.cancelChanges.off().on("click", function(){ self.cancelCha=
nges(); });
+
+ (function ($) {
+            // extending jquery with a fxn for enabling and disabling input elements.
+ $.fn.enable =3D function (isEnabled) {
+ return $(this).each(function () {
+ if(isEnabled){
+ $(this).removeAttr('disabled');
+ } else {
+ $(this).attr('disabled', 'disabled');
+ }
+ });
+ };
+ })(jQuery);
+
+ self.UI.enableEdit.off().on("click", function () {
+ self.toggleUI();
+ });
+ },
+
+ toggleUI : function(){
+ /**
+ * For turning on and off the child elements
+ */
+ var self =3D this,
+ checked =3D self.UI.enableEdit.is(':checked');
+
+ !checked ? self.cancelChanges() : "";
+
+ $.each(self.valuesOfConcern, function(key, value) {
+ self.UI[key].enable(checked);
+ });
+ if(checked){
+ self.UI.saveChanges.show();
+ self.UI.cancelChanges.show();
+ } else {
+ self.UI.saveChanges.hide();
+ self.UI.cancelChanges.hide();
+ }
+
+ },
+
+ cancelChanges : function() {
+ /**
+         * Reverting to previous values in case the user changes their mind
+ */
+ var self =3D this,
+ node =3D self.phyloTree.get("selectedNode");
+ if (node){
+ $.each(self.valuesOfConcern, function(key, value) {
+ self.UI[key].val(node[key]);
+ });
+ }
+ },
+
+ updateNodes : function (){
+ /**
+ * Changing the data in the underlying tree with user-specified va=
lues
+ */
+ var self =3D this,
+ node =3D self.phyloTree.get("selectedNode");
+ if (node){
+ if (!self.isAcceptableValue(self.UI.dist, 0, 1) ||
+ self.hasIllegalJsonCharacters(self.UI.name) ||
+ self.hasIllegalJsonCharacters(self.UI.annotation) ) {
+ return;
+ }
+ $.each(self.valuesOfConcern, function(key, value) {
+                node[key] = self.UI[key].val();
+ });
+ self.phyloTree.set("nodeAttrChangedTime", new Date());
+ } else {
+ alert("No node selected");
+ }
+ }
+
+
+});
+
+
+
+var PhyloVizSearch =3D UserMenuBase.extend({
+ /**
+     * Initializes the search panel on phyloviz and handles its user interaction
+     * It allows the user to search the entire tree based on some qualifier, like dist <= val.
+     */
+ initialize : function () {
+ var self =3D this;
+
+ $("#phyloVizSearchBtn").on("click", function(){
+ var searchTerm =3D $("#phyloVizSearchTerm"),
+ searchConditionVal =3D $("#phyloVizSearchCondition").val()=
.split("-"),
+ attr =3D searchConditionVal[0],
+ condition =3D searchConditionVal[1];
+ self.hasIllegalJsonCharacters(searchTerm);
+
+ if (attr =3D=3D=3D "dist"){
+ self.isAcceptableValue(searchTerm, 0, 1);
+ }
+ self.searchTree(attr, condition, searchTerm.val());
+ });
+ },
+
+ searchTree : function (attr, condition, val){
+ /**
+ * Searches the entire tree and will highlight the nodes that matc=
h the condition in green
+ */
+ d3.selectAll("g.node")
+ .classed("searchHighlight", function(d){
+ var attrVal =3D d[attr];
+ if (typeof attrVal !=3D=3D "undefined" && attrVal !=3D=3D =
null){
+ if (attr =3D=3D=3D "dist"){
+ switch (condition) {
+ case "greaterEqual":
+ return attrVal >=3D +val;
+ case "lesserEqual":
+ return attrVal <=3D +val;
+ default:
+ return;
+ }
+
+ } else if (attr =3D=3D=3D "name" || attr =3D=3D=3D "an=
notation") {
+ return attrVal.toLowerCase().indexOf(val.toLowerCa=
se()) !=3D=3D -1;
+ }
+ }
+ });
+ }
+});
\ No newline at end of file
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 templates/root/history.mako
--- a/templates/root/history.mako
+++ b/templates/root/history.mako
@@ -272,6 +272,17 @@
}
=20
init_trackster_links();
+
+ function init_phyloviz_links() {
+ // PhyloViz links
+        // Add PhyloViz browser functionality
+ $(".phyloviz-add").live("click", function() {
+ var dataset =3D this,
+ dataset_jquery =3D $(this);
+ window.parent.location =3D dataset_jquery.attr("new-url");
+ });
+ }
+ init_phyloviz_links();
=20
// History rename functionality.
async_save_text("history-name-container", "history-name", "${h.url_for=
( controller=3D"/history", action=3D"rename_async", id=3Dtrans.security.enc=
ode_id(history.id) )}", "new_name", 18);
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -29,6 +29,9 @@
## Render the dataset `data` as history item, using `hid` as the displayed=
id
<%def name=3D"render_dataset( data, hid, show_deleted_on_refresh =3D False=
, for_editing =3D True, display_structured =3D False )"><%
+
+ from galaxy.datatypes.xml import Phyloxml
+ from galaxy.datatypes.data import Newick, Nexus
dataset_id =3D trans.security.encode_id( data.id )
=20
if data.state in ['no state','',None]:
@@ -230,6 +233,14 @@
action-url=3D"${h.url_for( controller=3D't=
racks', action=3D'browser', dataset_id=3Ddataset_id)}"
new-url=3D"${h.url_for( controller=3D'trac=
ks', action=3D'index', dataset_id=3Ddataset_id, default_dbkey=3Ddata.dbkey)=
}" title=3D"View in Trackster"></a>
%endif
+ <%
+ isPhylogenyData =3D isinstance(data.datatype, =
(Phyloxml, Nexus, Newick))
+ %>
+ %if isPhylogenyData:
+ <a href=3D"javascript:void(0)" class=3D"i=
con-button chart_curve phyloviz-add"
+ action-url=3D"${h.url_for( controller=
=3D'phyloviz', action=3D'-', dataset_id=3Ddataset_id)}"
+ new-url=3D"${h.url_for( controller=3D'p=
hyloviz', action=3D'index', dataset_id=3Ddataset_id)}" title=3D"View in Phy=
loviz"></a>
+ %endif
%if trans.user:
%if not display_structured:
<div style=3D"float: right">
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 templates/visualization/phyloviz.mako
--- /dev/null
+++ b/templates/visualization/phyloviz.mako
@@ -0,0 +1,320 @@
+<%inherit file=3D"/webapps/galaxy/base_panels.mako"/>
+##
+<%def name=3D"init()">
+ <%
+ self.has_left_panel=3DFalse
+ self.has_right_panel=3DFalse
+ self.active_view=3D"visualization"
+ self.message_box_visible=3DFalse
+ %>
+</%def>
+
+<%def name=3D"stylesheets()">
+ ${parent.stylesheets()}
+ <style>
+
+ .node circle {
+ cursor: pointer;
+ fill: #fff;
+ stroke: steelblue;
+ stroke-width: 1.5px;
+ }
+
+ .node.searchHighlight circle {
+ stroke-width: 3px;
+ stroke: #7adc26;
+ }
+
+ .node.selectedHighlight circle {
+ stroke-width: 3px;
+ stroke: #dc143c;
+ }
+
+ path.link {
+ fill: none;
+ stroke: #B5BBFF;
+ stroke-width: 4.0px;
+ }
+
+
+ div #phyloVizNavContainer{
+ text-align: center;
+ width: 100%;
+ height: 0px;
+ }
+
+ div #phyloVizNav{
+ font-weight: bold;
+ display: inline-block;
+ background: transparent;
+ top: -2em;
+ position: relative;
+ }
+
+ div .navControl{
+ float: left;
+ }
+
+ div#FloatingMenu {
+ left: 0;
+ top: 15%;
+ width:20%;
+ z-index:100;
+ padding: 5px;
+
+ }
+
+ div#SettingsMenu {
+ width: 25%;
+ top: 350px;
+
+ }
+
+ div#nodeSelectionView {
+ width: 25%;
+ top:70px;
+ }
+
+ .Panel {
+ right: 0%;
+ z-index: 101;
+ position: fixed;
+
+ ## Borrowed from galaxy modal_dialogues
+ background-color: white;
+ border: 1px solid #999;
+ border: 1px solid rgba(0, 0, 0, 0.3);
+        -webkit-border-radius: 6px;
+        -moz-border-radius: 6px;
+        border-radius: 6px;
+        -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+        -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+        box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+        -webkit-background-clip: padding-box;
+        -moz-background-clip: padding-box;
+        background-clip: padding-box;
+ }
+
+ span.PhylovizCloseBtn{
+ cursor: pointer;
+ float : right;
+ }
+
+ #PhyloViz{
+ width: 100%;
+ height: 95%;
+ }
+
+ h2.PhyloVizMenuTitle{
+ color: white;
+ }
+
+ ## Settings Menu
+ .SettingMenuRows{
+ margin: 2px 0 2px 0;
+ }
+
+
+ ## Helper Styles
+ .PhyloVizFloatLeft{
+ float : left;
+ }
+ .icon-button.zoom-in,.icon-button.zoom-out{display:inline-block;he=
ight:16px;width:16px;margin-bottom:-3px;cursor:pointer;}
+ .icon-button.zoom-out{background:transparent url(../images/fugue/m=
agnifier-zoom-out.png) center center no-repeat;}
+ .icon-button.zoom-in{margin-left:10px;background:transparent url(.=
./images/fugue/magnifier-zoom.png) center center no-repeat;}
+
+ </style>
+</%def>
+
+
+<%def name=3D"javascripts()">
+ ${parent.javascripts()}
+ ${h.js( "galaxy.panels", "libs/d3", "mvc/data", "viz/visualization", "=
viz/phyloviz")}
+</%def>
+
+
+
+<%def name=3D"center_panel()">
+
+ <div class=3D"unified-panel-header" unselectable=3D"on">
+ <div class=3D"unified-panel-header-inner">
+ <div style=3D"float:left;" id=3D"title"></div>
+ <div style=3D"float:right;" id=3D"panelHeaderRightBtns"></div>
+ </div>
+ <div style=3D"clear: both"></div>
+ </div>
+
+
+ <div id=3D"phyloVizNavContainer">
+ <div id=3D"phyloVizNav">
+ %if config["ext"] =3D=3D "nex" and not config["saved_visualiza=
tion"]:
+ <div id =3D "phylovizNexInfo" class=3D"navControl">
+ <p>Select a tree to view:
+ <select id=3D"phylovizNexSelector">
+ % for tree, index in config["trees"]:
+ <option value=3D"${index}">${tree}</option>
+ % endfor
+ </select>
+ </p>
+ </div>
+ %endif
+ <div id=3D"phyloVizNavBtns" class=3D"navControl">
+ </div>
+ <div class=3D"navControl">
+ <p> | Alt+click to select nodes</p>
+ </div>
+
+
+ </div>
+
+ </div>
+
+ ## Node Selection Menu
+ <div id=3D"nodeSelectionView" class=3D"Panel">
+ <div class=3D"modal-header">
+ <h3 class=3D"PhyloVizMenuTitle">Search / Edit Nodes :
+ <span class=3D"PhylovizCloseBtn" id=3D"nodeSelCloseBtn"> X=
</span>
+ </h3>
+ </div>
+
+ <div class=3D"modal-body">
+
+ <div class=3D"SettingMenuRows">
+ Search for nodes with:
+ <select id=3D"phyloVizSearchCondition" style=3D"width: 55%=
">
+ <option value=3D"name-containing">Name (containing)</o=
ption>
+ <option value=3D"annotation-containing">Annotation (co=
ntaining)</option>
+ <option value=3D"dist-greaterEqual">Distance (>=3D)</o=
ption>
+ <option value=3D"dist-lesserEqual">Distance (<=3D)</op=
tion>
+ </select>
+ <input type=3D"text" id=3D"phyloVizSearchTerm" value=3D"N=
one" size=3D"15" displayLabel=3D"Distance">
+
+ <div class=3D"SettingMenuRows" style=3D"text-align: center=
;">
+ <button id=3D"phyloVizSearchBtn" > Search! </button>
+ </div>
+ </div>
+
+ <br/>
+
+ <div class=3D"SettingMenuRows">
+ Name: <input type=3D"text" id=3D"phyloVizSelectedNodeName"=
value=3D"None" size=3D"15" disabled=3D"disabled" >
+ </div>
+ <div class=3D"SettingMenuRows">
+ Dist: <input type=3D"text" id=3D"phyloVizSelectedNodeDist"=
value=3D"None" size=3D"15" disabled=3D"disabled" displayLabel=3D"Distance">
+ </div>
+ <div class=3D"SettingMenuRows">
+ Annotation:
+ <textarea id=3D"phyloVizSelectedNodeAnnotation" disabled=
=3D"disabled" ></textarea>
+ </div>
+ <div class=3D"SettingMenuRows">
+ Edit: <input type=3D"checkbox" id=3D"phylovizEditNodesChec=
k" value=3D"You can put custom annotations here and it will be saved">
+ <button id=3D"phylovizNodeSaveChanges" style=3D"display: n=
one;"> Save edits</button>
+ <button id=3D"phylovizNodeCancelChanges" style=3D"display:=
none;"> Cancel</button>
+ </div>
+ </div>
+ </div>
+
+ ## Settings Menus
+ <div id=3D"SettingsMenu" class=3D"Panel">
+ <div class=3D"modal-header">
+ <h3 class=3D"PhyloVizMenuTitle">Phyloviz Settings:
+ <span class=3D"PhylovizCloseBtn" id=3D"settingsCloseBtn"> =
X </span>
+ </h3>
+ </div>
+ <div class=3D"modal-body">
+ <div class=3D"SettingMenuRows">
+ Phylogenetic Spacing (px per unit): <input id=3D"phyloVizT=
reeSeparation" type=3D"text" value=3D"250" size=3D"10" displayLabel=3D"Phyl=
ogenetic Separation"> (50-2500)
+ </div>
+ <div class=3D"SettingMenuRows">
+ Vertical Spacing (px): <input type=3D"text" id=3D"phyloViz=
TreeLeafHeight" value=3D"18" size=3D"10" displayLabel=3D"Vertical Spacing">=
(5-30)
+ </div>
+ <div class=3D"SettingMenuRows">
+ Font Size (px): <input type=3D"text" id=3D"phyloVizTreeFon=
tSize" value=3D"12" size=3D"4" displayLabel=3D"Font Size"> (5-20)
+ </div>
+
+ </div>
+ <div class=3D"modal-footer">
+ <button id=3D"phylovizResetSettingsBtn" class=3D"PhyloVizFloat=
Left" > Reset </button>
+ <button id=3D"phylovizApplySettingsBtn" class=3D"PhyloVizFloat=
Right" > Apply </button>
+ </div>
+ </div>
+
+
+
+
+
+
+ <div class=3D"Panel" id=3D"FloatingMenu" style=3D"display: None;">
+
+ <h2>PhyloViz (<a onclick=3D"displayHelp()" href=3D"javascript:void=
(0);">?</a>)</h2>
+ <div style=3D"display: none;">
+ <h2>Summary of Interactions and Functions:</h2>
+ <div class=3D"hint">1. Expansion of Nodes: click or option-cli=
ck to expand or collapse</div>
+ <div class=3D"hint">2. Zooming and translation: mousewheel, bu=
ttons, click and drag, double click. Reset</div>
+ <div class=3D"hint">3. Tooltip: Displays "Name and Size" on mo=
useOver on nodes</div>
+ <div class=3D"hint">4. Minimap: Currently displays an exact bu=
t scaled down replicate of the tree, orange bounding box is correct for lin=
ear only<br/>
+ Can be switched on or off</div>
+ <div class=3D"hint">5. Changing Layouts: Able to change betwee=
n circular and linear layouts.</div>
+
+ </div>
+
+ <h5>Scaling & Rotation:</h5>
+ <button id=3D"phylovizZoomInBtn" class=3D"" > + </button>
+ <button id=3D"phylovizZoomOutBtn" class=3D"" > - </button>
+
+
+ <h5>Translation:</h5>
+ <button id=3D"phylovizTranslateUpBtn" > Up </button>
+ <button id=3D"phylovizTranslateDownBtn" > Down </button>
+ <br/>
+ <button id=3D"phylovizTranslateLeftBtn" > Left </button>
+ <button id=3D"phylovizTranslateRightBtn" > Right </button>
+
+
+
+ <h5>Others:</h5>
+ <button id=3D"phylovizResetBtn" > Reset Zoom/Translate </button>
+ <button id=3D"phylovizSaveBtn" > Save vizualization </button>
+ <button id=3D"phylovizOpenSettingsBtn" > Settings </button>
+ </div>
+
+ <div id=3D"PhyloViz" >
+ </div>
+
+ <script type=3D"text/javascript">
+
+ function initPhyloViz(data, config) {
+ var phyloviz;
+
+ // -- Initialization code |-->
+ phyloviz =3D new PhylovizView({
+ data: data,
+ layout : "Linear",
+ config : config
+ });
+
+ // -- Render viz. --
+ phyloviz.render();
+
+ }
+
+        $(function firstVizLoad(){       // called when the viz is loaded for the first time
+ var config =3D JSON.parse( '${ h.to_json_string( config )}');
+ var data =3D JSON.parse('${h.to_json_string(data)}');
+ initPhyloViz(data, config);
+ });
+
+ </script>
+
+</%def>
+
+
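
Putting the controller and this template together, the config dict handed to phyloviz.mako ends up looking roughly like the sketch below (the "trees" entry is assumed to be supplied by the data provider for Nexus input only, and the encoded id is a hypothetical placeholder):

    config = {
        "title": "1_nexus.nex",               # dataset.display_name()
        "ext": "nex",                         # dataset extension
        "dataset_id": "ENCODED_DATASET_ID",   # hypothetical encoded id
        "treeIndex": 0,
        "saved_visualization": False,
        "trees": [("Hydnum_ITS_result", 0)],  # assumed (name, index) pairs iterated by the selector
    }
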
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 test-data/visualization/phyloviz/1_nexus.nex
--- /dev/null
+++ b/test-data/visualization/phyloviz/1_nexus.nex
@@ -0,0 +1,87 @@
+#NEXUS
+
+[!This data set was downloaded from TreeBASE, a relational database of phy=
logenetic knowledge. TreeBASE has been supported by the NSF, Harvard Univer=
sity, Yale University, SDSC and UC Davis. Please do not remove this acknowl=
edgment from the Nexus file.
+
+
+Generated on June 12, 2012; 23:00 GMT
+
+TreeBASE (cc) 1994-2008
+
+Study reference:
+Olariaga I., Grebenc T., Salcedo I., & Mart=C3=ADn M.P. 2012. Two new spec=
ies of Hydnum
+with ovoid basidiospores: H. ovoideisporum and H. vesterholtii. Mycologia,=
.
+
+TreeBASE Study URI: http://purl.org/phylo/treebase/phylows/study/TB2:S128=
31]
+
+BEGIN TREES;
+ TITLE Hydnum_ITS_result;
+ LINK TAXA =3D Taxa1;
+ TRANSLATE
+ 1 Hydnum_aff_ellipsosporum_RUFHYD1_AJ535304,
+ 2 Hydnum_albidum_ALB_AY817135,
+ 3 Hydnum_albidum_ALBHYD1_AJ534974,
+ 4 Hydnum_albomagnum_ALM_DQ218305,
+ 5 Hydnum_ellipsosporum_ELL_AY817138,
+ 6 Hydnum_ellipsosporum_RUFHYD8_AJ547882,
+ 7 Hydnum_ovoidisporum_12317BIOFungi,
+ 8 Hydnum_ovoidisporum_12683BIOFungi,
+ 9 Hydnum_ovoidisporum_12902BIOFungi,
+ 10 Hydnum_ovoidisporum_14130BIOFungi,
+ 11 Hydnum_repandum_RE1_REP1_AJ889978,
+ 12 Hydnum_repandum_RE1_REP2_AJ889949,
+ 13 Hydnum_repandum_RE1_REP3_AY817136,
+ 14 Hydnum_repandum_RE1_REP6_UDB000025,
+ 15 Hydnum_repandum_RE1_REP7_UDB000096,
+ 16 Hydnum_repandum_RE1_REP8_UDB001479,
+ 17 Hydnum_repandum_RE1_REPHYD10_AJ547888,
+ 18 Hydnum_repandum_RE1_REPHYD11_AJ547886,
+ 19 Hydnum_repandum_RE1_REPHYD1_AJ547871,
+ 20 Hydnum_repandum_RE1_REPHYD3_AJ547874,
+ 21 Hydnum_repandum_RE1_REPHYD4_AJ547876,
+ 22 Hydnum_repandum_RE1_REPHYD5_AJ547875,
+ 23 Hydnum_repandum_RE1_REPHYD6_AJ547877,
+ 24 Hydnum_repandum_RE1_REPHYD7_AJ547878,
+ 25 Hydnum_repandum_RE1_REPHYD8_AJ547881,
+ 26 Hydnum_repandum_RE1_REPHYD9_AJ547883,
+ 27 Hydnum_repandum_RE1_RUFHYD10_AJ547866,
+ 28 Hydnum_repandum_RE1_RUFHYD11_AJ547889,
+ 29 Hydnum_repandum_RE1_RUFHYD9_AJ535305,
+ 30 Hydnum_rufescens_RU1_RUFHYD5_AJ547869,
+ 31 Hydnum_rufescens_RU1_RUFHYD6_AJ547884,
+ 32 Hydnum_rufescens_RU1_RUFHYD7_AJ547870,
+ 33 Hydnum_rufescens_RU2_REP5_DQ367902,
+ 34 Hydnum_rufescens_RU2_RUFHYD2_AJ535301,
+ 35 Hydnum_rufescens_RU3_12901BIOFungi,
+ 36 Hydnum_rufescens_RU3_REP4_DQ218306,
+ 37 Hydnum_rufescens_RU3_RUFHYD3_AJ535303,
+ 38 Hydnum_rufescens_RU3_RUFHYD4_AJ535302,
+ 39 Hydnum_rufescens_RU4_RUFHYD12_AJ839969,
+ 40 Hydnum_rufescens_RU4_RUFHYD16_AJ547868,
+ 41 Hydnum_rufescens_RU4_RUFHYD17_AJ547885,
+ 42 Hydnum_rufescens_RU4_UMB1_DQ367903,
+ 43 Hydnum_rufescens_RU5_12760BIOFungi,
+ 44 Hydnum_rufescens_RU5_ALBHYD2_AJ534975,
+ 45 Hydnum_rufescens_RU5_RUF2_DQ658890,
+ 46 Hydnum_rufescens_RU5_RUF4_UDB001465,
+ 47 Hydnum_rufescens_RU5_RUF5_UDB002423,
+ 48 Hydnum_rufescens_RU5_RUFHYD14_AJ547872,
+ 49 Hydnum_rufescens_RU6_RUF1_AY817137,
+ 50 Hydnum_rufescens_RU6_RUFHYD15_AJ547867,
+ 51 Hydnum_rufescens_wrong_taxonomy_RUF3_AM087246,
+ 52 Hydnum_umbilicatum_UMBHYD1_AJ534972,
+ 53 Hydnum_umbilicatum_UMBHYD2_AJ534973,
+ 54 Hydnum_vesterholtii_10429BIOFungi,
+ 55 Hydnum_vesterholtii_10452BIOFungi,
+ 56 Hydnum_vesterholtii_12330BIOFungi,
+ 57 Hydnum_vesterholtii_12904BIOFungi,
+ 58 Hydnum_vesterholtii_REPHYD12A_AJ547879,
+ 59 Hydnum_vesterholtii_REPHYD12C_AJ783968,
+ 60 Hydnum_vesterholtii_REPHYD13_AJ547887,
+ 61 Sistotrema_muscicola_AJ606040,
+ 62 Sistotrema_alboluteum_AJ606042;
+ TREE Fig._2 =3D [&R] ((62:100.0,(51:100.0,61:100.0):93.269997):49.66=
,((4:100.0,(2:100.0,3:100.0):100.0):60.639999,(((56:100.0,58:100.0,59:100.0=
):84.639999,(54:100.0,55:100.0,57:100.0,60:100.0):98.330002):92.5,(((30:100=
.0,31:100.0,32:100.0):100.0,(11:100.0,12:100.0,13:100.0,14:100.0,15:100.0,1=
6:100.0,17:100.0,18:100.0,19:100.0,20:100.0,21:100.0,22:100.0,23:100.0,24:1=
00.0,25:100.0,26:100.0):99.93):68.690002,(((33:100.0,34:100.0):49.8050005,(=
35:100.0,36:100.0,37:100.0,38:100.0):99.989998):49.8050005,((7:100.0,8:100.=
0,9:100.0,10:100.0):100.0,(42:100.0,(39:100.0,40:100.0,41:100.0):98.449997)=
:86.790001,((52:100.0,53:100.0):99.93,(1:100.0,(5:97.47999949999999,6:100.0=
):97.47999949999999):100.0):53.310001,(27:100.0,(28:100.0,29:100.0,49:100.0=
,50:100.0):47.404999):47.404999,(43:100.0,44:100.0,45:100.0,46:100.0,47:100=
.0,48:100.0):99.459999):29.245001):29.245001):51.580002):61.540001):49.66);
+ TREE PAUP_1 =3D [&R] ((62:100.0,(51:100.0,61:100.0):93.269997):49.66=
,((4:100.0,(3:100.0,2:100.0):100.0):60.639999,(((58:100.0,59:100.0,56:100.0=
):84.639999,(60:100.0,54:100.0,55:100.0,57:100.0):98.330002):92.5,(((30:100=
.0,31:100.0,32:100.0):100.0,(19:100.0,20:100.0,21:100.0,22:100.0,23:100.0,2=
4:100.0,25:100.0,26:100.0,17:100.0,18:100.0,11:100.0,12:100.0,13:100.0,14:1=
00.0,15:100.0,16:100.0):99.93):68.690002,((34:100.0,33:100.0):99.610001,(37=
:100.0,38:100.0,35:100.0,36:100.0):99.989998,(42:100.0,(39:100.0,41:100.0,4=
0:100.0):98.449997):86.790001,(8:100.0,7:100.0,9:100.0,10:100.0):100.0,((52=
:100.0,53:100.0):99.93,(1:100.0,(5:100.0,6:100.0):94.959999):100.0):53.3100=
01,(29:100.0,27:100.0,28:100.0,50:100.0,49:100.0):94.809998,(44:100.0,43:10=
0.0,48:100.0,45:100.0,46:100.0,47:100.0):99.459999):58.490002):51.580002):6=
1.540001):49.66);
+
+
+
+END;
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 test-data/visualization/phyloviz/2_nexus.nex
--- /dev/null
+++ b/test-data/visualization/phyloviz/2_nexus.nex
@@ -0,0 +1,96 @@
+#NEXUS
+
+[!This data set was downloaded from TreeBASE, a relational database of phy=
logenetic knowledge. TreeBASE has been supported by the NSF, Harvard Univer=
sity, Yale University, SDSC and UC Davis. Please do not remove this acknowl=
edgment from the Nexus file.
+
+
+Generated on August 18, 2012; 12:14 GMT
+
+TreeBASE (cc) 1994-2008
+
+Study reference:
+Naish D., Dyke G., Cau A., & Escuilli=C3=A9 F. 2012. A gigantic bird from =
the Upper Cretaceous
+of Central Asia. Biology Letters, 8(1): 97-100.
+
+TreeBASE Study URI: http://purl.org/phylo/treebase/phylows/study/TB2:S130=
08]
+
+BEGIN TREES;
+ TITLE Imported_trees;
+ LINK TAXA =3D Taxa1;
+ TRANSLATE
+ 1 Herrerasaurus,
+ 2 Tawa,
+ 3 Allosaurus,
+ 4 Alvarezsaurus,
+ 5 Anchiornis,
+ 6 Archaeopteryx,
+ 7 Archaeorhynchus,
+ 8 Avimimus,
+ 9 Baryonyx,
+ 10 Beipiaosaurus,
+ 11 Caenagnathus,
+ 12 Caudipteryx,
+ 13 Ceratosaurus,
+ 14 Chirostenotes,
+ 15 Citipati,
+ 16 Compsognathus,
+ 17 Confuciusornis,
+ 18 Dilong,
+ 19 Dilophosaurus,
+ 20 Epidendrosaurus,
+ 21 Epidexipteryx,
+ 22 Erlicosaurus,
+ 23 Eustreptospondylus,
+ 24 Gallimimus,
+ 25 Garudimimus,
+ 26 Gobipteryx,
+ 27 Guanlong,
+ 28 Haplocheirus,
+ 29 Harpymimus,
+ 30 Hebeiornis,
+ 31 Hongshanornis,
+ 32 Huoshanornis,
+ 33 Iberomesornis,
+ 34 Ichthyornis,
+ 35 Incisivosaurus,
+ 36 Jeholornis,
+ 37 Limusaurus,
+ 38 Longicrusavis,
+ 39 Longipteryx,
+ 40 Longirostravis,
+ 41 Majungasaurus,
+ 42 Masiakasaurus,
+ 43 Monolophosaurus,
+ 44 Mononykus,
+ 45 Neornithes,
+ 46 Ornitholestes,
+ 47 Ornithomimus,
+ 48 Patagonykus,
+ 49 Patagopteryx,
+ 50 Pelecanimimus,
+ 51 Pengornis,
+ 52 Protarchaeopteryx,
+ 53 Protopteryx,
+ 54 Rinchenia,
+ 55 Sapeornis,
+ 56 Segnosaurus,
+ 57 Shenzhousaurus,
+ 58 Shuvuuia,
+ 59 Sinornithosaurus,
+ 60 Sinosauropteryx,
+ 61 Sinovenator,
+ 62 Sinraptor,
+ 63 Syntarsus_kayentakatae,
+ 64 Troodon,
+ 65 Tyrannosaurus,
+ 66 Velociraptor,
+ 67 Yanornis,
+ 68 Yixianornis,
+ 69 Zhongjianornis,
+ 70 Zhongornis,
+ 71 Zuolong,
+ 72 Samrukia;
+ TREE Figure_1A =3D [&R] (1,(2,(((((43,(3,62)),(71,((46,((((28,(4,(48=
,(44,58)))),((((5,(61,(64,(59,66)))),(6,((36,(55,(69,(((7,34,45,49,72,(31,3=
8),(67,68)),(33,((32,((26,30),(39,40))),(51,53)))),(17,70))))),(20,21)))),(=
(11,(12,(8,(14,(15,54))))),(35,52))),(10,(22,56)))),(50,(57,(29,(25,(24,47)=
))))),(16,60))),(27,(18,65))))),(9,23)),(13,(41,(37,42)))),(19,63))));
+
+
+
+END;
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8=
b02df0ab383366955 test-data/visualization/phyloviz/3_phyloxml.xml
--- /dev/null
+++ b/test-data/visualization/phyloviz/3_phyloxml.xml
@@ -0,0 +1,257 @@
+<?xml version=3D"1.0" encoding=3D"UTF-8"?>
+<phyloxml xmlns:xsi=3D"http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation=3D"http://www.phyloxml.org http://www.phyloxml.org/1=
.10/phyloxml.xsd"
+ xmlns=3D"http://www.phyloxml.org">
+ <phylogeny rooted=3D"true">
+ <clade>
+ <clade>
+ <branch_length>0.18105</branch_length>
+ <confidence type=3D"unknown">89.0</confidence>
+ <clade>
+ <branch_length>0.07466</branch_length>
+ <confidence type=3D"unknown">32.0</confidence>
+ <clade>
+            <branch_length>0.26168</branch_length>
+            <confidence type="unknown">100.0</confidence>
+            <clade>
+              <branch_length>0.22058</branch_length>
+              <confidence type="unknown">89.0</confidence>
+              <clade>
+                <branch_length>0.28901</branch_length>
+                <confidence type="unknown">100.0</confidence>
+                <clade>
+                  <branch_length>0.06584</branch_length>
+                  <confidence type="unknown">100.0</confidence>
+                  <clade>
+                    <branch_length>0.02309</branch_length>
+                    <confidence type="unknown">43.0</confidence>
+                    <clade>
+                      <branch_length>0.0746</branch_length>
+                      <confidence type="unknown">100.0</confidence>
+                      <clade>
+                        <branch_length>0.02365</branch_length>
+                        <confidence type="unknown">88.0</confidence>
+                        <clade>
+                          <name>22_MOUSE</name>
+                          <branch_length>0.05998</branch_length>
+                          <taxonomy>
+                            <code>MOUSE</code>
+                          </taxonomy>
+                        </clade>
+                        <clade>
+                          <name>Apaf-1_HUMAN</name>
+                          <branch_length>0.01825</branch_length>
+                          <taxonomy>
+                            <code>HUMAN</code>
+                          </taxonomy>
+                        </clade>
+                      </clade>
+                      <clade>
+                        <name>12_CANFA</name>
+                        <branch_length>0.04683</branch_length>
+                        <taxonomy>
+                          <code>CANFA</code>
+                        </taxonomy>
+                      </clade>
+                    </clade>
+                    <clade>
+                      <name>11_CHICK</name>
+                      <branch_length>0.15226</branch_length>
+                      <taxonomy>
+                        <code>CHICK</code>
+                      </taxonomy>
+                    </clade>
+                  </clade>
+                  <clade>
+                    <name>16_XENLA</name>
+                    <branch_length>0.4409</branch_length>
+                    <taxonomy>
+                      <code>XENLA</code>
+                    </taxonomy>
+                  </clade>
+                </clade>
+                <clade>
+                  <branch_length>0.17031</branch_length>
+                  <confidence type="unknown">100.0</confidence>
+                  <clade>
+                    <branch_length>0.10929</branch_length>
+                    <confidence type="unknown">100.0</confidence>
+                    <clade>
+                      <name>14_FUGRU</name>
+                      <branch_length>0.02255</branch_length>
+                      <taxonomy>
+                        <code>FUGRU</code>
+                      </taxonomy>
+                    </clade>
+                    <clade>
+                      <name>15_TETNG</name>
+                      <branch_length>0.09478</branch_length>
+                      <taxonomy>
+                        <code>TETNG</code>
+                      </taxonomy>
+                    </clade>
+                  </clade>
+                  <clade>
+                    <name>17_BRARE</name>
+                    <branch_length>0.1811</branch_length>
+                    <taxonomy>
+                      <code>BRARE</code>
+                    </taxonomy>
+                  </clade>
+                </clade>
+              </clade>
+              <clade>
+                <branch_length>0.01594</branch_length>
+                <confidence type="unknown">53.0</confidence>
+                <clade>
+                  <branch_length>0.10709</branch_length>
+                  <confidence type="unknown">68.0</confidence>
+                  <clade>
+                    <name>1_BRAFL</name>
+                    <branch_length>0.26131</branch_length>
+                    <taxonomy>
+                      <code>BRAFL</code>
+                    </taxonomy>
+                  </clade>
+                  <clade>
+                    <name>18_NEMVE</name>
+                    <branch_length>0.38014</branch_length>
+                    <taxonomy>
+                      <code>NEMVE</code>
+                    </taxonomy>
+                  </clade>
+                </clade>
+                <clade>
+                  <name>23_STRPU</name>
+                  <branch_length>0.48179</branch_length>
+                  <taxonomy>
+                    <code>STRPU</code>
+                  </taxonomy>
+                </clade>
+              </clade>
+            </clade>
+            <clade>
+              <branch_length>0.34475</branch_length>
+              <confidence type="unknown">100.0</confidence>
+              <clade>
+                <name>26_STRPU</name>
+                <branch_length>0.36374</branch_length>
+                <taxonomy>
+                  <code>STRPU</code>
+                </taxonomy>
+                <sequence>
+                  <domain_architecture length="1319">
+                    <domain from="18" to="98" confidence="3.4E-5">Death</domain>
+                    <domain from="189" to="481" confidence="1.8E-10">NB-ARC</domain>
+                    <domain from="630" to="668" confidence="8.2E-5">WD40</domain>
+                  </domain_architecture>
+                </sequence>
+              </clade>
+              <clade>
+                <name>25_STRPU</name>
+                <branch_length>0.33137</branch_length>
+                <taxonomy>
+                  <code>STRPU</code>
+                </taxonomy>
+                <sequence>
+                  <domain_architecture length="1947">
+                    <domain from="143" to="227" confidence="7.4E-5">Death</domain>
+                    <domain from="227" to="550" confidence="2.0E-13">NB-ARC</domain>
+                    <domain from="697" to="736" confidence="7.9E-4">WD40</domain>
+                    <domain from="745" to="785" confidence="1.5">WD40</domain>
+                    <domain from="1741" to="1836" confidence="2.0">Adeno_VII</domain>
+                  </domain_architecture>
+                </sequence>
+              </clade>
+            </clade>
+          </clade>
+          <clade>
+            <branch_length>1.31498</branch_length>
+            <confidence type="unknown">100.0</confidence>
+            <clade>
+              <name>CED4_CAEEL</name>
+              <branch_length>0.13241</branch_length>
+              <taxonomy>
+                <code>CAEEL</code>
+              </taxonomy>
+              <sequence>
+                <domain_architecture length="714">
+                  <domain from="7" to="90" confidence="9.2E-14">CARD</domain>
+                  <domain from="116" to="442" confidence="5.8E-151">NB-ARC</domain>
+                </domain_architecture>
+              </sequence>
+            </clade>
+            <clade>
+              <name>31_CAEBR</name>
+              <branch_length>0.04777</branch_length>
+              <taxonomy>
+                <code>CAEBR</code>
+              </taxonomy>
+              <sequence>
+                <domain_architecture length="554">
+                  <domain from="1" to="75" confidence="0.0046">CARD</domain>
+                  <domain from="101" to="427" confidence="2.1E-123">NB-ARC</domain>
+                </domain_architecture>
+              </sequence>
+            </clade>
+          </clade>
+        </clade>
+        <clade>
+          <branch_length>0.13172</branch_length>
+          <confidence type="unknown">45.0</confidence>
+          <clade>
+            <branch_length>0.24915</branch_length>
+            <confidence type="unknown">95.0</confidence>
+            <clade>
+              <branch_length>0.76898</branch_length>
+              <confidence type="unknown">100.0</confidence>
+              <clade>
+                <name>28_DROPS</name>
+                <branch_length>0.1732</branch_length>
+                <taxonomy>
+                  <code>DROPS</code>
+                </taxonomy>
+                <sequence>
+                  <domain_architecture length="535">
+                    <domain from="112" to="399" confidence="1.4E-5">NB-ARC</domain>
+                  </domain_architecture>
+                </sequence>
+              </clade>
+              <clade>
+                <name>Dark_DROME</name>
+                <branch_length>0.18863</branch_length>
+                <taxonomy>
+                  <code>DROME</code>
+                </taxonomy>
+                <sequence>
+                  <domain_architecture length="1421">
+                    <domain from="108" to="397" confidence="2.1E-5">NB-ARC</domain>
+                  </domain_architecture>
+                </sequence>
+              </clade>
+            </clade>
+            <clade>
+              <name>29_AEDAE</name>
+              <branch_length>0.86398</branch_length>
+              <taxonomy>
+                <code>AEDAE</code>
+              </taxonomy>
+              <sequence>
+                <domain_architecture length="423">
+                  <domain from="109" to="421" confidence="9.3E-6">NB-ARC</domain>
+                </domain_architecture>
+              </sequence>
+            </clade>
+          </clade>
+          <clade>
+            <name>30_TRICA</name>
+            <branch_length>0.97698</branch_length>
+            <taxonomy>
+              <code>TRICA</code>
+            </taxonomy>
+          </clade>
+        </clade>
+      </clade>
+    </clade>
+  </phylogeny>
+</phyloxml>
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8b02df0ab383366955 test-data/visualization/phyloviz/4_newick.nhx
--- /dev/null
+++ b/test-data/visualization/phyloviz/4_newick.nhx
@@ -0,0 +1,33 @@
+(((BGIOSIBCA028421_ORYSA:0.423485[&&NHX:S=ORYSA:O=BGIOSIBCA028421.1:G=BGIOSIBCA028421],
+At5g41150_ARATH:0.273135[&&NHX:S=ARATH:O=At5g41150.1:G=At5g41150]
+):0.690991[&&NHX:S=Magnoliophyta:D=N:B=100],
+(rad16_SCHPO:0.718598[&&NHX:S=SCHPO:O=SPCC970.01:G=SPCC970.01],
+RAD1_YEAST:1.05456[&&NHX:S=YEAST:O=YPL022W.1:G=YPL022W]
+):0.344838[&&NHX:S=Ascomycota:D=N:B=100]
+):0.103849[&&NHX:S=Eukaryota:D=N:B=61],
+((((((((ERCC4_HUMAN:0.067531[&&NHX:S=HUMAN:O=ENST00000311895.3:G=ENSG00000175595],
+Ercc4_MOUSE:0.17422[&&NHX:S=MOUSE:O=ENSMUST00000023206.5:G=ENSMUSG00000022545]
+):0.065513[&&NHX:S=Euarchontoglires:D=N:B=100],
+ENSMODT00000006086_MONDO:0.104633[&&NHX:S=MONDO:O=ENSMODT00000006086.2:G=ENSMODG00000004840]
+):0.083764[&&NHX:S=Theria:D=N:B=100],
+Q5ZJP8_CHICK:0.153132[&&NHX:S=CHICK:O=ENSGALT00000004716.2:G=ENSGALG00000002981]
+):0.057998[&&NHX:S=Amniota:D=N:B=100],
+ENSXETT00000024054_XENTR:0.288632[&&NHX:S=XENTR:O=ENSXETT00000024054.2:G=ENSXETG00000010991]
+):0.075713[&&NHX:S=Tetrapoda:D=N:B=100],
+(zgc-63468_BRARE:0.2218[&&NHX:S=BRARE:O=ENSDART00000015780.4:G=ENSDARG00000014161],
+NEWSINFRUT00000137921_FUGRU:0.220441[&&NHX:S=FUGRU:O=NEWSINFRUT00000137921.3:G=NEWSINFRUG00000130312]
+):0.170605[&&NHX:S=Clupeocephala:D=N:B=100]
+):0.238713[&&NHX:S=Euteleostomi:D=N:B=100],
+ENSCINT00000011737_CIOIN:0.623567[&&NHX:S=CIOIN:O=ENSCINT00000011737.2:G=ENSCING00000005673]
+):0.07499[&&NHX:S=Chordata:D=N:B=100],
+(Sm00.scaff00195.0600_SCHMA:0.784609[&&NHX:S=SCHMA:O=Sm00.scaff00195.0600:G=Sm00.scaff00195.0600],
+(CBG03141_CAEBR:0.093703[&&NHX:S=CAEBR:O=CBG03141:G=CBG03141],
+NP_496498_CAEEL:0.212236[&&NHX:S=CAEEL:O=C47D12.8.1:G=C47D12.8]
+):1.47416[&&NHX:S=Caenorhabditis:D=N:B=94]
+):0.26906[&&NHX:S=Bilateria:D=N:B=97]
+):0.071406[&&NHX:S=Bilateria:D=N:B=1],
+(mei-9-RA_DROME:0.170289[&&NHX:S=DROME:O=CG3697-RA.3:G=CG3697],
+GA17620-PA_DROPS:0.154817[&&NHX:S=DROPS:O=GA17620-PA:G=GA17620]
+):0.818474[&&NHX:S=Sophophora:D=N:B=100]
+):0
+)[&&NHX:S=Eukaryota:D=N];
\ No newline at end of file
diff -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 -r 75a03bacdc7a3dc5b1c03f8b02df0ab383366955 test-data/visualization/phyloviz/5_newick.nhx
--- /dev/null
+++ b/test-data/visualization/phyloviz/5_newick.nhx
@@ -0,0 +1,1 @@
+(CAE_ELE_PORCN:0.303421 ,((((DRO_PER_PORCN:0.001000 ,DRO_PSE_PORCN:0.001000 )67:0.141994 ,(DRO_ANA_PORCN:0.111899 ,(DRO_ERE_PORCN:0.030516 ,(DRO_MEL_PORCN:0.021127 ,DRO_SEC_PORCN:0.021127 )38:0.030516 )35:0.111899 )18:0.141994 )16:0.162611 ,(DRO_WIL_PORCN:0.152225 ,(DRO_VIR_PORCN:0.085057 ,DRO_MOJ_PORCN:0.085057 )24:0.152225 )15:0.162611 )13:0.295081 ,(ANO_GAM_PORCN:0.287545 ,((CIO_INT_PORCN:0.100686 ,CIO_SAV_PORCN:0.100686 )19:0.275542 ,((LOA_LOA_PORCN:0.036278 ,BRU_MAL_PORCN:0.036278 )29:0.272631 ,(((((DAN_RER_PORCN:0.086499 ,((TAK_RUB_PORCN:0.032609 ,TET_NIG_PORCN:0.032609 )32:0.048864 ,(GAD_MOR_PORCN:0.039387 ,(ORY_LAT_PORCN:0.031729 ,(GAS_ACU_PORCN:0.021882 ,ORE_NIL_PORCN:0.021882 )37:0.031729 )34:0.039387 )28:0.048864 )27:0.086499 )23:0.119618 ,(LAT_CHA_PORCN:0.099348 ,((XEN_LAE_PORCN:0.033333 ,XEN_TRO_PORCN:0.033333 )31:0.091250 ,(ANO_CAR_PORCN:0.086538 ,((MON_DOM_PORCN:0.014100 ,(MAC_EUG_PORCN:0.005423 ,SAR_HAR_PORCN:0.005423 )57:0.014100 )42:0.062862 ,(ORN_ANA_PORCN:0.057974 ,(GOR_GOR_PORCN:0.033876 ,(FEL_CAT_PORCN:0.022851 ,(PRO_CAP_PORCN:0.019716 ,(CAV_POR_PORCN:0.018599 ,(ERI_EUR_PORCN:0.015518 ,((DIP_ORD_PORCN:0.007231 ,(MUS_MUS_PORCN:0.001085 ,(RAT_NOR_PORCN:0.001000 ,CRI_GRI_PORCN:0.001000 )69:0.001085 )64:0.007231 )53:0.012954 ,(DAS_NOV_PORCN:0.011362 ,(LOX_AFR_PORCN:0.010575 ,(CAL_JAC_PORCN:0.010332 ,(OCH_PRI_PORCN:0.010063 ,(MIC_MUR_PORCN:0.009123 ,(SUS_SCR_PORCN:0.008880 ,(MYO_LUC_PORCN:0.008460 ,((CAN_FAM_PORCN:0.005423 ,AIL_MEL_PORCN:0.005423 )58:0.008093 ,((PTE_VAM_PORCN:0.006508 ,BOS_TAU_PORCN:0.006508 )55:0.007494 ,((SPE_TRI_PORCN:0.003254 ,TUP_BEL_PORCN:0.003254 )61:0.006929 ,((OTO_GAR_PORCN:0.001085 ,(ORY_CUN_PORCN:0.001000 ,TUR_TRU_PORCN:0.001000 )68:0.001085 )65:0.005965 ,(EQU_CAB_PORCN:0.003688 ,(MAC_MUL_PORCN:0.002711 ,(PAN_TRO_PORCN:0.001446 ,(HOM_SAP_PORCN:0.001085 ,(PON_ABE_PORCN:0.001000 ,NOM_LEU_PORCN:0.001000 )70:0.001085 )66:0.001446 )63:0.002711 )62:0.003688 )60:0.005965 )56:0.006929 )54:0.007494 )52:0.008093 )51:0.008460 )50:0.008880 )49:0.009123 )48:0.010063 )47:0.010332 )46:0.010575 )45:0.011362 )44:0.012954 )43:0.015518 )41:0.018599 )40:0.019716 )39:0.022851 )36:0.033876 )30:0.057974 )26:0.062862 )25:0.086538 )22:0.091250 )21:0.099348 )20:0.119618 )17:0.214465 ,(BRA_FLO_PORCN:0.189220 ,SAC_KOW_PORCN:0.189220 )12:0.214465 )11:0.257058 ,(NEM_VEC_PORCN:0.246631 ,AMP_QUE_PORCN:0.246631 )9:0.257058 )8:0.266904 ,(TRI_CAS_PORCN:0.259494 ,(PED_HUM_PORCN:0.227009 ,(NAS_VIT_PORCN:0.160241 ,(API_MEL_PORCN:0.031851 ,(BOM_TER_PORCN:0.004808 ,BOM_IMP_PORCN:0.004808 )59:0.031851 )33:0.160241 )14:0.227009 )10:0.259494 )7:0.266904 )6:0.272631 )5:0.275542 )4:0.287545 )3:0.295081 )2:0.303421 )1:0.0001;
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: inithello: Fixed truncation of downloaded fasta files.
by Bitbucket 27 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/89dbce43ba88/
changeset: 89dbce43ba88
user: inithello
date: 2012-08-27 18:52:43
summary: Fixed truncation of downloaded fasta files.
affected #: 1 file
diff -r 90aa7ae565d60d38c90f444322a68b55fc895701 -r 89dbce43ba88afdc3e6265feea7e7e042bb030a7 lib/galaxy/jobs/deferred/genome_transfer.py
--- a/lib/galaxy/jobs/deferred/genome_transfer.py
+++ b/lib/galaxy/jobs/deferred/genome_transfer.py
@@ -115,15 +115,16 @@
files = tar.getmembers()
for filename in files:
z = tar.extractfile(filename)
- try:
- chunk = z.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- log.error( 'Problem decompressing compressed data' )
- exit()
- if not chunk:
- break
- os.write( fd, chunk )
+ while 1:
+ try:
+ chunk = z.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ log.error( 'Problem decompressing compressed data' )
+ exit()
+ if not chunk:
+ break
+ os.write( fd, chunk )
os.write( fd, '\n' )
os.close( fd )
tar.close()
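
For reference, the corrected pattern in isolation: the old code performed a single CHUNK_SIZE read per tar member, so any sequence larger than one chunk was truncated. A minimal stand-alone sketch of the loop the fix introduces (the names below are illustrative, not Galaxy's own):

    CHUNK_SIZE = 2 ** 20  # illustrative buffer size

    def copy_stream(src, dst, chunk_size=CHUNK_SIZE):
        """Copy a file-like src to dst chunk by chunk until read() returns ''."""
        while True:
            chunk = src.read(chunk_size)
            if not chunk:
                break  # stop only once the whole stream has been written
            dst.write(chunk)
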
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: afgane: style: explicitly define a color for the masthead highlight (i.e., gradient top)
by Bitbucket 26 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/90aa7ae565d6/
changeset: 90aa7ae565d6
user: afgane
date: 2012-08-27 02:57:31
summary: style: explicitly define a color for the masthead highlight (i.e., gradient top)
affected #: 2 files
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r 90aa7ae565d60d38c90f444322a68b55fc895701 static/june_2007_style/blue_colors.ini
--- a/static/june_2007_style/blue_colors.ini
+++ b/static/june_2007_style/blue_colors.ini
@@ -55,6 +55,7 @@
peek_table_header=#023858
# Masthead
masthead_bg=#2C3143
+masthead_bg_highlight=#333
masthead_text=#eeeeee
masthead_bg_hatch=-
masthead_link=#eeeeee
diff -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec -r 90aa7ae565d60d38c90f444322a68b55fc895701 static/june_2007_style/galaxy_bootstrap_variables.less
--- a/static/june_2007_style/galaxy_bootstrap_variables.less
+++ b/static/june_2007_style/galaxy_bootstrap_variables.less
@@ -144,7 +144,7 @@
// -------------------------
@navbarHeight: 32px;
@navbarBackground: @masthead_bg;
-@navbarBackgroundHighlight: @grayDark;
+@navbarBackgroundHighlight: @masthead_bg_highlight;
@navbarText: @grayLight;
@navbarLinkColor: @grayLight;
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Do not use enable_tracks or enable_pages options anymore; visualizations and pages are enabled for all instances.
by Bitbucket 24 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b1f2c51d6bd8/
changeset: b1f2c51d6bd8
user: jgoecks
date: 2012-08-25 00:43:33
summary: Do not use enable_tracks or enable_pages options anymore; visualizations and pages are enabled for all instances.
affected #: 3 files
diff -r 3d32fa091951e9aa72f3c69272960b3f6b648722 -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -218,7 +218,7 @@
%if for_editing:
<a href="${h.url_for( controller='tool_runner', action='rerun', id=data.id )}" target="galaxy_main" title='${_("Run this job again")}' class="icon-button arrow-circle tooltip"></a>
- %if app.config.get_bool( 'enable_tracks', False ) and data.ext in app.datatypes_registry.get_available_tracks():
+ %if data.ext in app.datatypes_registry.get_available_tracks():
<%
if data.dbkey != '?':
data_url = h.url_for( controller='tracks', action='list_tracks', dbkey=data.dbkey )
diff -r 3d32fa091951e9aa72f3c69272960b3f6b648722 -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec templates/webapps/galaxy/base_panels.mako
--- a/templates/webapps/galaxy/base_panels.mako
+++ b/templates/webapps/galaxy/base_panels.mako
@@ -78,12 +78,10 @@
[ _('Data Libraries'), h.url_for( controller='/library', action='index') ],
None,
[ _('Published Histories'), h.url_for( controller='/history', action='list_published' ) ],
- [ _('Published Workflows'), h.url_for( controller='/workflow', action='list_published' ) ]
- ]
- if app.config.get_bool( 'enable_tracks', False ):
- menu_options.append( [ _('Published Visualizations'), h.url_for( controller='/visualization', action='list_published' ) ] )
- if app.config.get_bool( 'enable_pages', False ):
- menu_options.append( [ _('Published Pages'), h.url_for( controller='/page', action='list_published' ) ] )
+ [ _('Published Workflows'), h.url_for( controller='/workflow', action='list_published' ) ],
+ [ _('Published Visualizations'), h.url_for( controller='/visualization', action='list_published' ) ],
+ [ _('Published Pages'), h.url_for( controller='/page', action='list_published' ) ]
+ ]
tab( "shared", _("Shared Data"), h.url_for( controller='/library', action='index'), menu_options=menu_options )
%>
@@ -98,15 +96,13 @@
%>
## Visualization menu.
- %if app.config.get_bool( 'enable_tracks', False ):
- <%
- menu_options = [
- [_('New Visualization'), h.url_for( controller='/tracks', action='index' ) ],
- [_('Saved Visualizations'), h.url_for( controller='/visualization', action='list' ) ]
- ]
- tab( "visualization", _("Visualization"), h.url_for( controller='/visualization', action='list'), menu_options=menu_options )
- %>
- %endif
+ <%
+ menu_options = [
+ [_('New Visualization'), h.url_for( controller='/tracks', action='index' ) ],
+ [_('Saved Visualizations'), h.url_for( controller='/visualization', action='list' ) ]
+ ]
+ tab( "visualization", _("Visualization"), h.url_for( controller='/visualization', action='list'), menu_options=menu_options )
+ %>
## Cloud menu.
%if app.config.get_bool( 'enable_cloud_launch', False ):
@@ -155,8 +151,7 @@
menu_options.append( [ _('Logout'), app.config.remote_user_logout_href, "_top" ] )
else:
menu_options.append( [ _('Preferences'), h.url_for( controller='/user', action='index', cntrller='user', webapp='galaxy' ), "galaxy_main" ] )
- if app.config.get_bool( 'enable_tracks', False ):
- menu_options.append( [ 'Custom Builds', h.url_for( controller='/user', action='dbkeys' ), "galaxy_main" ] )
+ menu_options.append( [ 'Custom Builds', h.url_for( controller='/user', action='dbkeys' ), "galaxy_main" ] )
if app.config.require_login:
logout_url = h.url_for( controller='/root', action='index', m_c='user', m_a='logout', webapp='galaxy' )
else:
@@ -165,8 +160,7 @@
menu_options.append( None )
menu_options.append( [ _('Saved Histories'), h.url_for( controller='/history', action='list' ), "galaxy_main" ] )
menu_options.append( [ _('Saved Datasets'), h.url_for( controller='/dataset', action='list' ), "galaxy_main" ] )
- if app.config.get_bool( 'enable_pages', False ):
- menu_options.append( [ _('Saved Pages'), h.url_for( controller='/page', action='list' ), "_top" ] )
+ menu_options.append( [ _('Saved Pages'), h.url_for( controller='/page', action='list' ), "_top" ] )
menu_options.append( [ _('API Keys'), h.url_for( controller='/user', action='api_keys', cntrller='user', webapp='galaxy' ), "galaxy_main" ] )
if app.config.use_remote_user:
menu_options.append( [ _('Public Name'), h.url_for( controller='/user', action='edit_username', cntrller='user', webapp='galaxy' ), "galaxy_main" ] )
diff -r 3d32fa091951e9aa72f3c69272960b3f6b648722 -r b1f2c51d6bd8d8b1aecce8f62304cc2df278ccec universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -506,15 +506,6 @@
# Enable Galaxy to communicate directly with a sequencer
#enable_sequencer_communication = False
-# Enable Galaxy's built-in visualization module, Trackster.
-#enable_tracks = True
-
-# Enable Galaxy Pages. Pages are custom webpages that include embedded Galaxy items,
-# such as datasets, histories, workflows, and visualizations; pages are useful for
-# documenting and sharing multiple analyses or workflows. Pages are created using a
-# WYSIWYG editor that is very similar to a word processor.
-#enable_pages = True
-
# Enable authentication via OpenID. Allows users to log in to their Galaxy
# account by authenticating with an OpenID provider.
#enable_openid = False
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Enable function to be used for generating a grid operation link; use this functionality to generate link for viewing/editing visualization.
by Bitbucket 24 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3d32fa091951/
changeset: 3d32fa091951
user: jgoecks
date: 2012-08-25 00:15:29
summary: Enable function to be used for generating a grid operation link; use this functionality to generate link for viewing/editing visualization.
affected #: 2 files
diff -r df99df26d92fa8a5e3eaef4fd1eef4b6ee18f86d -r 3d32fa091951e9aa72f3c69272960b3f6b648722 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py
+++ b/lib/galaxy/web/controllers/visualization.py
@@ -5,7 +5,7 @@
from galaxy.util.sanitize_html import sanitize_html
class VisualizationListGrid( grids.Grid ):
- def get_link( item ):
+ def get_url_args( item ):
"""
Returns dictionary used to create item link.
"""
@@ -24,7 +24,7 @@
default_sort_key = "-update_time"
default_filter = dict( title="All", deleted="False", tags="All", sharing="All" )
columns = [
- grids.TextColumn( "Title", key="title", attach_popup=True, link=get_link ),
+ grids.TextColumn( "Title", key="title", attach_popup=True, link=get_url_args ),
grids.TextColumn( "Type", key="type" ),
grids.TextColumn( "Dbkey", key="dbkey" ),
grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.VisualizationTagAssociation, filterable="advanced", grid_name="VisualizationListGrid" ),
@@ -42,7 +42,7 @@
grids.GridAction( "Create new visualization", dict( action='create' ) )
]
operations = [
- grids.GridOperation( "View/Edit", allow_multiple=False, url_args=dict( controller='tracks', action='browser' ) ),
+ grids.GridOperation( "View/Edit", allow_multiple=False, url_args=get_url_args ),
grids.GridOperation( "Edit Attributes", allow_multiple=False, url_args=dict( action='edit') ),
grids.GridOperation( "Copy", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False, url_args=dict( action='clone') ),
grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
diff -r df99df26d92fa8a5e3eaef4fd1eef4b6ee18f86d -r 3d32fa091951e9aa72f3c69272960b3f6b648722 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py
+++ b/lib/galaxy/web/framework/helpers/grids.py
@@ -728,9 +728,12 @@
self.global_operation = global_operation
def get_url_args( self, item ):
if self.url_args:
- temp = dict( self.url_args )
- temp['id'] = item.id
- return temp
+ if hasattr( self.url_args, '__call__' ):
+ url_args = self.url_args( item )
+ else:
+ url_args = dict( self.url_args )
+ url_args['id'] = item.id
+ return url_args
else:
return dict( operation=self.label, id=item.id )
def allowed( self, item ):
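
The effect of the change is that a grid operation's url_args may now be either a fixed dictionary or a callable that computes the arguments from the item being rendered. A rough sketch of that dispatch with hypothetical names (not the exact Galaxy code):

    def resolve_url_args(url_args, item):
        """Return the URL parameters for one grid row."""
        if callable(url_args):
            # let the grid definition decide per item (e.g. route by visualization type)
            return url_args(item)
        resolved = dict(url_args)
        resolved['id'] = item.id
        return resolved
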
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: inithello: Added script to update NCBI builds list, replaced Broad data source with direct URL, cleaned up help text.
by Bitbucket 23 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/df99df26d92f/
changeset: df99df26d92f
user: inithello
date: 2012-08-23 22:36:58
summary: Added script to update NCBI builds list, replaced Broad data source with direct URL, cleaned up help text.
affected #: 4 files
diff -r 429c1054c13e9c2206ccd4bc3dfd1ca1fa75973c -r df99df26d92fa8a5e3eaef4fd1eef4b6ee18f86d cron/get_ncbi.py
--- /dev/null
+++ b/cron/get_ncbi.py
@@ -0,0 +1,93 @@
+import urllib, pkg_resources, os
+pkg_resources.require( 'elementtree' )
+from elementtree import ElementTree, ElementInclude
+from xml.parsers.expat import ExpatError as XMLParseErrorThing
+import sys
+
+import pkg_resources
+
+class GetListing:
+ def __init__( self, data ):
+ self.tree = ElementTree.parse( data )
+ self.root = self.tree.getroot()
+ ElementInclude.include(self.root)
+
+ def xml_text(self, name=None):
+ """Returns the text inside an element"""
+ root = self.root
+ if name is not None:
+ # Try attribute first
+ val = root.get(name)
+ if val:
+ return val
+ # Then try as element
+ elem = root.find(name)
+ else:
+ elem = root
+ if elem is not None and elem.text:
+ text = ''.join(elem.text.splitlines())
+ return text.strip()
+ # No luck, return empty string
+ return ''
+
+def dlcachefile( webenv, querykey, i, results ):
+ url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=nuccore&usehis…'
+ fp = urllib.urlopen( url )
+ search = GetListing( fp )
+ fp.close()
+ webenv = search.xml_text( 'WebEnv' )
+ querykey = search.xml_text( 'QueryKey' )
+ url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=nuccore&WebEn…' % ( webenv, querykey, i, results )
+ fp = urllib.urlopen( url )
+ cachefile = os.tmpfile()
+ for line in fp:
+ cachefile.write( line )
+ fp.close()
+ cachefile.flush()
+ cachefile.seek(0)
+ return cachefile
+
+
+url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=nuccore&usehis…'
+fp = urllib.urlopen( url )
+results = GetListing( fp )
+fp.close()
+
+webenv = results.xml_text( 'WebEnv' )
+querykey = results.xml_text( 'QueryKey' )
+counts = int( results.xml_text( 'Count' ) )
+results = 10000
+found = 0
+
+for i in range(0, counts + results, results):
+ rets = dict()
+ cache = dlcachefile( webenv, querykey, i, results )
+ try:
+ xmldoc = GetListing( cache )
+ except (IOError, XMLParseErrorThing):
+ cache = dlcachefile( webenv, querykey, i, results )
+ try:
+ xmldoc = GetListing( cache )
+ except (IOError, XMLParseErrorThing):
+ cache.close()
+ exit()
+ pass
+ finally:
+ cache.close()
+ entries = xmldoc.root.findall( 'DocSum' )
+ for entry in entries:
+ dbkey = None
+ children = entry.findall('Item')
+ for item in children:
+ rets[ item.get('Name') ] = item.text
+ if not rets['Caption'].startswith('NC_'):
+ continue
+
+ for ret in rets['Extra'].split('|'):
+ if not ret.startswith('NC_'):
+ continue
+ else:
+ dbkey = ret
+ break
+ if dbkey is not None:
+ print '\t'.join( [ dbkey, rets['Title'] ] )
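
The script pages through the Entrez history server (WebEnv/QueryKey) in blocks of 10000 summaries and retries a failed block once before giving up. The same retry idea as a stand-alone helper, for illustration only (fetch is a hypothetical callable that downloads and parses one block):

    from xml.parsers.expat import ExpatError

    def fetch_with_retry(fetch, retries=1):
        """Call fetch(); retry on a download or XML parse failure, then re-raise."""
        for attempt in range(retries + 1):
            try:
                return fetch()
            except (IOError, ExpatError):
                if attempt == retries:
                    raise
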
diff -r 429c1054c13e9c2206ccd4bc3dfd1ca1fa75973c -r df99df26d92fa8a5e3eaef4fd1eef4b6ee18f86d cron/updatencbi.sh.sample
--- /dev/null
+++ b/cron/updatencbi.sh.sample
@@ -0,0 +1,42 @@
+#!/bin/sh
+#
+# Script to update NCBI shared data tables. The idea is to update, but if
+# the update fails, not replace current data/tables with error
+# messages.
+
+# Edit this line to refer to galaxy's path:
+GALAXY=/path/to/galaxy
+PYTHONPATH=${GALAXY}/lib
+export PYTHONPATH
+
+# setup directories
+echo "Creating required directories."
+DIRS="
+${GALAXY}/tool-data/shared/ncbi
+${GALAXY}/tool-data/shared/ncbi/new
+"
+for dir in $DIRS; do
+ if [ ! -d $dir ]; then
+ echo "Creating $dir"
+ mkdir $dir
+ else
+ echo "$dir already exists, continuing."
+ fi
+done
+
+date
+echo "Updating NCBI shared data tables."
+
+# Try to build "builds.txt"
+echo "Updating builds.txt"
+python ${GALAXY}/cron/get_ncbi.py > ${GALAXY}/tool-data/shared/ncbi/new/builds.txt
+if [ $? -eq 0 ]
+then
+ diff ${GALAXY}/tool-data/shared/ncbi/new/builds.txt ${GALAXY}/tool-data/shared/ncbi/builds.txt > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ cp -f ${GALAXY}/tool-data/shared/ncbi/new/builds.txt ${GALAXY}/tool-data/shared/ncbi/builds.txt
+ fi
+else
+ echo "Failed to update builds.txt" >&2
+fi
diff -r 429c1054c13e9c2206ccd4bc3dfd1ca1fa75973c -r df99df26d92fa8a5e3eaef4fd1eef4b6ee18f86d lib/galaxy/web/controllers/data_admin.py
--- a/lib/galaxy/web/controllers/data_admin.py
+++ b/lib/galaxy/web/controllers/data_admin.py
@@ -75,7 +75,7 @@
@web.require_admin
def add_genome( self, trans, **kwd ):
if trans.app.config.get_bool( 'enable_beta_job_managers', False ) == False:
- return trans.fill_template( '/admin/data_admin/betajob.mako' )
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='This feature requires that enable_beta_job_managers be set to True in your Galaxy configuration.' )
dbkeys = trans.ucsc_builds
ensemblkeys = trans.ensembl_builds
ncbikeys = trans.ncbi_builds
@@ -137,9 +137,10 @@
dbkey = build.split( ': ' )[0]
longname = build.split( ': ' )[-1]
url = 'http://togows.dbcls.jp/entry/ncbi-nucleotide/%s.fasta' % dbkey
- elif source == 'Broad':
- dbkey = params.get('broad_dbkey', '')[0]
- url = 'ftp://ftp.broadinstitute.org/pub/seq/references/%s.fasta' % dbkey
+ elif source == 'URL':
+ dbkey = params.get( 'url_dbkey', '' )
+ url = params.get( 'url', None )
+ longname = params.get( 'longname', None )
elif source == 'UCSC':
longname = None
for build in trans.ucsc_builds:
@@ -198,7 +199,9 @@
break
url = 'ftp://ftp.ensembl.org/pub/release-%s/fasta/%s/dna/%s.%s.%s.dna.toplevel.fa.…' % ( release, pathname.lower(), pathname, dbkey, release )
else:
- raise ValueError, 'Somehow an invalid data source was specified.'
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='Somehow an invalid data source was specified.' )
+ if url is None:
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='Unable to generate a valid URL with the specified parameters.' )
params = dict( protocol='http', name=dbkey, datatype='fasta', url=url, user=trans.user.id )
jobid = trans.app.job_manager.deferred_job_queue.plugins['GenomeTransferPlugin'].create_job( trans, url, dbkey, longname, indexers )
chainjob = []
diff -r 429c1054c13e9c2206ccd4bc3dfd1ca1fa75973c -r df99df26d92fa8a5e3eaef4fd1eef4b6ee18f86d templates/admin/data_admin/data_form.mako
--- a/templates/admin/data_admin/data_form.mako
+++ b/templates/admin/data_admin/data_form.mako
@@ -53,7 +53,7 @@
<label for="source">Data Source</label><select id="datasource" name="source" label="Data Source"><option value="UCSC">UCSC</option>
- <option value="Broad">Broad Institute</option>
+ <option value="URL">Direct Link</option><option value="NCBI">NCBI</option><option value="Ensembl">EnsemblGenome</option></select>
@@ -75,22 +75,31 @@
</div></div><h2>Parameters</h2>
- <div id="params_Broad" class="params-block">
+ <div id="params_URL" class="params-block"><div class="form-row">
- <label for="longname">Internal Name</label>
- <input name="longname" type="text" label="Internal Name" />
+ <label for="longname">Long Name</label>
+ <input name="longname" type="text" label="Long Name" /><div style="clear: both;"> </div>
+ <div class="toolParamHelp" style="clear: both;">
+ A descriptive name for this build.
+ </div></div><div class="form-row">
- <label for="uniqid">Internal Unique Identifier</label>
- <input name="uniqid" type="text" label="Internal Identifier" />
+ <label for="url_dbkey">DB Key</label>
+ <input name="url_dbkey" type="text" label="DB Key" /><div style="clear: both;"> </div>
+ <div class="toolParamHelp" style="clear: both;">
+ The internal DB key for this build. WARNING: Using a value that already exists in one or more .loc files may have unpredictable results.
+ </div></div><div id="dlparams"><div class="form-row">
- <label for="broad_dbkey">External Name</label>
- <input name="broad_dbkey" type="text" label="Genome Unique Name" />
+ <label for="url">URL</label>
+ <input name="url" type="text" label="URL" /><div style="clear: both;"> </div>
+ <div class="toolParamHelp" style="clear: both;">
+ The URL to download this build from.
+ </div></div></div></div>
@@ -101,7 +110,9 @@
<input type="text" class="text-and-autocomplete-select ac_input" size="40" name="ncbi_name" id="ncbi_name" value="" /></div><div class="toolParamHelp" style="clear: both;">
- If you can't find the build you want in this list, <insert link to instructions here>
+ If you can't find the build you want in this list, open a terminal and execute
+ <pre>sh cron/updatencbi.sh</pre>
+ in your galaxy root directory.
</div></div></div>
@@ -116,7 +127,9 @@
</select></div><div class="toolParamHelp" style="clear: both;">
- If you can't find the build you want in this list, <insert link to instructions here>
+ If you can't find the build you want in this list, open a terminal and execute
+ <pre>sh cron/updateensembl.sh</pre>
+ in your galaxy root directory.
</div></div></div>
@@ -135,7 +148,9 @@
</select></div><div class="toolParamHelp" style="clear: both;">
- If you can't find the build you want in this list, <insert link to instructions here>
+ If you can't find the build you want in this list, open a terminal and execute
+ <pre>sh cron/updateucsc.sh</pre>
+ in your galaxy root directory.
</div></div></div>
@@ -145,7 +160,6 @@
<script type="text/javascript">
$(document).ready(function() {
checkDataSource();
- // Replace dbkey select with search+select.
});
$('#datasource').change(function() {
checkDataSource();
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Require pileup format as input to pileup parser tool.
by Bitbucket 23 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/429c1054c13e/
changeset: 429c1054c13e
user: jgoecks
date: 2012-08-23 21:58:11
summary: Require pileup format as input to pileup parser tool.
affected #: 1 file
diff -r 3bd47802bff7946958b0a616e3d42aef50c63277 -r 429c1054c13e9c2206ccd4bc3dfd1ca1fa75973c tools/samtools/pileup_parser.xml
--- a/tools/samtools/pileup_parser.xml
+++ b/tools/samtools/pileup_parser.xml
@@ -7,7 +7,7 @@
#end if#
</command><inputs>
- <param name="input" type="data" format="tabular" label="Select dataset"/>
+ <param name="input" type="data" format="pileup" label="Select dataset"/><conditional name="pileup_type"><param name="type_select" type="select" label="which contains" help="See "Types of pileup datasets" below for examples"><option value="six" selected="true">Pileup with six columns (simple)</option>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Trackster: for feature tracks, only draw name if available.
by Bitbucket 23 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3bd47802bff7/
changeset: 3bd47802bff7
user: jgoecks
date: 2012-08-23 19:56:34
summary: Trackster: for feature tracks, only draw name if available.
affected #: 1 file
diff -r c06d0ea23c902e13a5d38ed56ab8cad53130fea5 -r 3bd47802bff7946958b0a616e3d42aef50c63277 static/scripts/viz/trackster.js
--- a/static/scripts/viz/trackster.js
+++ b/static/scripts/viz/trackster.js
@@ -5690,7 +5690,7 @@
ctx.globalAlpha = 1;
// Draw label for Pack mode.
- if (mode === "Pack" && feature_start > tile_low) {
+ if (feature_name && mode === "Pack" && feature_start > tile_low) {
ctx.fillStyle = label_color;
// FIXME: assumption here that the entire view starts at 0
if (tile_low === 0 && f_start - ctx.measureText(feature_name).width < 0) {
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
5 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5e12fae63066/
changeset: 5e12fae63066
user: jmchilton
date: 2012-08-22 04:05:53
summary: Roles API bug fixes.
affected #: 1 file
diff -r e27c236c05e5d65f3ce92f271a712419e7fd73a5 -r 5e12fae63066b12cf3f7f4342ad0ebecb5407a72 lib/galaxy/web/api/roles.py
--- a/lib/galaxy/web/api/roles.py
+++ b/lib/galaxy/web/api/roles.py
@@ -17,7 +17,7 @@
"""
rval = []
for role in trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.deleted == False ):
- if trans.app.security_agent.ok_to_display( trans.user, role ):
+ if trans.user_is_admin() or trans.app.security_agent.ok_to_display( trans.user, role ):
item = role.get_api_value( value_mapper={ 'id': trans.security.encode_id } )
encoded_id = trans.security.encode_id( role.id )
item['url'] = url_for( 'role', id=encoded_id )
@@ -32,7 +32,7 @@
"""
role_id = id
try:
- role_id = trans.security.decode_id( role_id )
+ decoded_role_id = trans.security.decode_id( role_id )
except TypeError:
trans.response.status = 400
return "Malformed role id ( %s ) specified, unable to decode." % str( role_id )
@@ -40,7 +40,7 @@
role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
except:
role = None
- if not role or not trans.app.security_agent.ok_to_display( trans.user, role ):
+ if not role or not (trans.user_is_admin() or trans.app.security_agent.ok_to_display( trans.user, role )):
trans.response.status = 400
return "Invalid role id ( %s ) specified." % str( role_id )
item = role.get_api_value( view='element', value_mapper={ 'id': trans.security.encode_id } )
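
With these fixes an admin sees every non-deleted role, and a malformed or undecodable id is rejected with a 400. A minimal sketch of exercising the endpoints, assuming a local Galaxy instance and an admin API key (both placeholders):

    import json
    import urllib2

    GALAXY_URL = 'http://localhost:8080'  # assumption: a local instance
    API_KEY = 'admin-api-key-goes-here'   # assumption: an admin user's key

    def api_get(path):
        """GET a Galaxy API resource and decode the JSON response."""
        return json.loads(urllib2.urlopen('%s%s?key=%s' % (GALAXY_URL, path, API_KEY)).read())

    roles = api_get('/api/roles')                        # list of role dictionaries
    detail = api_get('/api/roles/%s' % roles[0]['id'])   # show a single role
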
https://bitbucket.org/galaxy/galaxy-central/changeset/558547d5329f/
changeset: 558547d5329f
user: jmchilton
date: 2012-08-22 06:05:05
summary: Fix library permissions API which has been broken since security API
changes in 8731db1b2bfb (June 2011).
affected #: 1 file
diff -r 5e12fae63066b12cf3f7f4342ad0ebecb5407a72 -r 558547d5329f49cad685056cd4284e8e7661cffc lib/galaxy/web/api/permissions.py
--- a/lib/galaxy/web/api/permissions.py
+++ b/lib/galaxy/web/api/permissions.py
@@ -40,10 +40,10 @@
role_params = params.get( k + '_in', [] )
in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( trans.security.decode_id( x ) ) for x in util.listify( role_params ) ]
permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
- trans.app.security_agent.set_all_library_permissions( library, permissions )
+ trans.app.security_agent.set_all_library_permissions( trans, library, permissions )
trans.sa_session.refresh( library )
# Copy the permissions to the root folder
- trans.app.security_agent.copy_library_permissions( library, library.root_folder )
+ trans.app.security_agent.copy_library_permissions( trans, library, library.root_folder )
message = "Permissions updated for library '%s'." % library.name
item = library.get_api_value( view='element' )
https://bitbucket.org/galaxy/galaxy-central/changeset/a186a4bd7bae/
changeset: a186a4bd7bae
user: jmchilton
date: 2012-08-22 23:57:04
summary: Create method in libraries API should return the created library, not an array containing only the created library. This was confirmed by dannon in the comments on issue 802.
affected #: 1 file
diff -r 558547d5329f49cad685056cd4284e8e7661cffc -r a186a4bd7bae84459de066b22d49e226e2112ce1 lib/galaxy/web/api/libraries.py
--- a/lib/galaxy/web/api/libraries.py
+++ b/lib/galaxy/web/api/libraries.py
@@ -98,7 +98,7 @@
rval['url'] = url_for( 'library', id=encoded_id )
rval['name'] = name
rval['id'] = encoded_id
- return [ rval ]
+ return rval
@web.expose_api
def delete( self, trans, id, **kwd ):
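
After this change a client can index straight into the response instead of unwrapping a one-element list. A hedged sketch of creating a library through the API, again assuming a local instance and an admin key (placeholder values):

    import json
    import urllib2

    GALAXY_URL = 'http://localhost:8080'  # assumption
    API_KEY = 'admin-api-key-goes-here'   # assumption

    request = urllib2.Request('%s/api/libraries?key=%s' % (GALAXY_URL, API_KEY),
                              data=json.dumps({'name': 'test library'}),
                              headers={'Content-Type': 'application/json'})
    library = json.loads(urllib2.urlopen(request).read())
    print library['id'], library['name'], library['url']  # a dict, not [dict]
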
https://bitbucket.org/galaxy/galaxy-central/changeset/bec712216582/
changeset: bec712216582
user: jmchilton
date: 2012-08-23 00:05:53
summary: Include created history url in json rendered by histories API on successful history creation. Dannon verified this is the desired behavior in the comments on issue 802 in bitbucket.
affected #: 1 file
diff -r a186a4bd7bae84459de066b22d49e226e2112ce1 -r bec712216582134338a733ccd85bc4dac25353c4 lib/galaxy/web/api/histories.py
--- a/lib/galaxy/web/api/histories.py
+++ b/lib/galaxy/web/api/histories.py
@@ -109,6 +109,7 @@
trans.sa_session.add( new_history )
trans.sa_session.flush()
item = new_history.get_api_value(view='element', value_mapper={'id':trans.security.encode_id})
+ item['url'] = url_for( 'history', id=item['id'] )
return item
@web.expose_api
https://bitbucket.org/galaxy/galaxy-central/changeset/c06d0ea23c90/
changeset: c06d0ea23c90
user: dannon
date: 2012-08-23 15:34:43
summary: Merged in jmchilton/galaxy-central-roles-api-fixes (pull request #62)
affected #: 4 files
diff -r 146d0f4c29decefb441a7133b4aa16442afdacb1 -r c06d0ea23c902e13a5d38ed56ab8cad53130fea5 lib/galaxy/web/api/histories.py
--- a/lib/galaxy/web/api/histories.py
+++ b/lib/galaxy/web/api/histories.py
@@ -109,6 +109,7 @@
trans.sa_session.add( new_history )
trans.sa_session.flush()
item = new_history.get_api_value(view='element', value_mapper={'id':trans.security.encode_id})
+ item['url'] = url_for( 'history', id=item['id'] )
return item
@web.expose_api
diff -r 146d0f4c29decefb441a7133b4aa16442afdacb1 -r c06d0ea23c902e13a5d38ed56ab8cad53130fea5 lib/galaxy/web/api/libraries.py
--- a/lib/galaxy/web/api/libraries.py
+++ b/lib/galaxy/web/api/libraries.py
@@ -98,7 +98,7 @@
rval['url'] = url_for( 'library', id=encoded_id )
rval['name'] = name
rval['id'] = encoded_id
- return [ rval ]
+ return rval
@web.expose_api
def delete( self, trans, id, **kwd ):
diff -r 146d0f4c29decefb441a7133b4aa16442afdacb1 -r c06d0ea23c902e13a5d38ed56ab8cad53130fea5 lib/galaxy/web/api/permissions.py
--- a/lib/galaxy/web/api/permissions.py
+++ b/lib/galaxy/web/api/permissions.py
@@ -40,10 +40,10 @@
role_params = params.get( k + '_in', [] )
in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( trans.security.decode_id( x ) ) for x in util.listify( role_params ) ]
permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
- trans.app.security_agent.set_all_library_permissions( library, permissions )
+ trans.app.security_agent.set_all_library_permissions( trans, library, permissions )
trans.sa_session.refresh( library )
# Copy the permissions to the root folder
- trans.app.security_agent.copy_library_permissions( library, library.root_folder )
+ trans.app.security_agent.copy_library_permissions( trans, library, library.root_folder )
message = "Permissions updated for library '%s'." % library.name
item = library.get_api_value( view='element' )
diff -r 146d0f4c29decefb441a7133b4aa16442afdacb1 -r c06d0ea23c902e13a5d38ed56ab8cad53130fea5 lib/galaxy/web/api/roles.py
--- a/lib/galaxy/web/api/roles.py
+++ b/lib/galaxy/web/api/roles.py
@@ -17,7 +17,7 @@
"""
rval = []
for role in trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.deleted == False ):
- if trans.app.security_agent.ok_to_display( trans.user, role ):
+ if trans.user_is_admin() or trans.app.security_agent.ok_to_display( trans.user, role ):
item = role.get_api_value( value_mapper={ 'id': trans.security.encode_id } )
encoded_id = trans.security.encode_id( role.id )
item['url'] = url_for( 'role', id=encoded_id )
@@ -32,7 +32,7 @@
"""
role_id = id
try:
- role_id = trans.security.decode_id( role_id )
+ decoded_role_id = trans.security.decode_id( role_id )
except TypeError:
trans.response.status = 400
return "Malformed role id ( %s ) specified, unable to decode." % str( role_id )
@@ -40,7 +40,7 @@
role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
except:
role = None
- if not role or not trans.app.security_agent.ok_to_display( trans.user, role ):
+ if not role or not (trans.user_is_admin() or trans.app.security_agent.ok_to_display( trans.user, role )):
trans.response.status = 400
return "Invalid role id ( %s ) specified." % str( role_id )
item = role.get_api_value( view='element', value_mapper={ 'id': trans.security.encode_id } )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Fix for handling missing quality values in VCF data provider + some whitespace. Thanks to Jim Johnson for fix.
by Bitbucket 23 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/146d0f4c29de/
changeset: 146d0f4c29de
user: jgoecks
date: 2012-08-23 15:04:08
summary: Fix for handling missing quality values in VCF data provider + some whitespace. Thanks to Jim Johnson for fix.
affected #: 1 file
diff -r dd8a939cf0a26ea9e3cb969297bbfe7d749e88f7 -r 146d0f4c29decefb441a7133b4aa16442afdacb1 lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -625,7 +625,8 @@
end = start + len( new_seq )
# Pack line.
- payload = [ hash( line ),
+ payload = [
+ hash( line ),
start,
end,
# ID:
@@ -634,7 +635,8 @@
# TODO? VCF does not have strand, so default to positive.
"+",
new_seq,
- float( feature[5] ) ]
+ None if feature[5] == '.' else float( feature[5] )
+ ]
rval.append(payload)
return { 'data': rval, 'message': message }
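
The underlying rule is that VCF marks a missing QUAL value with '.', which float() rejects with a ValueError. A tiny sketch of the parsing rule the fix applies (hypothetical helper name):

    def parse_qual(qual_field):
        """Map VCF's '.' placeholder to None instead of calling float() on it."""
        return None if qual_field == '.' else float(qual_field)

    parse_qual('37.5')  # -> 37.5
    parse_qual('.')     # -> None, where float('.') would raise ValueError
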
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Update datatypes conf sample to include bedgraph to bigwig converter.
by Bitbucket 22 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/dd8a939cf0a2/
changeset: dd8a939cf0a2
user: jgoecks
date: 2012-08-23 04:54:54
summary: Update datatypes conf sample to include bedgraph to bigwig converter.
affected #: 1 file
diff -r f0e26a4f7dd91d9a632913ae450e285e37a9a302 -r dd8a939cf0a26ea9e3cb969297bbfe7d749e88f7 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -23,7 +23,9 @@
<display file="genetrack.xml" /><display file="igb/bed.xml" /></datatype>
- <datatype extension="bedgraph" type="galaxy.datatypes.interval:BedGraph" display_in_upload="true" />
+ <datatype extension="bedgraph" type="galaxy.datatypes.interval:BedGraph" display_in_upload="true">
+ <converter file="bedgraph_to_bigwig_converter.xml" target_datatype="bigwig"/>
+ </datatype><datatype extension="bedstrict" type="galaxy.datatypes.interval:BedStrict" /><datatype extension="bed6" type="galaxy.datatypes.interval:Bed6"><converter file="bed_to_genetrack_converter.xml" target_datatype="genetrack"/>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f0e26a4f7dd9/
changeset: f0e26a4f7dd9
user: jgoecks
date: 2012-08-22 22:49:12
summary: Output sorted BAM from Bowtie2.
affected #: 1 file
diff -r 695159af542b44ac5038d1b3cc401b3f4b98d2e3 -r f0e26a4f7dd91d9a632913ae450e285e37a9a302 tools/sr_mapping/bowtie2_wrapper.py
--- a/tools/sr_mapping/bowtie2_wrapper.py
+++ b/tools/sr_mapping/bowtie2_wrapper.py
@@ -64,8 +64,8 @@
else:
index_path = options.index_path
- # Build bowtie command.
- cmd = 'bowtie2 %s -x %s %s | samtools view -Sb - > %s'
+ # Build bowtie command; use view and sort to create sorted bam.
+ cmd = 'bowtie2 %s -x %s %s | samtools view -Su - | samtools sort -o - sorted > %s'
# Set up reads.
if options.single_paired == 'paired':
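
The extra samtools stages are what turn the raw SAM stream into a coordinate-sorted BAM: view -Su emits uncompressed BAM, and the old-style sort -o writes the sorted result to stdout. A stand-alone sketch of running such a pipeline, with made-up paths and options rather than the wrapper's actual argument handling:

    import subprocess

    cmd = ('bowtie2 %s -x %s %s | samtools view -Su - | samtools sort -o - sorted > %s'
           % ('--sensitive', '/path/to/index', '-U reads.fastq', 'output_sorted.bam'))
    subprocess.check_call(cmd, shell=True)  # shell=True so the pipes and redirect apply
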
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: dannon: Remove blast tools and test data from the distribution, update tool_conf.xml.sample to reflect this.
by Bitbucket 22 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/695159af542b/
changeset: 695159af542b
user: dannon
date: 2012-08-22 22:12:59
summary: Remove blast tools and test data from the distribution, update tool_conf.xml.sample to reflect this.
affected #: 36 files
Diff too large to display.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.