4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e662438a3641/
Changeset: e662438a3641
User: nsoranzo
Date: 2014-02-26 19:27:17
Summary: Workflow API: add tool version and parameters for each step when showing a workflow.
Otherwise, to get this information, an extra API call to download the JSON
representation, followed by a tricky mapping of the step ids, is necessary.
Affected #: 1 file
diff -r 451b411b4b19ca34ed154352b07b3f49983d1197 -r e662438a3641ec18fdb396348f190664832d80ef lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -79,7 +79,7 @@
user=trans.user ).join( 'stored_workflow' ).filter(
trans.app.model.StoredWorkflow.deleted == False ).order_by(
desc( trans.app.model.StoredWorkflow.update_time ) ).all():
- item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id })
+ item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id } )
encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id)
item['url'] = url_for( 'workflow', id=encoded_id )
rval.append(item)
@@ -127,6 +127,8 @@
steps[step.id] = {'id': step.id,
'type': step.type,
'tool_id': step.tool_id,
+ 'tool_version': step.tool_version,
+ 'tool_inputs': step.tool_inputs,
'input_steps': {}}
for conn in step.input_connections:
steps[step.id]['input_steps'][conn.input_name] = {'source_step': conn.output_step_id,
@@ -139,7 +141,7 @@
"""
POST /api/workflows
- We're not creating workflows from the api. Just execute for now.
+ We're not creating workflows from the api. Just execute for now.
However, we will import them if installed_repository_file is specified
"""
@@ -210,7 +212,7 @@
else:
trans.response.status = 400
return "Unknown dataset source '%s' specified." % ds_map[k]['src']
- if add_to_history and hda.history != history:
+ if add_to_history and hda.history != history:
hda = hda.copy()
history.add_dataset(hda)
ds_map[k]['hda'] = hda
@@ -260,7 +262,7 @@
trans.response.status = 400
return "Workflow cannot be run because of step upgrade messages: %s" % step.upgrade_messages
else:
- # This is an input step. Make sure we have an available input.
+ # This is an input step. Make sure we have an available input.
if step.type == 'data_input' and str(step.id) not in ds_map:
trans.response.status = 400
return "Workflow cannot be run because an expected input step '%s' has no input dataset." % step.id
@@ -387,7 +389,7 @@
"""
# Pull parameters out of payload.
workflow_id = payload.get('workflow_id', None)
- if workflow_id == None:
+ if workflow_id is None:
raise exceptions.ObjectAttributeMissingException( "Missing required parameter 'workflow_id'." )
try:
stored_workflow = self.get_stored_workflow( trans, workflow_id, check_ownership=False )
@@ -452,7 +454,7 @@
if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
raise exceptions.ItemOwnershipException()
results = trans.sa_session.query(self.app.model.WorkflowInvocation).filter(self.app.model.WorkflowInvocation.workflow_id==stored_workflow.latest_workflow_id)
- results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id))
+ results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id))
out = results.first()
if out is not None:
return self.encode_all_ids( trans, out.to_dict('element'), True)
https://bitbucket.org/galaxy/galaxy-central/commits/7a74c3b3fa1e/
Changeset: 7a74c3b3fa1e
User: nsoranzo
Date: 2014-02-26 19:38:43
Summary: Add documentation to workflows API create().
Affected #: 1 file
diff -r e662438a3641ec18fdb396348f190664832d80ef -r 7a74c3b3fa1e8ea52882fbfde4d3037519789390 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -143,7 +143,28 @@
We're not creating workflows from the api. Just execute for now.
- However, we will import them if installed_repository_file is specified
+ However, we will import them if installed_repository_file is specified.
+
+ :param installed_repository_file The path of a workflow to import. Either workflow_id or installed_repository_file must be specified
+ :type installed_repository_file str
+
+ :param workflow_id: an existing workflow id. Either workflow_id or installed_repository_file must be specified
+ :type workflow_id: str
+
+ :param parameters: See _update_step_parameters()
+ :type parameters: dict
+
+ :param ds_map: A dictionary mapping each input step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld' or 'hda') and 'id' (which should be the id of a LibraryDatasetDatasetAssociation, LibraryDataset or HistoryDatasetAssociation respectively)
+ :type ds_map: dict
+
+ :param no_add_to_history: if present in the payload with any value, the input datasets will not be added to the selected history
+ :type no_add_to_history: str
+
+ :param history: Either the name of a new history or "hist_id=HIST_ID" where HIST_ID is the id of an existing history
+ :type history: str
+
+ :param replacement_params: A dictionary used when renaming datasets
+ :type replacement_params: dict
"""
# Pull parameters out of payload.
https://bitbucket.org/galaxy/galaxy-central/commits/eb8a8eddb1fc/
Changeset: eb8a8eddb1fc
User: nsoranzo
Date: 2014-02-26 19:54:11
Summary: Workflows API: really allow import with installed_repository_file by not crashing if workflow_id, ds_map or history are not specified.
Reorder code so that a new history is created only after the sanity checks have passed.
Affected #: 1 file
diff -r 7a74c3b3fa1e8ea52882fbfde4d3037519789390 -r eb8a8eddb1fc79913a10832ca73be049a719949e lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -168,11 +168,11 @@
"""
# Pull parameters out of payload.
- workflow_id = payload['workflow_id']
+ workflow_id = payload.get('workflow_id', None)
param_map = payload.get('parameters', {})
- ds_map = payload['ds_map']
+ ds_map = payload.get('ds_map', {})
add_to_history = 'no_add_to_history' not in payload
- history_param = payload['history']
+ history_param = payload.get('history', '')
# Get/create workflow.
if not workflow_id:
@@ -198,6 +198,20 @@
return("Workflow is not owned by or shared with current user")
workflow = stored_workflow.latest_workflow
+ # Sanity checks.
+ if not workflow:
+ trans.response.status = 400
+ return "Workflow not found."
+ if len( workflow.steps ) == 0:
+ trans.response.status = 400
+ return "Workflow cannot be run because it does not have any steps"
+ if workflow.has_cycles:
+ trans.response.status = 400
+ return "Workflow cannot be run because it contains cycles"
+ if workflow.has_errors:
+ trans.response.status = 400
+ return "Workflow cannot be run because of validation errors in some steps"
+
# Get target history.
if history_param.startswith('hist_id='):
#Passing an existing history to use.
@@ -241,22 +255,7 @@
trans.response.status = 400
return "Invalid Dataset '%s' Specified" % ds_map[k]['id']
- # Sanity checks.
- if not workflow:
- trans.response.status = 400
- return "Workflow not found."
- if len( workflow.steps ) == 0:
- trans.response.status = 400
- return "Workflow cannot be run because it does not have any steps"
- if workflow.has_cycles:
- trans.response.status = 400
- return "Workflow cannot be run because it contains cycles"
- if workflow.has_errors:
- trans.response.status = 400
- return "Workflow cannot be run because of validation errors in some steps"
-
# Build the state for each step
- rval = {}
for step in workflow.steps:
step_errors = None
input_connections_by_name = {}
@@ -291,12 +290,7 @@
step.state = step.module.get_runtime_state()
# Run each step, connecting outputs to inputs
- outputs = util.odict.odict()
- rval['history'] = trans.security.encode_id(history.id)
- rval['outputs'] = []
-
replacement_dict = payload.get('replacement_params', {})
-
outputs = invoke(
trans=trans,
workflow=workflow,
@@ -308,6 +302,9 @@
# Build legacy output - should probably include more information from
# outputs.
+ rval = {}
+ rval['history'] = trans.security.encode_id(history.id)
+ rval['outputs'] = []
for step in workflow.steps:
if step.type == 'tool' or step.type is None:
for v in outputs[ step.id ].itervalues():
https://bitbucket.org/galaxy/galaxy-central/commits/d66016004bca/
Changeset: d66016004bca
User: jmchilton
Date: 2014-03-04 14:39:06
Summary: Merged in nsoranzo/galaxy-central (pull request #341)
Workflows API enhancements (pull request #337 corrected)
Affected #: 1 file
diff -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 -r d66016004bca809cb3bc1fcd630c6d336bf92a9e lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -79,7 +79,7 @@
user=trans.user ).join( 'stored_workflow' ).filter(
trans.app.model.StoredWorkflow.deleted == False ).order_by(
desc( trans.app.model.StoredWorkflow.update_time ) ).all():
- item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id })
+ item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id } )
encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id)
item['url'] = url_for( 'workflow', id=encoded_id )
rval.append(item)
@@ -127,6 +127,8 @@
steps[step.id] = {'id': step.id,
'type': step.type,
'tool_id': step.tool_id,
+ 'tool_version': step.tool_version,
+ 'tool_inputs': step.tool_inputs,
'input_steps': {}}
for conn in step.input_connections:
steps[step.id]['input_steps'][conn.input_name] = {'source_step': conn.output_step_id,
@@ -139,17 +141,38 @@
"""
POST /api/workflows
- We're not creating workflows from the api. Just execute for now.
+ We're not creating workflows from the api. Just execute for now.
- However, we will import them if installed_repository_file is specified
+ However, we will import them if installed_repository_file is specified.
+
+ :param installed_repository_file The path of a workflow to import. Either workflow_id or installed_repository_file must be specified
+ :type installed_repository_file str
+
+ :param workflow_id: an existing workflow id. Either workflow_id or installed_repository_file must be specified
+ :type workflow_id: str
+
+ :param parameters: See _update_step_parameters()
+ :type parameters: dict
+
+ :param ds_map: A dictionary mapping each input step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld' or 'hda') and 'id' (which should be the id of a LibraryDatasetDatasetAssociation, LibraryDataset or HistoryDatasetAssociation respectively)
+ :type ds_map: dict
+
+ :param no_add_to_history: if present in the payload with any value, the input datasets will not be added to the selected history
+ :type no_add_to_history: str
+
+ :param history: Either the name of a new history or "hist_id=HIST_ID" where HIST_ID is the id of an existing history
+ :type history: str
+
+ :param replacement_params: A dictionary used when renaming datasets
+ :type replacement_params: dict
"""
# Pull parameters out of payload.
- workflow_id = payload['workflow_id']
+ workflow_id = payload.get('workflow_id', None)
param_map = payload.get('parameters', {})
- ds_map = payload['ds_map']
+ ds_map = payload.get('ds_map', {})
add_to_history = 'no_add_to_history' not in payload
- history_param = payload['history']
+ history_param = payload.get('history', '')
# Get/create workflow.
if not workflow_id:
@@ -175,6 +198,20 @@
return("Workflow is not owned by or shared with current user")
workflow = stored_workflow.latest_workflow
+ # Sanity checks.
+ if not workflow:
+ trans.response.status = 400
+ return "Workflow not found."
+ if len( workflow.steps ) == 0:
+ trans.response.status = 400
+ return "Workflow cannot be run because it does not have any steps"
+ if workflow.has_cycles:
+ trans.response.status = 400
+ return "Workflow cannot be run because it contains cycles"
+ if workflow.has_errors:
+ trans.response.status = 400
+ return "Workflow cannot be run because of validation errors in some steps"
+
# Get target history.
if history_param.startswith('hist_id='):
#Passing an existing history to use.
@@ -210,7 +247,7 @@
else:
trans.response.status = 400
return "Unknown dataset source '%s' specified." % ds_map[k]['src']
- if add_to_history and hda.history != history:
+ if add_to_history and hda.history != history:
hda = hda.copy()
history.add_dataset(hda)
ds_map[k]['hda'] = hda
@@ -218,22 +255,7 @@
trans.response.status = 400
return "Invalid Dataset '%s' Specified" % ds_map[k]['id']
- # Sanity checks.
- if not workflow:
- trans.response.status = 400
- return "Workflow not found."
- if len( workflow.steps ) == 0:
- trans.response.status = 400
- return "Workflow cannot be run because it does not have any steps"
- if workflow.has_cycles:
- trans.response.status = 400
- return "Workflow cannot be run because it contains cycles"
- if workflow.has_errors:
- trans.response.status = 400
- return "Workflow cannot be run because of validation errors in some steps"
-
# Build the state for each step
- rval = {}
for step in workflow.steps:
step_errors = None
input_connections_by_name = {}
@@ -260,7 +282,7 @@
trans.response.status = 400
return "Workflow cannot be run because of step upgrade messages: %s" % step.upgrade_messages
else:
- # This is an input step. Make sure we have an available input.
+ # This is an input step. Make sure we have an available input.
if step.type == 'data_input' and str(step.id) not in ds_map:
trans.response.status = 400
return "Workflow cannot be run because an expected input step '%s' has no input dataset." % step.id
@@ -268,12 +290,7 @@
step.state = step.module.get_runtime_state()
# Run each step, connecting outputs to inputs
- outputs = util.odict.odict()
- rval['history'] = trans.security.encode_id(history.id)
- rval['outputs'] = []
-
replacement_dict = payload.get('replacement_params', {})
-
outputs = invoke(
trans=trans,
workflow=workflow,
@@ -285,6 +302,9 @@
# Build legacy output - should probably include more information from
# outputs.
+ rval = {}
+ rval['history'] = trans.security.encode_id(history.id)
+ rval['outputs'] = []
for step in workflow.steps:
if step.type == 'tool' or step.type is None:
for v in outputs[ step.id ].itervalues():
@@ -387,7 +407,7 @@
"""
# Pull parameters out of payload.
workflow_id = payload.get('workflow_id', None)
- if workflow_id == None:
+ if workflow_id is None:
raise exceptions.ObjectAttributeMissingException( "Missing required parameter 'workflow_id'." )
try:
stored_workflow = self.get_stored_workflow( trans, workflow_id, check_ownership=False )
@@ -452,7 +472,7 @@
if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
raise exceptions.ItemOwnershipException()
results = trans.sa_session.query(self.app.model.WorkflowInvocation).filter(self.app.model.WorkflowInvocation.workflow_id==stored_workflow.latest_workflow_id)
- results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id))
+ results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id))
out = results.first()
if out is not None:
return self.encode_all_ids( trans, out.to_dict('element'), True)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6956c4b2d9cd/
Changeset: 6956c4b2d9cd
User: dan
Date: 2014-03-03 23:21:50
Summary: Delimit Biostar tags by commas.
Affected #: 1 file
diff -r b6138d0f8a753cfcc76881d18c0e4cac6298a921 -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e lib/galaxy/webapps/galaxy/controllers/biostar.py
--- a/lib/galaxy/webapps/galaxy/controllers/biostar.py
+++ b/lib/galaxy/webapps/galaxy/controllers/biostar.py
@@ -135,6 +135,6 @@
# Tool specific information for payload
payload = { 'title':'Need help with "%s" tool' % ( tool.name ),
'content': '<br /><hr /><p>Tool name: %s</br>Tool version: %s</br>Tool ID: %s</p>' % ( tool.name, tool.version, tool.id ),
- 'tag_val': 'galaxy ' + tag_for_tool( tool ) }
+ 'tag_val': ','.join( [ 'galaxy', tag_for_tool( tool ) ] ) }
# Pass on to regular question method
return self.biostar_question_redirect( trans, payload )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b6138d0f8a75/
Changeset: b6138d0f8a75
User: davebgx
Date: 2014-03-03 21:35:26
Summary: Update setup_r_environment to work with the elimination of fabric.
Affected #: 1 file
diff -r 957e0c7548626c2532af6fd00387ec1568b7dce0 -r b6138d0f8a753cfcc76881d18c0e4cac6298a921 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -296,7 +296,7 @@
retain control over the process. This method is named "complex" because it uses queues and
threads to execute a command while capturing and displaying the output.
"""
- wrapped_command = shlex.split( "/bin/sh -c '%s'" % str( command ) )
+ wrapped_command = shlex.split( '/bin/sh -c "%s"' % str( command ) )
# Launch the command as subprocess. A bufsize of 1 means line buffered.
process_handle = subprocess.Popen( wrapped_command,
stdout=subprocess.PIPE,
@@ -607,7 +607,7 @@
with settings( warn_only=True ):
for tarball_name in tarball_names:
cmd = '''PATH=$PATH:$R_HOME/bin; export PATH; R_LIBS=$INSTALL_DIR; export R_LIBS;
- Rscript -e "install.packages(c('%s'),lib='$INSTALL_DIR', repos=NULL, dependencies=FALSE)"''' % ( str( tarball_name ) )
+ Rscript -e \\"install.packages(c('%s'),lib='$INSTALL_DIR', repos=NULL, dependencies=FALSE)\\"''' % ( str( tarball_name ) )
cmd = install_environment.build_command( td_common_util.evaluate_template( cmd, install_dir ) )
return_code = handle_command( app, tool_dependency, install_dir, cmd )
if return_code:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/96bf0719d590/
Changeset: 96bf0719d590
User: afgane
Date: 2014-03-03 17:39:40
Summary: Add kwd to get_dbkeys method signature to fix the API call that includes the actual API key in the request
Affected #: 1 file
diff -r edd17b90c126d73451fe0a080ba4c75cff5a92e5 -r 96bf0719d590025a818828245e66e3b08de9f828 lib/galaxy/visualization/genomes.py
--- a/lib/galaxy/visualization/genomes.py
+++ b/lib/galaxy/visualization/genomes.py
@@ -210,7 +210,7 @@
rval = self.genomes[ dbkey ]
return rval
- def get_dbkeys( self, trans, chrom_info=False ):
+ def get_dbkeys( self, trans, chrom_info=False, **kwd ):
""" Returns all known dbkeys. If chrom_info is True, only dbkeys with
chromosome lengths are returned. """
dbkeys = []
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/edd17b90c126/
Changeset: edd17b90c126
User: dan
Date: 2014-03-03 16:09:19
Summary: Use qualified url for biostar_url_redirect due to javascript always prepending galaxy base prefix (e.g. usually /) unless url contains //
Affected #: 1 file
diff -r 4da230085f06db45563ab3ee27d07848dd8950de -r edd17b90c126d73451fe0a080ba4c75cff5a92e5 templates/webapps/galaxy/galaxy.masthead.mako
--- a/templates/webapps/galaxy/galaxy.masthead.mako
+++ b/templates/webapps/galaxy/galaxy.masthead.mako
@@ -40,7 +40,7 @@
'enable_cloud_launch' : app.config.get_bool('enable_cloud_launch', False),
'lims_doc_url' : app.config.get("lims_doc_url", "http://main.g2.bx.psu.edu/u/rkchak/p/sts"),
'biostar_url' : app.config.biostar_url,
- 'biostar_url_redirect' : h.url_for(controller='biostar', action='biostar_redirect', biostar_action='show_tag_galaxy'),
+ 'biostar_url_redirect' : h.url_for( controller='biostar', action='biostar_redirect', biostar_action='show_tag_galaxy', qualified=True ),
'support_url' : app.config.get("support_url", "http://wiki.galaxyproject.org/Support"),
'search_url' : app.config.get("search_url", "http://galaxyproject.org/search/usegalaxy/"),
'mailing_lists' : app.config.get("mailing_lists", "http://wiki.galaxyproject.org/MailingLists"),
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.