galaxy-commits
March 2014
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b1b8acdf8c72/
Changeset: b1b8acdf8c72
Branch: stable
User: jmchilton
Date: 2014-03-04 14:47:58
Summary: Fix Torque CLI runner to recognize complete ('C') state as 'ok'.
Affected #: 1 file
diff -r 7f7da7248d4de5d9f039639db77721d0b22fa6c5 -r b1b8acdf8c7268dfded0a358acfe31308196381a lib/galaxy/jobs/runners/cli_job/torque.py
--- a/lib/galaxy/jobs/runners/cli_job/torque.py
+++ b/lib/galaxy/jobs/runners/cli_job/torque.py
@@ -131,4 +131,5 @@
def __get_job_state(self, state):
return { 'E' : job_states.RUNNING,
'R' : job_states.RUNNING,
- 'Q' : job_states.QUEUED }.get(state, state)
+ 'Q' : job_states.QUEUED,
+ 'C' : job_states.OK }.get(state, state)
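For reference, a minimal standalone sketch of how the updated lookup behaves: known Torque codes map to Galaxy job states, while anything unrecognized falls through unchanged because of the dict.get(state, state) default. The job_states class below is a stand-in for Galaxy's real enum, not the actual import.

    # Stand-in for Galaxy's job_states enum; values are illustrative only.
    class job_states:
        QUEUED = 'queued'
        RUNNING = 'running'
        OK = 'ok'

    def get_job_state(state):
        # 'C' (complete) now maps to OK; unknown codes pass through unchanged.
        return {'E': job_states.RUNNING,
                'R': job_states.RUNNING,
                'Q': job_states.QUEUED,
                'C': job_states.OK}.get(state, state)

    assert get_job_state('C') == job_states.OK
    assert get_job_state('H') == 'H'  # e.g. a held job is left as the raw code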
https://bitbucket.org/galaxy/galaxy-central/commits/ed9228eb4c05/
Changeset: ed9228eb4c05
User: jmchilton
Date: 2014-03-04 14:48:28
Summary: Merge stable.
Affected #: 1 file
diff -r d66016004bca809cb3bc1fcd630c6d336bf92a9e -r ed9228eb4c05ffba9c4c4dd9b496596c5725d306 lib/galaxy/jobs/runners/cli_job/torque.py
--- a/lib/galaxy/jobs/runners/cli_job/torque.py
+++ b/lib/galaxy/jobs/runners/cli_job/torque.py
@@ -131,4 +131,5 @@
def __get_job_state(self, state):
return { 'E' : job_states.RUNNING,
'R' : job_states.RUNNING,
- 'Q' : job_states.QUEUED }.get(state, state)
+ 'Q' : job_states.QUEUED,
+ 'C' : job_states.OK }.get(state, state)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e662438a3641/
Changeset: e662438a3641
User: nsoranzo
Date: 2014-02-26 19:27:17
Summary: Workflow API: add tool version and parameters for each step when showing a workflow.
Otherwise, getting this information requires an extra API call to download the JSON
representation, followed by a tricky mapping of the step ids.
Affected #: 1 file
diff -r 451b411b4b19ca34ed154352b07b3f49983d1197 -r e662438a3641ec18fdb396348f190664832d80ef lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -79,7 +79,7 @@
user=trans.user ).join( 'stored_workflow' ).filter(
trans.app.model.StoredWorkflow.deleted == False ).order_by(
desc( trans.app.model.StoredWorkflow.update_time ) ).all():
- item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id })
+ item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id } )
encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id)
item['url'] = url_for( 'workflow', id=encoded_id )
rval.append(item)
@@ -127,6 +127,8 @@
steps[step.id] = {'id': step.id,
'type': step.type,
'tool_id': step.tool_id,
+ 'tool_version': step.tool_version,
+ 'tool_inputs': step.tool_inputs,
'input_steps': {}}
for conn in step.input_connections:
steps[step.id]['input_steps'][conn.input_name] = {'source_step': conn.output_step_id,
@@ -139,7 +141,7 @@
"""
POST /api/workflows
- We're not creating workflows from the api. Just execute for now.
+ We're not creating workflows from the api. Just execute for now.
However, we will import them if installed_repository_file is specified
"""
@@ -210,7 +212,7 @@
else:
trans.response.status = 400
return "Unknown dataset source '%s' specified." % ds_map[k]['src']
- if add_to_history and hda.history != history:
+ if add_to_history and hda.history != history:
hda = hda.copy()
history.add_dataset(hda)
ds_map[k]['hda'] = hda
@@ -260,7 +262,7 @@
trans.response.status = 400
return "Workflow cannot be run because of step upgrade messages: %s" % step.upgrade_messages
else:
- # This is an input step. Make sure we have an available input.
+ # This is an input step. Make sure we have an available input.
if step.type == 'data_input' and str(step.id) not in ds_map:
trans.response.status = 400
return "Workflow cannot be run because an expected input step '%s' has no input dataset." % step.id
@@ -387,7 +389,7 @@
"""
# Pull parameters out of payload.
workflow_id = payload.get('workflow_id', None)
- if workflow_id == None:
+ if workflow_id is None:
raise exceptions.ObjectAttributeMissingException( "Missing required parameter 'workflow_id'." )
try:
stored_workflow = self.get_stored_workflow( trans, workflow_id, check_ownership=False )
@@ -452,7 +454,7 @@
if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
raise exceptions.ItemOwnershipException()
results = trans.sa_session.query(self.app.model.WorkflowInvocation).filter(self.app.model.WorkflowInvocation.workflow_id==stored_workflow.latest_workflow_id)
- results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id))
+ results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id))
out = results.first()
if out is not None:
return self.encode_all_ids( trans, out.to_dict('element'), True)
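For illustration, a hedged sketch of consuming the new fields from the workflow show call; the server URL, API key and ids are placeholders, and it assumes the step dictionary is exposed under a "steps" key and the usual Galaxy convention of passing the API key as a "key" query parameter.

    import requests

    GALAXY_URL = "https://galaxy.example.org"   # placeholder
    API_KEY = "..."                             # placeholder
    workflow_id = "f2db41e1fa331b3e"            # placeholder encoded id

    resp = requests.get("%s/api/workflows/%s" % (GALAXY_URL, workflow_id),
                        params={"key": API_KEY})
    for step_id, step in resp.json()["steps"].items():
        # tool_version and tool_inputs are now present for each step, so no
        # extra download-and-remap round trip is needed.
        print("%s %s %s" % (step_id, step["tool_id"], step.get("tool_version")))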
https://bitbucket.org/galaxy/galaxy-central/commits/7a74c3b3fa1e/
Changeset: 7a74c3b3fa1e
User: nsoranzo
Date: 2014-02-26 19:38:43
Summary: Add documentation to workflows API create().
Affected #: 1 file
diff -r e662438a3641ec18fdb396348f190664832d80ef -r 7a74c3b3fa1e8ea52882fbfde4d3037519789390 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -143,7 +143,28 @@
We're not creating workflows from the api. Just execute for now.
- However, we will import them if installed_repository_file is specified
+ However, we will import them if installed_repository_file is specified.
+
+ :param installed_repository_file The path of a workflow to import. Either workflow_id or installed_repository_file must be specified
+ :type installed_repository_file str
+
+ :param workflow_id: an existing workflow id. Either workflow_id or installed_repository_file must be specified
+ :type workflow_id: str
+
+ :param parameters: See _update_step_parameters()
+ :type parameters: dict
+
+ :param ds_map: A dictionary mapping each input step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld' or 'hda') and 'id' (which should be the id of a LibraryDatasetDatasetAssociation, LibraryDataset or HistoryDatasetAssociation respectively)
+ :type ds_map: dict
+
+ :param no_add_to_history: if present in the payload with any value, the input datasets will not be added to the selected history
+ :type no_add_to_history: str
+
+ :param history: Either the name of a new history or "hist_id=HIST_ID" where HIST_ID is the id of an existing history
+ :type history: str
+
+ :param replacement_params: A dictionary used when renaming datasets
+ :type replacement_params: dict
"""
# Pull parameters out of payload.
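As a worked example of the documented parameters, a hypothetical run payload might look like the following; every id and name here is a placeholder, and this is a sketch of the payload shape rather than output from a real server.

    # Illustrative POST /api/workflows payload; ids and names are placeholders.
    payload = {
        "workflow_id": "f2db41e1fa331b3e",
        "ds_map": {
            "0": {"src": "hda", "id": "bbd44e69cb8906b5"},  # input step 0 <- an HDA
        },
        "parameters": {},                                   # optional per-step overrides
        "history": "hist_id=1cd8e2f6b131e891",              # or a plain name to create a new history
        "replacement_params": {"output_name": "run1"},      # used when renaming datasets
    }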
https://bitbucket.org/galaxy/galaxy-central/commits/eb8a8eddb1fc/
Changeset: eb8a8eddb1fc
User: nsoranzo
Date: 2014-02-26 19:54:11
Summary: Workflows API: really allow import with installed_repository_file by not crashing if workflow_id, ds_map or history are not specified.
Reorder code so that a new history is created only after the sanity checks pass.
Affected #: 1 file
diff -r 7a74c3b3fa1e8ea52882fbfde4d3037519789390 -r eb8a8eddb1fc79913a10832ca73be049a719949e lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -168,11 +168,11 @@
"""
# Pull parameters out of payload.
- workflow_id = payload['workflow_id']
+ workflow_id = payload.get('workflow_id', None)
param_map = payload.get('parameters', {})
- ds_map = payload['ds_map']
+ ds_map = payload.get('ds_map', {})
add_to_history = 'no_add_to_history' not in payload
- history_param = payload['history']
+ history_param = payload.get('history', '')
# Get/create workflow.
if not workflow_id:
@@ -198,6 +198,20 @@
return("Workflow is not owned by or shared with current user")
workflow = stored_workflow.latest_workflow
+ # Sanity checks.
+ if not workflow:
+ trans.response.status = 400
+ return "Workflow not found."
+ if len( workflow.steps ) == 0:
+ trans.response.status = 400
+ return "Workflow cannot be run because it does not have any steps"
+ if workflow.has_cycles:
+ trans.response.status = 400
+ return "Workflow cannot be run because it contains cycles"
+ if workflow.has_errors:
+ trans.response.status = 400
+ return "Workflow cannot be run because of validation errors in some steps"
+
# Get target history.
if history_param.startswith('hist_id='):
#Passing an existing history to use.
@@ -241,22 +255,7 @@
trans.response.status = 400
return "Invalid Dataset '%s' Specified" % ds_map[k]['id']
- # Sanity checks.
- if not workflow:
- trans.response.status = 400
- return "Workflow not found."
- if len( workflow.steps ) == 0:
- trans.response.status = 400
- return "Workflow cannot be run because it does not have any steps"
- if workflow.has_cycles:
- trans.response.status = 400
- return "Workflow cannot be run because it contains cycles"
- if workflow.has_errors:
- trans.response.status = 400
- return "Workflow cannot be run because of validation errors in some steps"
-
# Build the state for each step
- rval = {}
for step in workflow.steps:
step_errors = None
input_connections_by_name = {}
@@ -291,12 +290,7 @@
step.state = step.module.get_runtime_state()
# Run each step, connecting outputs to inputs
- outputs = util.odict.odict()
- rval['history'] = trans.security.encode_id(history.id)
- rval['outputs'] = []
-
replacement_dict = payload.get('replacement_params', {})
-
outputs = invoke(
trans=trans,
workflow=workflow,
@@ -308,6 +302,9 @@
# Build legacy output - should probably include more information from
# outputs.
+ rval = {}
+ rval['history'] = trans.security.encode_id(history.id)
+ rval['outputs'] = []
for step in workflow.steps:
if step.type == 'tool' or step.type is None:
for v in outputs[ step.id ].itervalues():
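A hedged sketch of the import path this enables: with the payload.get() defaults above, a request that supplies only installed_repository_file no longer triggers a KeyError for the missing workflow_id, ds_map and history keys. Server, key and file path are placeholders.

    import requests

    GALAXY_URL = "https://galaxy.example.org"   # placeholder
    API_KEY = "..."                             # placeholder

    # workflow_id, ds_map and history may now be omitted entirely when importing.
    payload = {"installed_repository_file": "/path/to/exported_workflow.ga"}
    resp = requests.post("%s/api/workflows" % GALAXY_URL,
                         params={"key": API_KEY}, json=payload)
    print(resp.status_code)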
https://bitbucket.org/galaxy/galaxy-central/commits/d66016004bca/
Changeset: d66016004bca
User: jmchilton
Date: 2014-03-04 14:39:06
Summary: Merged in nsoranzo/galaxy-central (pull request #341)
Workflows API enhancements (pull request #337 corrected)
Affected #: 1 file
diff -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 -r d66016004bca809cb3bc1fcd630c6d336bf92a9e lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -79,7 +79,7 @@
user=trans.user ).join( 'stored_workflow' ).filter(
trans.app.model.StoredWorkflow.deleted == False ).order_by(
desc( trans.app.model.StoredWorkflow.update_time ) ).all():
- item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id })
+ item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id } )
encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id)
item['url'] = url_for( 'workflow', id=encoded_id )
rval.append(item)
@@ -127,6 +127,8 @@
steps[step.id] = {'id': step.id,
'type': step.type,
'tool_id': step.tool_id,
+ 'tool_version': step.tool_version,
+ 'tool_inputs': step.tool_inputs,
'input_steps': {}}
for conn in step.input_connections:
steps[step.id]['input_steps'][conn.input_name] = {'source_step': conn.output_step_id,
@@ -139,17 +141,38 @@
"""
POST /api/workflows
- We're not creating workflows from the api. Just execute for now.
+ We're not creating workflows from the api. Just execute for now.
- However, we will import them if installed_repository_file is specified
+ However, we will import them if installed_repository_file is specified.
+
+ :param installed_repository_file The path of a workflow to import. Either workflow_id or installed_repository_file must be specified
+ :type installed_repository_file str
+
+ :param workflow_id: an existing workflow id. Either workflow_id or installed_repository_file must be specified
+ :type workflow_id: str
+
+ :param parameters: See _update_step_parameters()
+ :type parameters: dict
+
+ :param ds_map: A dictionary mapping each input step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld' or 'hda') and 'id' (which should be the id of a LibraryDatasetDatasetAssociation, LibraryDataset or HistoryDatasetAssociation respectively)
+ :type ds_map: dict
+
+ :param no_add_to_history: if present in the payload with any value, the input datasets will not be added to the selected history
+ :type no_add_to_history: str
+
+ :param history: Either the name of a new history or "hist_id=HIST_ID" where HIST_ID is the id of an existing history
+ :type history: str
+
+ :param replacement_params: A dictionary used when renaming datasets
+ :type replacement_params: dict
"""
# Pull parameters out of payload.
- workflow_id = payload['workflow_id']
+ workflow_id = payload.get('workflow_id', None)
param_map = payload.get('parameters', {})
- ds_map = payload['ds_map']
+ ds_map = payload.get('ds_map', {})
add_to_history = 'no_add_to_history' not in payload
- history_param = payload['history']
+ history_param = payload.get('history', '')
# Get/create workflow.
if not workflow_id:
@@ -175,6 +198,20 @@
return("Workflow is not owned by or shared with current user")
workflow = stored_workflow.latest_workflow
+ # Sanity checks.
+ if not workflow:
+ trans.response.status = 400
+ return "Workflow not found."
+ if len( workflow.steps ) == 0:
+ trans.response.status = 400
+ return "Workflow cannot be run because it does not have any steps"
+ if workflow.has_cycles:
+ trans.response.status = 400
+ return "Workflow cannot be run because it contains cycles"
+ if workflow.has_errors:
+ trans.response.status = 400
+ return "Workflow cannot be run because of validation errors in some steps"
+
# Get target history.
if history_param.startswith('hist_id='):
#Passing an existing history to use.
@@ -210,7 +247,7 @@
else:
trans.response.status = 400
return "Unknown dataset source '%s' specified." % ds_map[k]['src']
- if add_to_history and hda.history != history:
+ if add_to_history and hda.history != history:
hda = hda.copy()
history.add_dataset(hda)
ds_map[k]['hda'] = hda
@@ -218,22 +255,7 @@
trans.response.status = 400
return "Invalid Dataset '%s' Specified" % ds_map[k]['id']
- # Sanity checks.
- if not workflow:
- trans.response.status = 400
- return "Workflow not found."
- if len( workflow.steps ) == 0:
- trans.response.status = 400
- return "Workflow cannot be run because it does not have any steps"
- if workflow.has_cycles:
- trans.response.status = 400
- return "Workflow cannot be run because it contains cycles"
- if workflow.has_errors:
- trans.response.status = 400
- return "Workflow cannot be run because of validation errors in some steps"
-
# Build the state for each step
- rval = {}
for step in workflow.steps:
step_errors = None
input_connections_by_name = {}
@@ -260,7 +282,7 @@
trans.response.status = 400
return "Workflow cannot be run because of step upgrade messages: %s" % step.upgrade_messages
else:
- # This is an input step. Make sure we have an available input.
+ # This is an input step. Make sure we have an available input.
if step.type == 'data_input' and str(step.id) not in ds_map:
trans.response.status = 400
return "Workflow cannot be run because an expected input step '%s' has no input dataset." % step.id
@@ -268,12 +290,7 @@
step.state = step.module.get_runtime_state()
# Run each step, connecting outputs to inputs
- outputs = util.odict.odict()
- rval['history'] = trans.security.encode_id(history.id)
- rval['outputs'] = []
-
replacement_dict = payload.get('replacement_params', {})
-
outputs = invoke(
trans=trans,
workflow=workflow,
@@ -285,6 +302,9 @@
# Build legacy output - should probably include more information from
# outputs.
+ rval = {}
+ rval['history'] = trans.security.encode_id(history.id)
+ rval['outputs'] = []
for step in workflow.steps:
if step.type == 'tool' or step.type is None:
for v in outputs[ step.id ].itervalues():
@@ -387,7 +407,7 @@
"""
# Pull parameters out of payload.
workflow_id = payload.get('workflow_id', None)
- if workflow_id == None:
+ if workflow_id is None:
raise exceptions.ObjectAttributeMissingException( "Missing required parameter 'workflow_id'." )
try:
stored_workflow = self.get_stored_workflow( trans, workflow_id, check_ownership=False )
@@ -452,7 +472,7 @@
if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
raise exceptions.ItemOwnershipException()
results = trans.sa_session.query(self.app.model.WorkflowInvocation).filter(self.app.model.WorkflowInvocation.workflow_id==stored_workflow.latest_workflow_id)
- results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id))
+ results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id))
out = results.first()
if out is not None:
return self.encode_all_ids( trans, out.to_dict('element'), True)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
8 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/318e2dc3c8f3/
Changeset: 318e2dc3c8f3
User: jmchilton
Date: 2014-02-28 01:06:36
Summary: Initial work on test cases for history contents API.
Affected #: 2 files
diff -r 95517f976cca49f984b89c9fdd5b9208b1a11fcb -r 318e2dc3c8f388c1f07937f9c0cda11b60022e1e test/base/interactor.py
--- a/test/base/interactor.py
+++ b/test/base/interactor.py
@@ -461,16 +461,21 @@
def post_request( url, data, files={} ):
return __multipart_request( url, data, files, verb="POST" )
- def put_request( url ):
- return __urllib_request( url, 'PUT' )
+ def put_request( url, data=None ):
+ if isinstance( data, dict ):
+ assert False, "This test will fail, Galaxy's webob dependency does not parse out urlencoded PUT/PATCH entity data, API will receive empty payload."
+ return __urllib_request( url, 'PUT', json_str=data )
def delete_request( url ):
return __urllib_request( url, 'DELETE' )
- def __urllib_request( url, verb ):
+ def __urllib_request( url, verb, json_str=None ):
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(url)
request.get_method = lambda: verb
+ if json_str:
+ request.add_header( "content-type", "application/json" )
+ request.add_data(json_str)
try:
response = opener.open(request)
return RequestsLikeResponse( response.read(), status_code=response.getcode() )
diff -r 95517f976cca49f984b89c9fdd5b9208b1a11fcb -r 318e2dc3c8f388c1f07937f9c0cda11b60022e1e test/functional/api/test_history_contents.py
--- /dev/null
+++ b/test/functional/api/test_history_contents.py
@@ -0,0 +1,104 @@
+from base import api
+import json
+
+from .helpers import TestsDatasets
+from .helpers import LibraryPopulator
+from base.interactor import (
+ put_request,
+ delete_request,
+)
+
+
+# TODO: Test anonymous access.
+class HistoryContentsApiTestCase( api.ApiTestCase, TestsDatasets ):
+
+ def setUp( self ):
+ super( HistoryContentsApiTestCase, self ).setUp()
+ self.history_id = self._new_history()
+
+ def test_index_hda_summary( self ):
+ hda1 = self._new_dataset( self.history_id )
+ contents_response = self._get( "histories/%s/contents" % self.history_id )
+ hda_summary = self.__check_for_hda( contents_response, hda1 )
+ assert "display_types" not in hda_summary # Quick summary, not full details
+
+ def test_index_hda_all_details( self ):
+ hda1 = self._new_dataset( self.history_id )
+ contents_response = self._get( "histories/%s/contents?details=all" % self.history_id )
+ hda_details = self.__check_for_hda( contents_response, hda1 )
+ self.__assert_hda_has_full_details( hda_details )
+
+ def test_index_hda_detail_by_id( self ):
+ hda1 = self._new_dataset( self.history_id )
+ contents_response = self._get( "histories/%s/contents?details=%s" % ( self.history_id, hda1[ "id" ] ) )
+ hda_details = self.__check_for_hda( contents_response, hda1 )
+ self.__assert_hda_has_full_details( hda_details )
+
+ def test_show_hda( self ):
+ hda1 = self._new_dataset( self.history_id )
+ show_response = self.__show( hda1 )
+ self._assert_status_code_is( show_response, 200 )
+ self.__assert_matches_hda( hda1, show_response.json() )
+
+ def test_hda_copy( self ):
+ hda1 = self._new_dataset( self.history_id )
+ create_data = dict(
+ source='hda',
+ content=hda1[ "id" ],
+ )
+ second_history_id = self._new_history()
+ assert self.__count_contents( second_history_id ) == 0
+ create_response = self._post( "histories/%s/contents" % second_history_id, create_data )
+ self._assert_status_code_is( create_response, 200 )
+ assert self.__count_contents( second_history_id ) == 1
+
+ # TODO
+ #def test_lda_copy( self ):
+ # pass
+
+ def test_update( self ):
+ hda1 = self._new_dataset( self.history_id )
+ self._wait_for_history( self.history_id )
+ assert str( hda1[ "deleted" ] ).lower() == "false"
+ update_url = self._api_url( "histories/%s/contents/%s" % ( self.history_id, hda1[ "id" ] ), use_key=True )
+ # Awkward json.dumps required here because of https://trello.com/c/CQwmCeG6
+ body = json.dumps( dict( deleted=True ) )
+ update_response = put_request( update_url, data=body )
+ self._assert_status_code_is( update_response, 200 )
+ show_response = self.__show( hda1 )
+ assert str( show_response.json()[ "deleted" ] ).lower() == "true"
+
+ def test_delete( self ):
+ hda1 = self._new_dataset( self.history_id )
+ self._wait_for_history( self.history_id )
+ assert str( self.__show( hda1 ).json()[ "deleted" ] ).lower() == "false"
+ url = self._api_url( "histories/%s/contents/%s" % ( self.history_id, hda1["id" ] ), use_key=True )
+ delete_response = delete_request( url )
+ assert delete_response.status_code < 300 # Something in the 200s :).
+ assert str( self.__show( hda1 ).json()[ "deleted" ] ).lower() == "true"
+
+ def __show( self, hda ):
+ show_response = self._get( "histories/%s/contents/%s" % ( self.history_id, hda[ "id" ] ) )
+ return show_response
+
+ def __count_contents( self, history_id=None, **kwds ):
+ if history_id == None:
+ history_id = self.history_id
+ contents_response = self._get( "histories/%s/contents" % history_id, kwds )
+ return len( contents_response.json() )
+
+ def __assert_hda_has_full_details( self, hda_details ):
+ self._assert_has_keys( hda_details, "display_types", "display_apps" )
+
+ def __check_for_hda( self, contents_response, hda ):
+ self._assert_status_code_is( contents_response, 200 )
+ contents = contents_response.json()
+ assert len( contents ) == 1
+ hda_summary = contents[ 0 ]
+ self.__assert_matches_hda( hda, hda_summary )
+ return hda_summary
+
+ def __assert_matches_hda( self, input_hda, query_hda ):
+ self._assert_has_keys( query_hda, "id", "name" )
+ assert input_hda[ "name" ] == query_hda[ "name" ]
+ assert input_hda[ "id" ] == query_hda[ "id" ]
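The put_request change above reflects a webob limitation: urlencoded PUT/PATCH bodies are not parsed, so the tests send a JSON string with an explicit content type. A minimal standalone sketch of that workaround, mirroring the interactor code; the URL and ids are placeholders.

    import json
    import urllib2

    # Placeholders for server, history id, dataset id and API key.
    url = "https://galaxy.example.org/api/histories/HIST_ID/contents/HDA_ID?key=API_KEY"
    request = urllib2.Request(url, data=json.dumps({"deleted": True}),
                              headers={"Content-Type": "application/json"})
    request.get_method = lambda: "PUT"          # urllib2 only speaks GET/POST natively
    response = urllib2.urlopen(request)
    print(response.getcode())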
https://bitbucket.org/galaxy/galaxy-central/commits/deefb8ee758d/
Changeset: deefb8ee758d
User: jmchilton
Date: 2014-02-28 01:06:36
Summary: Include root folder id in output when creating library.
This prevents the need to hit the API one more time to get the root folder ID before creating datasets/folders/etc... in the library.
Affected #: 1 file
diff -r 318e2dc3c8f388c1f07937f9c0cda11b60022e1e -r deefb8ee758d334b8e558a54f284463cba938f54 lib/galaxy/webapps/galaxy/api/libraries.py
--- a/lib/galaxy/webapps/galaxy/api/libraries.py
+++ b/lib/galaxy/webapps/galaxy/api/libraries.py
@@ -130,6 +130,7 @@
new_library['description'] = description
new_library['synopsis'] = synopsis
new_library['id'] = encoded_id
+ new_library['root_folder_id'] = trans.security.encode_id( root_folder.id )
return new_library
def edit( self, trans, encoded_id, payload, **kwd ):
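A hedged sketch of what this saves a client: the create response now carries root_folder_id, so a folder (or dataset) can be created immediately without a second call to look up the root folder. Server, key and names are placeholders, and library creation assumes an admin API key.

    import requests

    GALAXY_URL = "https://galaxy.example.org"   # placeholder
    API_KEY = "..."                             # placeholder admin key

    library = requests.post("%s/api/libraries" % GALAXY_URL,
                            params={"key": API_KEY},
                            data={"name": "Example Library"}).json()

    # root_folder_id is available straight from the create response.
    folder = {"folder_id": library["root_folder_id"],
              "create_type": "folder",
              "name": "New Folder"}
    requests.post("%s/api/libraries/%s/contents" % (GALAXY_URL, library["id"]),
                  params={"key": API_KEY}, data=folder)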
https://bitbucket.org/galaxy/galaxy-central/commits/10d0dd63522f/
Changeset: 10d0dd63522f
User: jmchilton
Date: 2014-02-28 01:06:36
Summary: Include LDDA state in library dataset to_dict.
Needed to monitor library dataset uploads (in particular for API tests, but really any monitoring will benefit from this).
Affected #: 1 file
diff -r deefb8ee758d334b8e558a54f284463cba938f54 -r 10d0dd63522fa5c777d743bcd58ba5e58aa5eb25 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2127,6 +2127,7 @@
parent_library_id = self.folder.parent_library.id,
folder_id = self.folder_id,
model_class = self.__class__.__name__,
+ state = ldda.state,
name = ldda.name,
file_name = ldda.file_name,
uploaded_by = ldda.user.email,
https://bitbucket.org/galaxy/galaxy-central/commits/1741dfeaa06c/
Changeset: 1741dfeaa06c
User: jmchilton
Date: 2014-02-28 01:06:36
Summary: Infrastructure for library API testing.
Doesn't really test libraries extensively, but adds enough infrastructure to enable a library-dataset-to-history copy test via the API.
Affected #: 3 files
diff -r 10d0dd63522fa5c777d743bcd58ba5e58aa5eb25 -r 1741dfeaa06c3f63f27da96c12ab92950eda7286 test/functional/api/helpers.py
--- a/test/functional/api/helpers.py
+++ b/test/functional/api/helpers.py
@@ -1,6 +1,6 @@
import time
-from json import dumps
-from json import loads
+import json
+import StringIO
from pkg_resources import resource_string
# Simple workflow that takes an input and call cat wrapper on it.
@@ -10,6 +10,8 @@
workflow_random_x2_str = resource_string( __name__, "test_workflow_2.ga" )
+# TODO: Rework this so it is a stand-alone object like workflow
+# populator below instead of mixin.
class TestsDatasets:
def _new_dataset( self, history_id, content='TestData123', **kwds ):
@@ -19,15 +21,7 @@
return run_response.json()["outputs"][0]
def _wait_for_history( self, history_id, assert_ok=False ):
- while True:
- history_details_response = self._get( "histories/%s" % history_id )
- self._assert_status_code_is( history_details_response, 200 )
- history_state = history_details_response.json()[ "state" ]
- if history_state not in [ "running", "queued" ]:
- break
- time.sleep( .1 )
- if assert_ok:
- self.assertEquals( history_state, 'ok' )
+ wait_on_state( lambda: self._get( "histories/%s" % history_id ), assert_ok=assert_ok )
def _new_history( self, **kwds ):
name = kwds.get( "name", "API Test History" )
@@ -60,7 +54,7 @@
def _run_tool_payload( self, tool_id, inputs, history_id, **kwds ):
return dict(
tool_id=tool_id,
- inputs=dumps(inputs),
+ inputs=json.dumps(inputs),
history_id=history_id,
**kwds
)
@@ -73,7 +67,7 @@
self.api_test_case = api_test_case
def load_workflow( self, name, content=workflow_str, add_pja=False ):
- workflow = loads( content )
+ workflow = json.loads( content )
workflow[ "name" ] = name
if add_pja:
tool_step = workflow[ "steps" ][ "2" ]
@@ -93,9 +87,99 @@
def create_workflow( self, workflow, **create_kwds ):
data = dict(
- workflow=dumps( workflow ),
+ workflow=json.dumps( workflow ),
**create_kwds
)
upload_response = self.api_test_case._post( "workflows/upload", data=data )
uploaded_workflow_id = upload_response.json()[ "id" ]
return uploaded_workflow_id
+
+
+class LibraryPopulator( object ):
+
+ def __init__( self, api_test_case ):
+ self.api_test_case = api_test_case
+ self.galaxy_interactor = api_test_case.galaxy_interactor
+
+ def new_private_library( self, name ):
+ library = self.new_library( name )
+ library_id = library[ "id" ]
+
+ role_id = self.user_private_role_id()
+ self.set_permissions( library_id, role_id )
+ return library
+
+ def new_library( self, name ):
+ data = dict( name=name )
+ create_response = self.galaxy_interactor.post( "libraries", data=data, admin=True )
+ return create_response.json()
+
+ def set_permissions( self, library_id, role_id=None ):
+ if role_id:
+ perm_list = json.dumps( role_id )
+ else:
+ perm_list = json.dumps( [] )
+
+ permissions = dict(
+ LIBRARY_ACCESS_in=perm_list,
+ LIBRARY_MODIFY_in=perm_list,
+ LIBRARY_ADD_in=perm_list,
+ LIBRARY_MANAGE_in=perm_list,
+ )
+ self.galaxy_interactor.post( "libraries/%s/permissions" % library_id, data=permissions, admin=True )
+
+ def user_email( self ):
+ users_response = self.galaxy_interactor.get( "users" )
+ users = users_response.json()
+ assert len( users ) == 1
+ return users[ 0 ][ "email" ]
+
+ def user_private_role_id( self ):
+ user_email = self.user_email()
+ roles_response = self.api_test_case.galaxy_interactor.get( "roles", admin=True )
+ users_roles = [ r for r in roles_response.json() if r[ "name" ] == user_email ]
+ assert len( users_roles ) == 1
+ return users_roles[ 0 ][ "id" ]
+
+ def create_dataset_request( self, library, **kwds ):
+ create_data = {
+ "folder_id": kwds.get( "folder_id", library[ "root_folder_id" ] ),
+ "create_type": "file",
+ "files_0|NAME": kwds.get( "name", "NewFile" ),
+ "upload_option": kwds.get( "upload_option", "upload_file" ),
+ "file_type": kwds.get( "file_type", "auto" ),
+ "db_key": kwds.get( "db_key", "?" ),
+ }
+ files = {
+ "files_0|file_data": kwds.get( "file", StringIO.StringIO( kwds.get( "contents", "TestData" ) ) ),
+ }
+ return create_data, files
+
+ def new_library_dataset( self, name, **create_dataset_kwds ):
+ library = self.new_private_library( name )
+ payload, files = self.create_dataset_request( library, **create_dataset_kwds )
+ url_rel = "libraries/%s/contents" % ( library[ "id" ] )
+ dataset = self.api_test_case.galaxy_interactor.post( url_rel, payload, files=files ).json()[0]
+
+ def show():
+ return self.api_test_case.galaxy_interactor.get( "libraries/%s/contents/%s" % ( library[ "id" ], dataset[ "id" ] ) )
+
+ wait_on_state(show)
+ return show().json()
+
+
+def wait_on_state( state_func, assert_ok=False, timeout=5 ):
+ delta = .1
+ iteration = 0
+ while True:
+ if (delta * iteration) > timeout:
+ assert False, "Timed out waiting on state."
+ iteration += 1
+ response = state_func()
+ assert response.status_code == 200, "Failed to fetch state update while waiting."
+ state = response.json()[ "state" ]
+ if state not in [ "running", "queued", "new" ]:
+ break
+ time.sleep( delta )
+ if assert_ok:
+ assert state == "ok", "Final state - %s - not okay." % state
diff -r 10d0dd63522fa5c777d743bcd58ba5e58aa5eb25 -r 1741dfeaa06c3f63f27da96c12ab92950eda7286 test/functional/api/test_history_contents.py
--- a/test/functional/api/test_history_contents.py
+++ b/test/functional/api/test_history_contents.py
@@ -52,9 +52,16 @@
self._assert_status_code_is( create_response, 200 )
assert self.__count_contents( second_history_id ) == 1
- # TODO
- #def test_lda_copy( self ):
- # pass
+ def test_library_copy( self ):
+ ld = LibraryPopulator( self ).new_library_dataset( "lda_test_library" )
+ create_data = dict(
+ source='library',
+ content=ld[ "id" ],
+ )
+ assert self.__count_contents( self.history_id ) == 0
+ create_response = self._post( "histories/%s/contents" % self.history_id, create_data )
+ self._assert_status_code_is( create_response, 200 )
+ assert self.__count_contents( self.history_id ) == 1
def test_update( self ):
hda1 = self._new_dataset( self.history_id )
diff -r 10d0dd63522fa5c777d743bcd58ba5e58aa5eb25 -r 1741dfeaa06c3f63f27da96c12ab92950eda7286 test/functional/api/test_libraries.py
--- /dev/null
+++ b/test/functional/api/test_libraries.py
@@ -0,0 +1,54 @@
+from base import api
+from .helpers import TestsDatasets
+from .helpers import LibraryPopulator
+from .helpers import wait_on_state
+
+
+class LibrariesApiTestCase( api.ApiTestCase, TestsDatasets ):
+
+ def setUp( self ):
+ super( LibrariesApiTestCase, self ).setUp()
+ self.library_populator = LibraryPopulator( self )
+
+ def test_create( self ):
+ data = dict( name="CreateTestLibrary" )
+ create_response = self._post( "libraries", data=data, admin=True )
+ self._assert_status_code_is( create_response, 200 )
+ library = create_response.json()
+ self._assert_has_keys( library, "name" )
+ assert library[ "name" ] == "CreateTestLibrary"
+
+ def test_create_private_library_permissions( self ):
+ library = self.library_populator.new_library( "PermissionTestLibrary" )
+ library_id = library[ "id" ]
+
+ role_id = self.library_populator.user_private_role_id()
+ self.library_populator.set_permissions( library_id, role_id )
+ create_response = self._create_folder( library )
+ self._assert_status_code_is( create_response, 200 )
+
+ def test_create_dataset( self ):
+ library = self.library_populator.new_private_library( "ForCreateDatasets" )
+ payload, files = self.library_populator.create_dataset_request( library, file_type="txt", contents="create_test" )
+ create_response = self._post( "libraries/%s/contents" % library[ "id" ], payload, files=files )
+ self._assert_status_code_is( create_response, 200 )
+ library_datasets = create_response.json()
+ assert len( library_datasets ) == 1
+ library_dataset = library_datasets[ 0 ]
+
+ def show():
+ return self._get( "libraries/%s/contents/%s" % ( library[ "id" ], library_dataset[ "id" ] ) )
+
+ wait_on_state( show, assert_ok=True )
+ library_dataset = show().json()
+ self._assert_has_keys( library_dataset, "peek", "data_type" )
+ assert library_dataset[ "peek" ].find("create_test") >= 0
+ assert library_dataset[ "data_type" ] == "txt"
+
+ def _create_folder( self, library ):
+ create_data = dict(
+ folder_id=library[ "root_folder_id" ],
+ create_type="folder",
+ name="New Folder",
+ )
+ return self._post( "libraries/%s/contents" % library[ "id" ], data=create_data )
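The wait_on_state helper above is a generic poll-until-terminal-state loop. A simplified standalone variant, using a fake state source instead of HTTP responses, to show the intended behaviour; this is a sketch, not the code Galaxy ships.

    import time

    def wait_on_state(state_func, assert_ok=False, timeout=5):
        # Poll until the reported state leaves the "still working" set or the
        # timeout elapses, mirroring the helper added in this changeset.
        delta = 0.1
        iteration = 0
        while True:
            if (delta * iteration) > timeout:
                raise AssertionError("Timed out waiting on state.")
            iteration += 1
            state = state_func()
            if state not in ["running", "queued", "new"]:
                break
            time.sleep(delta)
        if assert_ok:
            assert state == "ok", "Final state - %s - not okay." % state
        return state

    # Fake state source that completes on the third poll.
    states = iter(["new", "running", "ok"])
    assert wait_on_state(lambda: next(states), assert_ok=True) == "ok"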
https://bitbucket.org/galaxy/galaxy-central/commits/cb6e53fe3c9f/
Changeset: cb6e53fe3c9f
User: jmchilton
Date: 2014-02-28 01:06:36
Summary: Start rework of the API test mixin TestsDatasets into a plain object.
Mirroring the newer WorkflowPopulator and LibraryPopulator.
Affected #: 1 file
diff -r 1741dfeaa06c3f63f27da96c12ab92950eda7286 -r cb6e53fe3c9fda4de94f3f34b6fa87ed2e218a54 test/functional/api/helpers.py
--- a/test/functional/api/helpers.py
+++ b/test/functional/api/helpers.py
@@ -10,27 +10,46 @@
workflow_random_x2_str = resource_string( __name__, "test_workflow_2.ga" )
-# TODO: Rework this so it is a stand-alone object like workflow
-# populator below instead of mixin.
+# Deprecated mixin, use dataset populator instead.
+# TODO: Rework existing tests to target DatasetPopulator in a setup method instead.
class TestsDatasets:
def _new_dataset( self, history_id, content='TestData123', **kwds ):
- payload = self._upload_payload( history_id, content, **kwds )
- run_response = self._post( "tools", data=payload )
- self._assert_status_code_is( run_response, 200 )
+ return DatasetPopulator( self.galaxy_interactor ).new_dataset( history_id, content=content, **kwds)
+
+ def _wait_for_history( self, history_id, assert_ok=False ):
+ return DatasetPopulator( self.galaxy_interactor ).wait_for_history( history_id, assert_ok=assert_ok )
+
+ def _new_history( self, **kwds ):
+ return DatasetPopulator( self.galaxy_interactor ).new_history( **kwds )
+
+ def _upload_payload( self, history_id, content, **kwds ):
+ return DatasetPopulator( self.galaxy_interactor ).upload_payload( history_id, content, **kwds )
+
+ def _run_tool_payload( self, tool_id, inputs, history_id, **kwds ):
+ return DatasetPopulator( self.galaxy_interactor ).run_tool_payload( tool_id, inputs, history_id, **kwds )
+
+
+class DatasetPopulator( object ):
+
+ def __init__( self, galaxy_interactor ):
+ self.galaxy_interactor = galaxy_interactor
+
+ def new_dataset( self, history_id, content='TestData123', **kwds ):
+ payload = self.upload_payload( history_id, content, **kwds )
+ run_response = self.galaxy_interactor.post( "tools", data=payload )
return run_response.json()["outputs"][0]
- def _wait_for_history( self, history_id, assert_ok=False ):
- wait_on_state( lambda: self._get( "histories/%s" % history_id ), assert_ok=assert_ok )
+ def wait_for_history( self, history_id, assert_ok=False ):
+ wait_on_state( lambda: self.galaxy_interactor.get( "histories/%s" % history_id ), assert_ok=assert_ok )
- def _new_history( self, **kwds ):
+ def new_history( self, **kwds ):
name = kwds.get( "name", "API Test History" )
- create_history_response = self._post( "histories", data=dict( name=name ) )
- self._assert_status_code_is( create_history_response, 200 )
+ create_history_response = self.galaxy_interactor.post( "histories", data=dict( name=name ) )
history_id = create_history_response.json()[ "id" ]
return history_id
- def _upload_payload( self, history_id, content, **kwds ):
+ def upload_payload( self, history_id, content, **kwds ):
name = kwds.get( "name", "Test Dataset" )
dbkey = kwds.get( "dbkey", "?" )
file_type = kwds.get( "file_type", 'txt' )
@@ -44,14 +63,14 @@
upload_params[ "files_0|to_posix_lines"] = kwds[ "to_posix_lines" ]
if "space_to_tab" in kwds:
upload_params[ "files_0|space_to_tab" ] = kwds[ "space_to_tab" ]
- return self._run_tool_payload(
+ return self.run_tool_payload(
tool_id='upload1',
inputs=upload_params,
history_id=history_id,
upload_type='upload_dataset'
)
- def _run_tool_payload( self, tool_id, inputs, history_id, **kwds ):
+ def run_tool_payload( self, tool_id, inputs, history_id, **kwds ):
return dict(
tool_id=tool_id,
inputs=json.dumps(inputs),
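A hedged sketch of how a new test might use DatasetPopulator directly instead of the deprecated TestsDatasets mixin; it assumes an ApiTestCase-style class with a galaxy_interactor attribute, as in the diff above, and the test body is illustrative.

    from base import api
    from .helpers import DatasetPopulator


    class ExampleApiTestCase( api.ApiTestCase ):

        def setUp( self ):
            super( ExampleApiTestCase, self ).setUp()
            # Stand-alone populator object replaces the mixin methods.
            self.dataset_populator = DatasetPopulator( self.galaxy_interactor )
            self.history_id = self.dataset_populator.new_history()

        def test_upload( self ):
            hda = self.dataset_populator.new_dataset( self.history_id, content="Example" )
            self.dataset_populator.wait_for_history( self.history_id, assert_ok=True )
            assert hda[ "name" ]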
https://bitbucket.org/galaxy/galaxy-central/commits/94e9f2c1ea68/
Changeset: 94e9f2c1ea68
User: jmchilton
Date: 2014-02-28 01:06:36
Summary: Rework history.contents_iter to potentially support multiple types.
This is utilized downstream in collections work to allow a similar API to be used for loading HistoryDatasetCollectionAssociations as HistoryDatasetAssociations.
Affected #: 3 files
diff -r cb6e53fe3c9fda4de94f3f34b6fa87ed2e218a54 -r 94e9f2c1ea688ed5b66fbfc8766b50d053efed1b lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -19,6 +19,7 @@
import time
from string import Template
from itertools import ifilter
+from itertools import chain
import galaxy.datatypes
import galaxy.datatypes.registry
@@ -964,24 +965,37 @@
"""
Fetch filtered list of contents of history.
"""
- python_filter = None
+ default_contents_types = [
+ 'dataset',
+ ]
+ types = kwds.get('types', default_contents_types)
+ iters = []
+ if 'dataset' in types:
+ iters.append( self.__dataset_contents_iter( **kwds ) )
+ return galaxy.util.merge_sorted_iterables( operator.attrgetter( "hid" ), *iters )
+
+ def __dataset_contents_iter(self, **kwds):
+ return self.__filter_contents( HistoryDatasetAssociation, **kwds )
+
+ def __filter_contents( self, content_class, **kwds ):
db_session = object_session( self )
assert db_session != None
- query = db_session.query( HistoryDatasetAssociation ).filter( HistoryDatasetAssociation.table.c.history_id == self.id )
- query = query.order_by( HistoryDatasetAssociation.table.c.hid.asc() )
+ query = db_session.query( content_class ).filter( content_class.table.c.history_id == self.id )
+ query = query.order_by( content_class.table.c.hid.asc() )
+ python_filter = None
deleted = galaxy.util.string_as_bool_or_none( kwds.get( 'deleted', None ) )
if deleted is not None:
- query = query.filter( HistoryDatasetAssociation.deleted == deleted )
+ query = query.filter( content_class.deleted == deleted )
visible = galaxy.util.string_as_bool_or_none( kwds.get( 'visible', None ) )
if visible is not None:
- query = query.filter( HistoryDatasetAssociation.visible == visible )
+ query = query.filter( content_class.visible == visible )
if 'ids' in kwds:
ids = kwds['ids']
max_in_filter_length = kwds.get('max_in_filter_length', MAX_IN_FILTER_LENGTH)
if len(ids) < max_in_filter_length:
- query = query.filter( HistoryDatasetAssociation.id.in_(ids) )
+ query = query.filter( content_class.id.in_(ids) )
else:
- python_filter = lambda hda: hda.id in ids
+ python_filter = lambda content: content.id in ids
if python_filter:
return ifilter(python_filter, query)
else:
diff -r cb6e53fe3c9fda4de94f3f34b6fa87ed2e218a54 -r 94e9f2c1ea688ed5b66fbfc8766b50d053efed1b lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -427,6 +427,59 @@
return os.path.commonprefix( [ file, directory ] ) == directory
+def merge_sorted_iterables( operator, *iterables ):
+ """
+
+ >>> operator = lambda x: x
+ >>> list( merge_sorted_iterables( operator, [1,2,3], [4,5] ) )
+ [1, 2, 3, 4, 5]
+ >>> list( merge_sorted_iterables( operator, [4, 5], [1,2,3] ) )
+ [1, 2, 3, 4, 5]
+ >>> list( merge_sorted_iterables( operator, [1, 4, 5], [2], [3] ) )
+ [1, 2, 3, 4, 5]
+ """
+ first_iterable = iterables[ 0 ]
+ if len( iterables ) == 1:
+ for el in first_iterable:
+ yield el
+ else:
+ for el in __merge_two_sorted_iterables(
+ operator,
+ iter( first_iterable ),
+ merge_sorted_iterables( operator, *iterables[ 1: ] )
+ ):
+ yield el
+
+
+def __merge_two_sorted_iterables( operator, iterable1, iterable2 ):
+ unset = object()
+ continue_merge = True
+ next_1 = unset
+ next_2 = unset
+ while continue_merge:
+ try:
+ if next_1 is unset:
+ next_1 = next( iterable1 )
+ if next_2 is unset:
+ next_2 = next( iterable2 )
+ if operator( next_2 ) < operator( next_1 ):
+ yield next_2
+ next_2 = unset
+ else:
+ yield next_1
+ next_1 = unset
+ except StopIteration:
+ continue_merge = False
+ if next_1 is not unset:
+ yield next_1
+ if next_2 is not unset:
+ yield next_2
+ for el in iterable1:
+ yield el
+ for el in iterable2:
+ yield el
+
+
class Params( object ):
"""
Stores and 'sanitizes' parameters. Alphanumeric characters and the
diff -r cb6e53fe3c9fda4de94f3f34b6fa87ed2e218a54 -r 94e9f2c1ea688ed5b66fbfc8766b50d053efed1b lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -51,7 +51,13 @@
else:
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=True )
- contents_kwds = {}
+ types = kwd.get( 'types', None ) or []
+ if types:
+ types = util.listify(types)
+ else:
+ types = ['datasets']
+
+ contents_kwds = {'types': types}
if ids:
ids = map( lambda id: trans.security.decode_id( id ), ids.split( ',' ) )
contents_kwds[ 'ids' ] = ids
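To make the merge step concrete, a small sketch of merge_sorted_iterables interleaving two hid-sorted content streams, the way contents_iter will once a second content type is added; the Item tuples stand in for history items, and importing from galaxy.util assumes a Galaxy checkout on the Python path (the doctests in the diff show the same behaviour without it).

    from collections import namedtuple
    from operator import attrgetter

    from galaxy.util import merge_sorted_iterables  # added in this changeset

    Item = namedtuple( "Item", [ "hid", "name" ] )

    datasets = [ Item( 1, "dataset A" ), Item( 3, "dataset B" ) ]
    collections = [ Item( 2, "collection X" ) ]  # hypothetical second content type

    merged = list( merge_sorted_iterables( attrgetter( "hid" ), datasets, collections ) )
    assert [ item.hid for item in merged ] == [ 1, 2, 3 ]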
https://bitbucket.org/galaxy/galaxy-central/commits/eb02193da452/
Changeset: eb02193da452
User: jmchilton
Date: 2014-02-28 01:06:36
Summary: Rework history_contents API to make room for multiple 'type's of history contents.
Downstream, dataset collections are being added and can be associated with histories. They have hids, can be deleted, etc. - many of the same properties as HDAs. This refactoring "makes room" for them in the history contents API by allowing a type to be passed in.
Since HDAs and HDCAs can have the same id, the path history/{history_id}/contents/{contents_id} is somewhat ambiguous, so this changeset adds the path history/{history_id}/contents/datasets/{history_dataset_id}, while a matching resource path, history/{history_id}/contents/dataset_collections/{history_dataset_collection_id}, has been added downstream.
The vanilla contents path remains but should be considered deprecated IMO.
Adding a type parameter to the history/{history_id}/contents index route - this will always default to dataset for backward compatibility - but downstream it can be specified as either dataset, dataset_collection, or "dataset,dataset_collection".
Affected #: 2 files
diff -r 94e9f2c1ea688ed5b66fbfc8766b50d053efed1b -r eb02193da4526756703f94ac14ceb189fd9461db lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -33,6 +33,9 @@
:param history_id: encoded id string of the HDA's History
:type ids: str
:param ids: (optional) a comma separated list of encoded `HDA` ids
+ :param types: (optional) kinds of contents to index (currently just
+ dataset, but dataset_collection will be added shortly).
+ :type types: str
:rtype: list
:returns: dictionaries containing summary or detailed HDA information
@@ -51,11 +54,14 @@
else:
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=True )
- types = kwd.get( 'types', None ) or []
+ # Allow passing in type or types - for continuity rest of methods
+ # take in type - but this one can be passed multiple types and
+ # type=dataset,dataset_collection is a bit silly.
+ types = kwd.get( 'type', kwd.get( 'types', None ) ) or []
if types:
types = util.listify(types)
else:
- types = ['datasets']
+ types = [ 'dataset' ]
contents_kwds = {'types': types}
if ids:
@@ -72,13 +78,14 @@
if details and details != 'all':
details = util.listify( details )
- for hda in history.contents_iter( **contents_kwds ):
- encoded_hda_id = trans.security.encode_id( hda.id )
- detailed = details == 'all' or ( encoded_hda_id in details )
- if detailed:
- rval.append( self._detailed_hda_dict( trans, hda ) )
- else:
- rval.append( self._summary_hda_dict( trans, history_id, hda ) )
+ for content in history.contents_iter( **contents_kwds ):
+ if isinstance(content, trans.app.model.HistoryDatasetAssociation):
+ encoded_content_id = trans.security.encode_id( content.id )
+ detailed = details == 'all' or ( encoded_content_id in details )
+ if detailed:
+ rval.append( self._detailed_hda_dict( trans, content ) )
+ else:
+ rval.append( self._summary_hda_dict( trans, history_id, content ) )
except Exception, e:
# for errors that are not specific to one hda (history lookup or summary list)
rval = "Error in history API at listing contents: " + str( e )
@@ -144,6 +151,13 @@
:returns: dictionary containing detailed HDA information
.. seealso:: :func:`galaxy.web.base.controller.UsesHistoryDatasetAssociationMixin.get_hda_dict`
"""
+ contents_type = kwd.get('type', 'dataset')
+ if contents_type == 'dataset':
+ return self.__show_dataset( trans, id, history_id, **kwd )
+ else:
+ return self.__handle_unknown_contents_type( trans, contents_type )
+
+ def __show_dataset( self, trans, id, history_id, **kwd ):
try:
hda = self.get_history_dataset_association_from_ids( trans, id, history_id )
hda_dict = self.get_hda_dict( trans, hda )
@@ -184,11 +198,6 @@
#TODO: copy existing, accessible hda - dataset controller, copy_datasets
#TODO: convert existing, accessible hda - model.DatasetInstance(or hda.datatype).get_converter_types
# check parameters
- source = payload.get('source', None)
- content = payload.get('content', None)
- if source not in ['library', 'hda'] or content is None:
- trans.response.status = 400
- return "Please define the source ('library' or 'hda') and the content."
# retrieve history
try:
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False )
@@ -196,6 +205,18 @@
# no way to tell if it failed bc of perms or other (all MessageExceptions)
trans.response.status = 500
return str( e )
+ type = payload.get('type', 'dataset')
+ if type == 'dataset':
+ return self.__create_dataset( trans, history, payload, **kwd )
+ else:
+ return self.__handle_unknown_contents_type( trans, type )
+
+ def __create_dataset( self, trans, history, payload, **kwd ):
+ source = payload.get('source', None)
+ content = payload.get('content', None)
+ if source not in ['library', 'hda'] or content is None:
+ trans.response.status = 400
+ return "Please define the source ('library' or 'hda') and the content."
# copy from library dataset
if source == 'library':
# get library data set
@@ -227,7 +248,7 @@
return str( msg_exc )
except Exception, exc:
trans.response.status = 500
- log.exception( "history: %s, source: %s, content: %s", history_id, source, content )
+ log.exception( "history: %s, source: %s, content: %s", trans.security.encode_id(history.id), source, content )
return str( exc )
data_copy=hda.copy( copy_children=True )
result=history.add_dataset( data_copy )
@@ -261,6 +282,13 @@
any values that were different from the original and, therefore, updated
"""
#TODO: PUT /api/histories/{encoded_history_id} payload = { rating: rating } (w/ no security checks)
+ contents_type = kwd.get('type', 'dataset')
+ if contents_type == "dataset":
+ return self.__update_dataset( trans, history_id, id, payload, **kwd )
+ else:
+ return self.__handle_unknown_contents_type( contents_type )
+
+ def __update_dataset( self, trans, history_id, id, payload, **kwd ):
changed = {}
try:
# anon user
@@ -329,6 +357,13 @@
* deleted: if the history was marked as deleted,
* purged: if the history was purged
"""
+ contents_type = kwd.get('type', 'dataset')
+ if contents_type == "dataset":
+ return self.__delete_dataset( trans, history_id, id, purge=purge, **kwd )
+ else:
+ return self.__handle_unknown_contents_type( trans, contents_type )
+
+ def __delete_dataset( self, trans, history_id, id, purge, **kwd ):
# get purge from the query or from the request body payload (a request body is optional here)
purge = util.string_as_bool( purge )
if kwd.get( 'payload', None ):
@@ -413,3 +448,8 @@
pass
#log.warn( 'unknown key: %s', str( key ) )
return validated_payload
+
+ def __handle_unknown_contents_type( self, trans, contents_type ):
+ # TODO: raise a message exception instead of setting status and returning dict.
+ trans.response.status = 400
+ return { 'error': 'Unknown contents type %s' % type }
diff -r 94e9f2c1ea688ed5b66fbfc8766b50d053efed1b -r eb02193da4526756703f94ac14ceb189fd9461db lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -75,6 +75,19 @@
webapp.add_api_controllers( 'galaxy.webapps.galaxy.api', app )
+ valid_history_contents_types = [
+ 'dataset',
+ ]
+ # This must come before history contents below.
+ # Accesss HDA details via histories/:history_id/contents/datasets/:hda_id
+ webapp.mapper.resource( "typed_content",
+ "{type:%s}s" % "|".join( valid_history_contents_types ),
+ name_prefix="history_content_",
+ controller='history_contents',
+ path_prefix='/api/histories/:history_id/contents',
+ parent_resources=dict( member_name='history', collection_name='histories' ),
+ )
+ # Legacy access to HDA details via histories/:history_id/contents/:hda_id
webapp.mapper.resource( 'content',
'contents',
controller='history_contents',
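A hedged sketch of how a client would address the routes the mapper block above generates: the new typed path is unambiguous, the legacy path remains (deprecated per the commit message), and the index route accepts a types filter that defaults to dataset. Server, key and ids are placeholders.

    import requests

    base = "https://galaxy.example.org/api/histories/%s/contents" % "1cd8e2f6b131e891"  # placeholders
    params = { "key": "..." }                                                           # placeholder key

    # New typed show route: /contents/datasets/{hda_id}
    typed = requests.get( base + "/datasets/bbd44e69cb8906b5", params=params )
    # Legacy show route, still available: /contents/{hda_id}
    legacy = requests.get( base + "/bbd44e69cb8906b5", params=params )

    # Index route with the new (optional) types filter.
    index = requests.get( base, params=dict( params, types="dataset" ) )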
https://bitbucket.org/galaxy/galaxy-central/commits/24f45ea024e4/
Changeset: 24f45ea024e4
User: jmchilton
Date: 2014-03-04 14:25:07
Summary: Merged in jmchilton/galaxy-central-fork-1 (pull request #342)
Refactor History Contents to Support Multiple Types
Affected #: 9 files
diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -19,6 +19,7 @@
import time
from string import Template
from itertools import ifilter
+from itertools import chain
import galaxy.datatypes
import galaxy.datatypes.registry
@@ -964,24 +965,37 @@
"""
Fetch filtered list of contents of history.
"""
- python_filter = None
+ default_contents_types = [
+ 'dataset',
+ ]
+ types = kwds.get('types', default_contents_types)
+ iters = []
+ if 'dataset' in types:
+ iters.append( self.__dataset_contents_iter( **kwds ) )
+ return galaxy.util.merge_sorted_iterables( operator.attrgetter( "hid" ), *iters )
+
+ def __dataset_contents_iter(self, **kwds):
+ return self.__filter_contents( HistoryDatasetAssociation, **kwds )
+
+ def __filter_contents( self, content_class, **kwds ):
db_session = object_session( self )
assert db_session != None
- query = db_session.query( HistoryDatasetAssociation ).filter( HistoryDatasetAssociation.table.c.history_id == self.id )
- query = query.order_by( HistoryDatasetAssociation.table.c.hid.asc() )
+ query = db_session.query( content_class ).filter( content_class.table.c.history_id == self.id )
+ query = query.order_by( content_class.table.c.hid.asc() )
+ python_filter = None
deleted = galaxy.util.string_as_bool_or_none( kwds.get( 'deleted', None ) )
if deleted is not None:
- query = query.filter( HistoryDatasetAssociation.deleted == deleted )
+ query = query.filter( content_class.deleted == deleted )
visible = galaxy.util.string_as_bool_or_none( kwds.get( 'visible', None ) )
if visible is not None:
- query = query.filter( HistoryDatasetAssociation.visible == visible )
+ query = query.filter( content_class.visible == visible )
if 'ids' in kwds:
ids = kwds['ids']
max_in_filter_length = kwds.get('max_in_filter_length', MAX_IN_FILTER_LENGTH)
if len(ids) < max_in_filter_length:
- query = query.filter( HistoryDatasetAssociation.id.in_(ids) )
+ query = query.filter( content_class.id.in_(ids) )
else:
- python_filter = lambda hda: hda.id in ids
+ python_filter = lambda content: content.id in ids
if python_filter:
return ifilter(python_filter, query)
else:
@@ -2127,6 +2141,7 @@
parent_library_id = self.folder.parent_library.id,
folder_id = self.folder_id,
model_class = self.__class__.__name__,
+ state = ldda.state,
name = ldda.name,
file_name = ldda.file_name,
uploaded_by = ldda.user.email,
diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -427,6 +427,59 @@
return os.path.commonprefix( [ file, directory ] ) == directory
+def merge_sorted_iterables( operator, *iterables ):
+ """
+
+ >>> operator = lambda x: x
+ >>> list( merge_sorted_iterables( operator, [1,2,3], [4,5] ) )
+ [1, 2, 3, 4, 5]
+ >>> list( merge_sorted_iterables( operator, [4, 5], [1,2,3] ) )
+ [1, 2, 3, 4, 5]
+ >>> list( merge_sorted_iterables( operator, [1, 4, 5], [2], [3] ) )
+ [1, 2, 3, 4, 5]
+ """
+ first_iterable = iterables[ 0 ]
+ if len( iterables ) == 1:
+ for el in first_iterable:
+ yield el
+ else:
+ for el in __merge_two_sorted_iterables(
+ operator,
+ iter( first_iterable ),
+ merge_sorted_iterables( operator, *iterables[ 1: ] )
+ ):
+ yield el
+
+
+def __merge_two_sorted_iterables( operator, iterable1, iterable2 ):
+ unset = object()
+ continue_merge = True
+ next_1 = unset
+ next_2 = unset
+ while continue_merge:
+ try:
+ if next_1 is unset:
+ next_1 = next( iterable1 )
+ if next_2 is unset:
+ next_2 = next( iterable2 )
+ if operator( next_2 ) < operator( next_1 ):
+ yield next_2
+ next_2 = unset
+ else:
+ yield next_1
+ next_1 = unset
+ except StopIteration:
+ continue_merge = False
+ if next_1 is not unset:
+ yield next_1
+ if next_2 is not unset:
+ yield next_2
+ for el in iterable1:
+ yield el
+ for el in iterable2:
+ yield el
+
+
class Params( object ):
"""
Stores and 'sanitizes' parameters. Alphanumeric characters and the
diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -33,6 +33,9 @@
:param history_id: encoded id string of the HDA's History
:type ids: str
:param ids: (optional) a comma separated list of encoded `HDA` ids
+ :param types: (optional) kinds of contents to index (currently just
+ dataset, but dataset_collection will be added shortly).
+ :type types: str
:rtype: list
:returns: dictionaries containing summary or detailed HDA information
@@ -51,7 +54,16 @@
else:
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=True )
- contents_kwds = {}
+ # Allow passing in type or types - for continuity rest of methods
+ # take in type - but this one can be passed multiple types and
+ # type=dataset,dataset_collection is a bit silly.
+ types = kwd.get( 'type', kwd.get( 'types', None ) ) or []
+ if types:
+ types = util.listify(types)
+ else:
+ types = [ 'dataset' ]
+
+ contents_kwds = {'types': types}
if ids:
ids = map( lambda id: trans.security.decode_id( id ), ids.split( ',' ) )
contents_kwds[ 'ids' ] = ids
@@ -66,13 +78,14 @@
if details and details != 'all':
details = util.listify( details )
- for hda in history.contents_iter( **contents_kwds ):
- encoded_hda_id = trans.security.encode_id( hda.id )
- detailed = details == 'all' or ( encoded_hda_id in details )
- if detailed:
- rval.append( self._detailed_hda_dict( trans, hda ) )
- else:
- rval.append( self._summary_hda_dict( trans, history_id, hda ) )
+ for content in history.contents_iter( **contents_kwds ):
+ if isinstance(content, trans.app.model.HistoryDatasetAssociation):
+ encoded_content_id = trans.security.encode_id( content.id )
+ detailed = details == 'all' or ( encoded_content_id in details )
+ if detailed:
+ rval.append( self._detailed_hda_dict( trans, content ) )
+ else:
+ rval.append( self._summary_hda_dict( trans, history_id, content ) )
except Exception, e:
# for errors that are not specific to one hda (history lookup or summary list)
rval = "Error in history API at listing contents: " + str( e )
@@ -138,6 +151,13 @@
:returns: dictionary containing detailed HDA information
.. seealso:: :func:`galaxy.web.base.controller.UsesHistoryDatasetAssociationMixin.get_hda_dict`
"""
+ contents_type = kwd.get('type', 'dataset')
+ if contents_type == 'dataset':
+ return self.__show_dataset( trans, id, history_id, **kwd )
+ else:
+ return self.__handle_unknown_contents_type( trans, contents_type )
+
+ def __show_dataset( self, trans, id, history_id, **kwd ):
try:
hda = self.get_history_dataset_association_from_ids( trans, id, history_id )
hda_dict = self.get_hda_dict( trans, hda )
@@ -178,11 +198,6 @@
#TODO: copy existing, accessible hda - dataset controller, copy_datasets
#TODO: convert existing, accessible hda - model.DatasetInstance(or hda.datatype).get_converter_types
# check parameters
- source = payload.get('source', None)
- content = payload.get('content', None)
- if source not in ['library', 'hda'] or content is None:
- trans.response.status = 400
- return "Please define the source ('library' or 'hda') and the content."
# retrieve history
try:
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False )
@@ -190,6 +205,18 @@
# no way to tell if it failed bc of perms or other (all MessageExceptions)
trans.response.status = 500
return str( e )
+ type = payload.get('type', 'dataset')
+ if type == 'dataset':
+ return self.__create_dataset( trans, history, payload, **kwd )
+ else:
+ return self.__handle_unknown_contents_type( trans, type )
+
+ def __create_dataset( self, trans, history, payload, **kwd ):
+ source = payload.get('source', None)
+ content = payload.get('content', None)
+ if source not in ['library', 'hda'] or content is None:
+ trans.response.status = 400
+ return "Please define the source ('library' or 'hda') and the content."
# copy from library dataset
if source == 'library':
# get library data set
@@ -221,7 +248,7 @@
return str( msg_exc )
except Exception, exc:
trans.response.status = 500
- log.exception( "history: %s, source: %s, content: %s", history_id, source, content )
+ log.exception( "history: %s, source: %s, content: %s", trans.security.encode_id(history.id), source, content )
return str( exc )
data_copy=hda.copy( copy_children=True )
result=history.add_dataset( data_copy )
@@ -255,6 +282,13 @@
any values that were different from the original and, therefore, updated
"""
#TODO: PUT /api/histories/{encoded_history_id} payload = { rating: rating } (w/ no security checks)
+ contents_type = kwd.get('type', 'dataset')
+ if contents_type == "dataset":
+ return self.__update_dataset( trans, history_id, id, payload, **kwd )
+ else:
+ return self.__handle_unknown_contents_type( trans, contents_type )
+
+ def __update_dataset( self, trans, history_id, id, payload, **kwd ):
changed = {}
try:
# anon user
@@ -323,6 +357,13 @@
* deleted: if the history was marked as deleted,
* purged: if the history was purged
"""
+ contents_type = kwd.get('type', 'dataset')
+ if contents_type == "dataset":
+ return self.__delete_dataset( trans, history_id, id, purge=purge, **kwd )
+ else:
+ return self.__handle_unknown_contents_type( trans, contents_type )
+
+ def __delete_dataset( self, trans, history_id, id, purge, **kwd ):
# get purge from the query or from the request body payload (a request body is optional here)
purge = util.string_as_bool( purge )
if kwd.get( 'payload', None ):
@@ -407,3 +448,8 @@
pass
#log.warn( 'unknown key: %s', str( key ) )
return validated_payload
+
+ def __handle_unknown_contents_type( self, trans, contents_type ):
+ # TODO: raise a message exception instead of setting status and returning dict.
+ trans.response.status = 400
+ return { 'error': 'Unknown contents type %s' % contents_type }
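
With these changes the contents index accepts a type/types filter, and show/create/update/delete dispatch on a contents type that currently only supports 'dataset'; anything else is answered with a 400 and an error dictionary. A rough client-side sketch (the base URL, API key, and encoded ids are placeholders, and the requests library is used purely for illustration):

    import requests

    base_url = "http://localhost:8080/api"    # placeholder Galaxy instance
    api_key = "API_KEY"                       # placeholder API key
    history_id = "ENCODED_HISTORY_ID"         # placeholder encoded history id

    # Index only dataset-type contents (the default, and currently the only type).
    response = requests.get(
        "%s/histories/%s/contents" % (base_url, history_id),
        params={"types": "dataset", "key": api_key},
    )
    for content in response.json():
        print(content["id"], content["name"])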
diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 lib/galaxy/webapps/galaxy/api/libraries.py
--- a/lib/galaxy/webapps/galaxy/api/libraries.py
+++ b/lib/galaxy/webapps/galaxy/api/libraries.py
@@ -130,6 +130,7 @@
new_library['description'] = description
new_library['synopsis'] = synopsis
new_library['id'] = encoded_id
+ new_library['root_folder_id'] = trans.security.encode_id( root_folder.id )
return new_library
def edit( self, trans, encoded_id, payload, **kwd ):
diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -75,6 +75,19 @@
webapp.add_api_controllers( 'galaxy.webapps.galaxy.api', app )
+ valid_history_contents_types = [
+ 'dataset',
+ ]
+ # This must come before history contents below.
+ # Access HDA details via histories/:history_id/contents/datasets/:hda_id
+ webapp.mapper.resource( "typed_content",
+ "{type:%s}s" % "|".join( valid_history_contents_types ),
+ name_prefix="history_content_",
+ controller='history_contents',
+ path_prefix='/api/histories/:history_id/contents',
+ parent_resources=dict( member_name='history', collection_name='histories' ),
+ )
+ # Legacy access to HDA details via histories/:history_id/contents/:hda_id
webapp.mapper.resource( 'content',
'contents',
controller='history_contents',
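
The new typed_content resource exposes HDAs under a type-scoped path while the legacy route keeps working; roughly, both of the URL forms below should reach the history_contents controller (the ids are hypothetical encoded ids):

    history_id = "f2db41e1fa331b3e"  # hypothetical encoded history id
    hda_id = "417e33144b294c21"      # hypothetical encoded HDA id

    typed_url = "/api/histories/%s/contents/datasets/%s" % (history_id, hda_id)
    legacy_url = "/api/histories/%s/contents/%s" % (history_id, hda_id)
    print(typed_url)
    print(legacy_url)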
diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 test/base/interactor.py
--- a/test/base/interactor.py
+++ b/test/base/interactor.py
@@ -461,16 +461,21 @@
def post_request( url, data, files={} ):
return __multipart_request( url, data, files, verb="POST" )
- def put_request( url ):
- return __urllib_request( url, 'PUT' )
+ def put_request( url, data=None ):
+ if isinstance( data, dict ):
+ assert False, "This test will fail, Galaxy's webob dependency does not parse out urlencoded PUT/PATCH entity data, API will receive empty payload."
+ return __urllib_request( url, 'PUT', json_str=data )
def delete_request( url ):
return __urllib_request( url, 'DELETE' )
- def __urllib_request( url, verb ):
+ def __urllib_request( url, verb, json_str=None ):
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(url)
request.get_method = lambda: verb
+ if json_str:
+ request.add_header( "content-type", "application/json" )
+ request.add_data(json_str)
try:
response = opener.open(request)
return RequestsLikeResponse( response.read(), status_code=response.getcode() )
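
Because the bundled webob does not parse urlencoded PUT/PATCH bodies, callers of the updated put_request are expected to pass a JSON string, which __urllib_request then sends with an application/json content type. A minimal usage sketch (the URL and key are placeholders):

    import json
    from base.interactor import put_request  # test helper updated above

    url = "http://localhost:8080/api/histories/HISTORY_ID/contents/HDA_ID?key=API_KEY"  # placeholder
    response = put_request(url, data=json.dumps({"deleted": True}))
    assert response.status_code == 200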
diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 test/functional/api/helpers.py
--- a/test/functional/api/helpers.py
+++ b/test/functional/api/helpers.py
@@ -1,6 +1,6 @@
import time
-from json import dumps
-from json import loads
+import json
+import StringIO
from pkg_resources import resource_string
# Simple workflow that takes an input and call cat wrapper on it.
@@ -10,33 +10,46 @@
workflow_random_x2_str = resource_string( __name__, "test_workflow_2.ga" )
+# Deprecated mixin, use dataset populator instead.
+# TODO: Rework existing tests to target DatasetPopulator in a setup method instead.
class TestsDatasets:
def _new_dataset( self, history_id, content='TestData123', **kwds ):
- payload = self._upload_payload( history_id, content, **kwds )
- run_response = self._post( "tools", data=payload )
- self._assert_status_code_is( run_response, 200 )
+ return DatasetPopulator( self.galaxy_interactor ).new_dataset( history_id, content=content, **kwds)
+
+ def _wait_for_history( self, history_id, assert_ok=False ):
+ return DatasetPopulator( self.galaxy_interactor ).wait_for_history( history_id, assert_ok=assert_ok )
+
+ def _new_history( self, **kwds ):
+ return DatasetPopulator( self.galaxy_interactor ).new_history( **kwds )
+
+ def _upload_payload( self, history_id, content, **kwds ):
+ return DatasetPopulator( self.galaxy_interactor ).upload_payload( history_id, content, **kwds )
+
+ def _run_tool_payload( self, tool_id, inputs, history_id, **kwds ):
+ return DatasetPopulator( self.galaxy_interactor ).run_tool_payload( tool_id, inputs, history_id, **kwds )
+
+
+class DatasetPopulator( object ):
+
+ def __init__( self, galaxy_interactor ):
+ self.galaxy_interactor = galaxy_interactor
+
+ def new_dataset( self, history_id, content='TestData123', **kwds ):
+ payload = self.upload_payload( history_id, content, **kwds )
+ run_response = self.galaxy_interactor.post( "tools", data=payload )
return run_response.json()["outputs"][0]
- def _wait_for_history( self, history_id, assert_ok=False ):
- while True:
- history_details_response = self._get( "histories/%s" % history_id )
- self._assert_status_code_is( history_details_response, 200 )
- history_state = history_details_response.json()[ "state" ]
- if history_state not in [ "running", "queued" ]:
- break
- time.sleep( .1 )
- if assert_ok:
- self.assertEquals( history_state, 'ok' )
+ def wait_for_history( self, history_id, assert_ok=False ):
+ wait_on_state( lambda: self.galaxy_interactor.get( "histories/%s" % history_id ), assert_ok=assert_ok )
- def _new_history( self, **kwds ):
+ def new_history( self, **kwds ):
name = kwds.get( "name", "API Test History" )
- create_history_response = self._post( "histories", data=dict( name=name ) )
- self._assert_status_code_is( create_history_response, 200 )
+ create_history_response = self.galaxy_interactor.post( "histories", data=dict( name=name ) )
history_id = create_history_response.json()[ "id" ]
return history_id
- def _upload_payload( self, history_id, content, **kwds ):
+ def upload_payload( self, history_id, content, **kwds ):
name = kwds.get( "name", "Test Dataset" )
dbkey = kwds.get( "dbkey", "?" )
file_type = kwds.get( "file_type", 'txt' )
@@ -50,17 +63,17 @@
upload_params[ "files_0|to_posix_lines"] = kwds[ "to_posix_lines" ]
if "space_to_tab" in kwds:
upload_params[ "files_0|space_to_tab" ] = kwds[ "space_to_tab" ]
- return self._run_tool_payload(
+ return self.run_tool_payload(
tool_id='upload1',
inputs=upload_params,
history_id=history_id,
upload_type='upload_dataset'
)
- def _run_tool_payload( self, tool_id, inputs, history_id, **kwds ):
+ def run_tool_payload( self, tool_id, inputs, history_id, **kwds ):
return dict(
tool_id=tool_id,
- inputs=dumps(inputs),
+ inputs=json.dumps(inputs),
history_id=history_id,
**kwds
)
@@ -73,7 +86,7 @@
self.api_test_case = api_test_case
def load_workflow( self, name, content=workflow_str, add_pja=False ):
- workflow = loads( content )
+ workflow = json.loads( content )
workflow[ "name" ] = name
if add_pja:
tool_step = workflow[ "steps" ][ "2" ]
@@ -93,9 +106,99 @@
def create_workflow( self, workflow, **create_kwds ):
data = dict(
- workflow=dumps( workflow ),
+ workflow=json.dumps( workflow ),
**create_kwds
)
upload_response = self.api_test_case._post( "workflows/upload", data=data )
uploaded_workflow_id = upload_response.json()[ "id" ]
return uploaded_workflow_id
+
+
+class LibraryPopulator( object ):
+
+ def __init__( self, api_test_case ):
+ self.api_test_case = api_test_case
+ self.galaxy_interactor = api_test_case.galaxy_interactor
+
+ def new_private_library( self, name ):
+ library = self.new_library( name )
+ library_id = library[ "id" ]
+
+ role_id = self.user_private_role_id()
+ self.set_permissions( library_id, role_id )
+ return library
+
+ def new_library( self, name ):
+ data = dict( name=name )
+ create_response = self.galaxy_interactor.post( "libraries", data=data, admin=True )
+ return create_response.json()
+
+ def set_permissions( self, library_id, role_id=None ):
+ if role_id:
+ perm_list = json.dumps( role_id )
+ else:
+ perm_list = json.dumps( [] )
+
+ permissions = dict(
+ LIBRARY_ACCESS_in=perm_list,
+ LIBRARY_MODIFY_in=perm_list,
+ LIBRARY_ADD_in=perm_list,
+ LIBRARY_MANAGE_in=perm_list,
+ )
+ self.galaxy_interactor.post( "libraries/%s/permissions" % library_id, data=permissions, admin=True )
+
+ def user_email( self ):
+ users_response = self.galaxy_interactor.get( "users" )
+ users = users_response.json()
+ assert len( users ) == 1
+ return users[ 0 ][ "email" ]
+
+ def user_private_role_id( self ):
+ user_email = self.user_email()
+ roles_response = self.api_test_case.galaxy_interactor.get( "roles", admin=True )
+ users_roles = [ r for r in roles_response.json() if r[ "name" ] == user_email ]
+ assert len( users_roles ) == 1
+ return users_roles[ 0 ][ "id" ]
+
+ def create_dataset_request( self, library, **kwds ):
+ create_data = {
+ "folder_id": kwds.get( "folder_id", library[ "root_folder_id" ] ),
+ "create_type": "file",
+ "files_0|NAME": kwds.get( "name", "NewFile" ),
+ "upload_option": kwds.get( "upload_option", "upload_file" ),
+ "file_type": kwds.get( "file_type", "auto" ),
+ "db_key": kwds.get( "db_key", "?" ),
+ }
+ files = {
+ "files_0|file_data": kwds.get( "file", StringIO.StringIO( kwds.get( "contents", "TestData" ) ) ),
+ }
+ return create_data, files
+
+ def new_library_dataset( self, name, **create_dataset_kwds ):
+ library = self.new_private_library( name )
+ payload, files = self.create_dataset_request( library, **create_dataset_kwds )
+ url_rel = "libraries/%s/contents" % ( library[ "id" ] )
+ dataset = self.api_test_case.galaxy_interactor.post( url_rel, payload, files=files ).json()[0]
+
+ def show():
+ return self.api_test_case.galaxy_interactor.get( "libraries/%s/contents/%s" % ( library[ "id" ], dataset[ "id" ] ) )
+
+ wait_on_state(show)
+ return show().json()
+
+
+def wait_on_state( state_func, assert_ok=False, timeout=5 ):
+ delta = .1
+ iteration = 0
+ while True:
+ if (delta * iteration) > timeout:
+ assert False, "Timed out waiting on state."
+ iteration += 1
+ response = state_func()
+ assert response.status_code == 200, "Failed to fetch state update while waiting."
+ state = response.json()[ "state" ]
+ if state not in [ "running", "queued", "new" ]:
+ break
+ time.sleep( delta )
+ if assert_ok:
+ assert state == "ok", "Final state - %s - not okay." % state
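
wait_on_state polls any callable that returns a response whose JSON body carries a "state" field, so both populators can reuse it. A sketch of how a test might drive DatasetPopulator directly (assuming an interactor object like the one the API test base class provides; the import path simply mirrors this file's location):

    from functional.api.helpers import DatasetPopulator, wait_on_state

    def upload_and_wait(galaxy_interactor):
        populator = DatasetPopulator(galaxy_interactor)
        history_id = populator.new_history()
        populator.new_dataset(history_id, content="some test data")
        # Poll the history until it leaves the new/queued/running states.
        wait_on_state(lambda: galaxy_interactor.get("histories/%s" % history_id), assert_ok=True)
        return history_id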
diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 test/functional/api/test_history_contents.py
--- /dev/null
+++ b/test/functional/api/test_history_contents.py
@@ -0,0 +1,111 @@
+from base import api
+import json
+
+from .helpers import TestsDatasets
+from .helpers import LibraryPopulator
+from base.interactor import (
+ put_request,
+ delete_request,
+)
+
+
+# TODO: Test anonymous access.
+class HistoryContentsApiTestCase( api.ApiTestCase, TestsDatasets ):
+
+ def setUp( self ):
+ super( HistoryContentsApiTestCase, self ).setUp()
+ self.history_id = self._new_history()
+
+ def test_index_hda_summary( self ):
+ hda1 = self._new_dataset( self.history_id )
+ contents_response = self._get( "histories/%s/contents" % self.history_id )
+ hda_summary = self.__check_for_hda( contents_response, hda1 )
+ assert "display_types" not in hda_summary # Quick summary, not full details
+
+ def test_index_hda_all_details( self ):
+ hda1 = self._new_dataset( self.history_id )
+ contents_response = self._get( "histories/%s/contents?details=all" % self.history_id )
+ hda_details = self.__check_for_hda( contents_response, hda1 )
+ self.__assert_hda_has_full_details( hda_details )
+
+ def test_index_hda_detail_by_id( self ):
+ hda1 = self._new_dataset( self.history_id )
+ contents_response = self._get( "histories/%s/contents?details=%s" % ( self.history_id, hda1[ "id" ] ) )
+ hda_details = self.__check_for_hda( contents_response, hda1 )
+ self.__assert_hda_has_full_details( hda_details )
+
+ def test_show_hda( self ):
+ hda1 = self._new_dataset( self.history_id )
+ show_response = self.__show( hda1 )
+ self._assert_status_code_is( show_response, 200 )
+ self.__assert_matches_hda( hda1, show_response.json() )
+
+ def test_hda_copy( self ):
+ hda1 = self._new_dataset( self.history_id )
+ create_data = dict(
+ source='hda',
+ content=hda1[ "id" ],
+ )
+ second_history_id = self._new_history()
+ assert self.__count_contents( second_history_id ) == 0
+ create_response = self._post( "histories/%s/contents" % second_history_id, create_data )
+ self._assert_status_code_is( create_response, 200 )
+ assert self.__count_contents( second_history_id ) == 1
+
+ def test_library_copy( self ):
+ ld = LibraryPopulator( self ).new_library_dataset( "lda_test_library" )
+ create_data = dict(
+ source='library',
+ content=ld[ "id" ],
+ )
+ assert self.__count_contents( self.history_id ) == 0
+ create_response = self._post( "histories/%s/contents" % self.history_id, create_data )
+ self._assert_status_code_is( create_response, 200 )
+ assert self.__count_contents( self.history_id ) == 1
+
+ def test_update( self ):
+ hda1 = self._new_dataset( self.history_id )
+ self._wait_for_history( self.history_id )
+ assert str( hda1[ "deleted" ] ).lower() == "false"
+ update_url = self._api_url( "histories/%s/contents/%s" % ( self.history_id, hda1[ "id" ] ), use_key=True )
+ # Awkward json.dumps required here because of https://trello.com/c/CQwmCeG6
+ body = json.dumps( dict( deleted=True ) )
+ update_response = put_request( update_url, data=body )
+ self._assert_status_code_is( update_response, 200 )
+ show_response = self.__show( hda1 )
+ assert str( show_response.json()[ "deleted" ] ).lower() == "true"
+
+ def test_delete( self ):
+ hda1 = self._new_dataset( self.history_id )
+ self._wait_for_history( self.history_id )
+ assert str( self.__show( hda1 ).json()[ "deleted" ] ).lower() == "false"
+ url = self._api_url( "histories/%s/contents/%s" % ( self.history_id, hda1["id" ] ), use_key=True )
+ delete_response = delete_request( url )
+ assert delete_response.status_code < 300 # Something in the 200s :).
+ assert str( self.__show( hda1 ).json()[ "deleted" ] ).lower() == "true"
+
+ def __show( self, hda ):
+ show_response = self._get( "histories/%s/contents/%s" % ( self.history_id, hda[ "id" ] ) )
+ return show_response
+
+ def __count_contents( self, history_id=None, **kwds ):
+ if history_id == None:
+ history_id = self.history_id
+ contents_response = self._get( "histories/%s/contents" % history_id, kwds )
+ return len( contents_response.json() )
+
+ def __assert_hda_has_full_details( self, hda_details ):
+ self._assert_has_keys( hda_details, "display_types", "display_apps" )
+
+ def __check_for_hda( self, contents_response, hda ):
+ self._assert_status_code_is( contents_response, 200 )
+ contents = contents_response.json()
+ assert len( contents ) == 1
+ hda_summary = contents[ 0 ]
+ self.__assert_matches_hda( hda, hda_summary )
+ return hda_summary
+
+ def __assert_matches_hda( self, input_hda, query_hda ):
+ self._assert_has_keys( query_hda, "id", "name" )
+ assert input_hda[ "name" ] == query_hda[ "name" ]
+ assert input_hda[ "id" ] == query_hda[ "id" ]
diff -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e -r 24f45ea024e47f0e6ee8c427c7450e1f79d0c525 test/functional/api/test_libraries.py
--- /dev/null
+++ b/test/functional/api/test_libraries.py
@@ -0,0 +1,54 @@
+from base import api
+from .helpers import TestsDatasets
+from .helpers import LibraryPopulator
+from .helpers import wait_on_state
+
+
+class LibrariesApiTestCase( api.ApiTestCase, TestsDatasets ):
+
+ def setUp( self ):
+ super( LibrariesApiTestCase, self ).setUp()
+ self.library_populator = LibraryPopulator( self )
+
+ def test_create( self ):
+ data = dict( name="CreateTestLibrary" )
+ create_response = self._post( "libraries", data=data, admin=True )
+ self._assert_status_code_is( create_response, 200 )
+ library = create_response.json()
+ self._assert_has_keys( library, "name" )
+ assert library[ "name" ] == "CreateTestLibrary"
+
+ def test_create_private_library_permissions( self ):
+ library = self.library_populator.new_library( "PermissionTestLibrary" )
+ library_id = library[ "id" ]
+
+ role_id = self.library_populator.user_private_role_id()
+ self.library_populator.set_permissions( library_id, role_id )
+ create_response = self._create_folder( library )
+ self._assert_status_code_is( create_response, 200 )
+
+ def test_create_dataset( self ):
+ library = self.library_populator.new_private_library( "ForCreateDatasets" )
+ payload, files = self.library_populator.create_dataset_request( library, file_type="txt", contents="create_test" )
+ create_response = self._post( "libraries/%s/contents" % library[ "id" ], payload, files=files )
+ self._assert_status_code_is( create_response, 200 )
+ library_datasets = create_response.json()
+ assert len( library_datasets ) == 1
+ library_dataset = library_datasets[ 0 ]
+
+ def show():
+ return self._get( "libraries/%s/contents/%s" % ( library[ "id" ], library_dataset[ "id" ] ) )
+
+ wait_on_state( show, assert_ok=True )
+ library_dataset = show().json()
+ self._assert_has_keys( library_dataset, "peek", "data_type" )
+ assert library_dataset[ "peek" ].find("create_test") >= 0
+ assert library_dataset[ "data_type" ] == "txt"
+
+ def _create_folder( self, library ):
+ create_data = dict(
+ folder_id=library[ "root_folder_id" ],
+ create_type="folder",
+ name="New Folder",
+ )
+ return self._post( "libraries/%s/contents" % library[ "id" ], data=create_data )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Delimit Biostar tags by commas.
by commits-noreply@bitbucket.org 03 Mar '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6956c4b2d9cd/
Changeset: 6956c4b2d9cd
User: dan
Date: 2014-03-03 23:21:50
Summary: Delimit Biostar tags by commas.
Affected #: 1 file
diff -r b6138d0f8a753cfcc76881d18c0e4cac6298a921 -r 6956c4b2d9cdf2244604dd2782c4c5667c52868e lib/galaxy/webapps/galaxy/controllers/biostar.py
--- a/lib/galaxy/webapps/galaxy/controllers/biostar.py
+++ b/lib/galaxy/webapps/galaxy/controllers/biostar.py
@@ -135,6 +135,6 @@
# Tool specific information for payload
payload = { 'title':'Need help with "%s" tool' % ( tool.name ),
'content': '<br /><hr /><p>Tool name: %s</br>Tool version: %s</br>Tool ID: %s</p>' % ( tool.name, tool.version, tool.id ),
- 'tag_val': 'galaxy ' + tag_for_tool( tool ) }
+ 'tag_val': ','.join( [ 'galaxy', tag_for_tool( tool ) ] ) }
# Pass on to regular question method
return self.biostar_question_redirect( trans, payload )
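
For a hypothetical tool named "Filter", the comma-delimited form produces a single well-formed tag list instead of a space-separated string:

    tool_tag = "filter"  # hypothetical slugified tool name from tag_for_tool()
    tag_val = ','.join(['galaxy', tool_tag])
    print(tag_val)  # galaxy,filter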
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: davebgx: Update setup_r_environment to work with the elimination of fabric.
by commits-noreply@bitbucket.org 03 Mar '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b6138d0f8a75/
Changeset: b6138d0f8a75
User: davebgx
Date: 2014-03-03 21:35:26
Summary: Update setup_r_environment to work with the elimination of fabric.
Affected #: 1 file
diff -r 957e0c7548626c2532af6fd00387ec1568b7dce0 -r b6138d0f8a753cfcc76881d18c0e4cac6298a921 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -296,7 +296,7 @@
retain control over the process. This method is named "complex" because it uses queues and
threads to execute a command while capturing and displaying the output.
"""
- wrapped_command = shlex.split( "/bin/sh -c '%s'" % str( command ) )
+ wrapped_command = shlex.split( '/bin/sh -c "%s"' % str( command ) )
# Launch the command as subprocess. A bufsize of 1 means line buffered.
process_handle = subprocess.Popen( wrapped_command,
stdout=subprocess.PIPE,
@@ -607,7 +607,7 @@
with settings( warn_only=True ):
for tarball_name in tarball_names:
cmd = '''PATH=$PATH:$R_HOME/bin; export PATH; R_LIBS=$INSTALL_DIR; export R_LIBS;
- Rscript -e "install.packages(c('%s'),lib='$INSTALL_DIR', repos=NULL, dependencies=FALSE)"''' % ( str( tarball_name ) )
+ Rscript -e \\"install.packages(c('%s'),lib='$INSTALL_DIR', repos=NULL, dependencies=FALSE)\\"''' % ( str( tarball_name ) )
cmd = install_environment.build_command( td_common_util.evaluate_template( cmd, install_dir ) )
return_code = handle_command( app, tool_dependency, install_dir, cmd )
if return_code:
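
The switch from single to double quotes in the /bin/sh -c wrapper matters because the wrapped command (the Rscript call above) itself contains single quotes. A minimal sketch of the difference, using a stand-in echo command instead of the real Rscript invocation:

    import shlex

    # Stand-in for a command that contains single quotes, like the Rscript call above.
    command = "echo 'hello from $INSTALL_DIR'"

    # New-style wrapping: double quotes keep the inner single quotes intact.
    print(shlex.split('/bin/sh -c "%s"' % command))
    # ['/bin/sh', '-c', "echo 'hello from $INSTALL_DIR'"]

    # Old-style wrapping: the inner single quotes terminate the outer quoting and
    # the command is split into extra tokens.
    print(shlex.split("/bin/sh -c '%s'" % command))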
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jeremy goecks: Extend TabularDataTableView to support full page view and an embedded view.
by commits-noreply@bitbucket.org 03 Mar '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/957e0c754862/
Changeset: 957e0c754862
User: jeremy goecks
Date: 2014-03-03 20:04:09
Summary: Extend TabularDataTableView to support full page view and an embedded view.
Affected #: 3 files
diff -r 96bf0719d590025a818828245e66e3b08de9f828 -r 957e0c7548626c2532af6fd00387ec1568b7dce0 static/scripts/mvc/data.js
--- a/static/scripts/mvc/data.js
+++ b/static/scripts/mvc/data.js
@@ -108,13 +108,15 @@
});
/**
- * Provides table-based, dynamic view of a tabular dataset.
- * NOTE: view's el must be in DOM already and provided when
- * createing the view so that scrolling event can be attached
- * to the correct container.
+ * Provides a base for table-based, dynamic view of a tabular dataset.
+ * Do not instantiate directly; use either TopLevelTabularDatasetChunkedView
+ * or EmbeddedTabularDatasetChunkedView.
*/
var TabularDatasetChunkedView = Backbone.View.extend({
+ /**
+ * Initialize view and, importantly, set a scroll element.
+ */
initialize: function(options) {
// Row count for rendering.
this.row_count = 0;
@@ -147,25 +149,15 @@
}
// -- Show new chunks during scrolling. --
-
+
var self = this,
- // Element that does the scrolling.
- scroll_elt = _.find(this.$el.parents(), function(p) {
- return $(p).css('overflow') === 'auto';
- }),
// Flag to ensure that only one chunk is loaded at a time.
loading_chunk = false;
- // If no scrolling element found, use window.
- if (!scroll_elt) { scroll_elt = window; }
-
- // Wrap scrolling element for easy access.
- scroll_elt = $(scroll_elt);
-
// Set up chunk loading when scrolling using the scrolling element.
- scroll_elt.scroll(function() {
+ this.scroll_elt.scroll(function() {
// If not already loading a chunk and have scrolled to the bottom of this element, get next chunk.
- if ( !loading_chunk && (self.$el.height() - scroll_elt.scrollTop() - scroll_elt.height() <= 0) ) {
+ if ( !loading_chunk && self.scrolled_to_bottom() ) {
loading_chunk = true;
$.when(self.model.get_next_chunk()).then(function(result) {
if (result) {
@@ -182,6 +174,13 @@
});
},
+ /**
+ * Returns true if user has scrolled to the bottom of the view.
+ */
+ scrolled_to_bottom: function() {
+ return false;
+ },
+
// -- Helper functions. --
_renderCell: function(cell_contents, index, colspan) {
@@ -246,6 +245,61 @@
}
});
+/**
+ * Tabular view that is placed at the top level of page. Scrolling occurs
+ * via top-level elements outside of the view.
+ */
+var TopLevelTabularDatasetChunkedView = TabularDatasetChunkedView.extend({
+
+ initialize: function(options) {
+ TabularDatasetChunkedView.prototype.initialize.call(this, options);
+
+ // Scrolling happens in top-level elements.
+ scroll_elt = _.find(this.$el.parents(), function(p) {
+ return $(p).css('overflow') === 'auto';
+ });
+
+ // If no scrolling element found, use window.
+ if (!scroll_elt) { scroll_elt = window; }
+
+ // Wrap scrolling element for easy access.
+ this.scroll_elt = $(scroll_elt);
+ },
+
+ /**
+ * Returns true if user has scrolled to the bottom of the view.
+ */
+ scrolled_to_bottom: function() {
+ return (this.$el.height() - this.scroll_elt.scrollTop() - this.scroll_elt.height() <= 0);
+ }
+
+});
+
+/**
+ * Tabular view that is embedded in a page. Scrolling occurs in view's el.
+ */
+var EmbeddedTabularDatasetChunkedView = TabularDatasetChunkedView.extend({
+
+ initialize: function(options) {
+ TabularDatasetChunkedView.prototype.initialize.call(this, options);
+
+ // Because view is embedded, set up div to do scrolling.
+ this.scroll_elt = this.$el.css({
+ position: 'relative',
+ overflow: 'scroll',
+ height: this.options.height || '500px'
+ });
+ },
+
+ /**
+ * Returns true if user has scrolled to the bottom of the view.
+ */
+ scrolled_to_bottom: function() {
+ return this.$el.scrollTop() + this.$el.innerHeight() >= this.el.scrollHeight;
+ }
+
+});
+
// button for trackster visualization
var TabularButtonTracksterView = Backbone.View.extend(
{
@@ -513,15 +567,28 @@
* Create a tabular dataset chunked view (and requisite tabular dataset model)
* and appends to parent_elt.
*/
-var createTabularDatasetChunkedView = function(dataset_config, parent_elt) {
- // Create view element and add to parent.
- var view_div = $('<div/>').appendTo(parent_elt);
+var createTabularDatasetChunkedView = function(options) {
+ // Create and set model.
+ options.model = new TabularDataset(options.dataset_config);
- // default viewer
- return new TabularDatasetChunkedView({
- el: view_div,
- model: new TabularDataset(dataset_config)
- }).render();
+ var parent_elt = options.parent_elt;
+ var embedded = options.embedded;
+
+ // Clean up options so that only needed options are passed to view.
+ delete options.embedded;
+ delete options.parent_elt;
+ delete options.dataset_config;
+
+ // Create and set up view.
+ var view = (embedded ? new EmbeddedTabularDatasetChunkedView(options) :
+ new TopLevelTabularDatasetChunkedView(options));
+ view.render();
+
+ if (parent_elt) {
+ parent_elt.append(view.$el);
+ }
+
+ return view;
};
return {
diff -r 96bf0719d590025a818828245e66e3b08de9f828 -r 957e0c7548626c2532af6fd00387ec1568b7dce0 templates/webapps/galaxy/dataset/display.mako
--- a/templates/webapps/galaxy/dataset/display.mako
+++ b/templates/webapps/galaxy/dataset/display.mako
@@ -23,18 +23,18 @@
//
$('.page-body').children().remove();
- data.createTabularDatasetChunkedView(
- // Dataset config. TODO: encode id.
- _.extend( ${h.to_json_string( item.to_dict() )},
- {
- chunk_url: "${h.url_for( controller='/dataset', action='display',
- dataset_id=trans.security.encode_id( item.id ))}",
- first_data_chunk: ${first_chunk}
- }
- ),
- // Append view to body.
- $('.page-body')
- );
+ data.createTabularDatasetChunkedView({
+ // TODO: encode id.
+ dataset_config:
+ _.extend( ${h.to_json_string( item.to_dict() )},
+ {
+ chunk_url: "${h.url_for( controller='/dataset', action='display',
+ dataset_id=trans.security.encode_id( item.id ))}",
+ first_data_chunk: ${first_chunk}
+ }
+ ),
+ parent_elt: $('.page-body')
+ });
});
</script>
diff -r 96bf0719d590025a818828245e66e3b08de9f828 -r 957e0c7548626c2532af6fd00387ec1568b7dce0 templates/webapps/galaxy/dataset/tabular_chunked.mako
--- a/templates/webapps/galaxy/dataset/tabular_chunked.mako
+++ b/templates/webapps/galaxy/dataset/tabular_chunked.mako
@@ -17,8 +17,8 @@
});
require(['mvc/data'], function(data) {
- data.createTabularDatasetChunkedView(
- _.extend( ${h.to_json_string( trans.security.encode_dict_ids( dataset.to_dict() ) )},
+ data.createTabularDatasetChunkedView({
+ dataset_config: _.extend( ${h.to_json_string( trans.security.encode_dict_ids( dataset.to_dict() ) )},
{
url_viz: "${h.url_for( controller='/visualization')}",
chunk_url: "${h.url_for( controller='/dataset', action='display',
@@ -26,9 +26,8 @@
first_data_chunk: ${chunk}
}
),
- // Append view to body.
- $('body')
- );
+ parent_elt: $('body')
+ });
});
</script></%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: afgane: Add kwd to get_dbkeys method signature to fix the API call that includes the actual API key in the request
by commits-noreply@bitbucket.org 03 Mar '14
by commits-noreply@bitbucket.org 03 Mar '14
03 Mar '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/96bf0719d590/
Changeset: 96bf0719d590
User: afgane
Date: 2014-03-03 17:39:40
Summary: Add kwd to get_dbkeys method signature to fix the API call that includes the actual API key in the request
Affected #: 1 file
diff -r edd17b90c126d73451fe0a080ba4c75cff5a92e5 -r 96bf0719d590025a818828245e66e3b08de9f828 lib/galaxy/visualization/genomes.py
--- a/lib/galaxy/visualization/genomes.py
+++ b/lib/galaxy/visualization/genomes.py
@@ -210,7 +210,7 @@
rval = self.genomes[ dbkey ]
return rval
- def get_dbkeys( self, trans, chrom_info=False ):
+ def get_dbkeys( self, trans, chrom_info=False, **kwd ):
""" Returns all known dbkeys. If chrom_info is True, only dbkeys with
chromosome lengths are returned. """
dbkeys = []
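
The API layer forwards request parameters (including the caller's key) as keyword arguments, so without **kwd the call fails on the unexpected key argument. A minimal illustration of the failure mode being fixed (names are placeholders, not the real controller code):

    def get_dbkeys_old(trans, chrom_info=False):
        return []

    def get_dbkeys_new(trans, chrom_info=False, **kwd):
        return []

    params = {"chrom_info": True, "key": "API_KEY"}  # 'key' arrives with API requests
    # get_dbkeys_old(None, **params)  # TypeError: unexpected keyword argument 'key'
    print(get_dbkeys_new(None, **params))  # extra arguments are absorbed by **kwd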
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Use qualified url for biostar_url_redirect due to javascript always prepending galaxy base prefix (e.g. usually /) unless url contains //
by commits-noreply@bitbucket.org 03 Mar '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/edd17b90c126/
Changeset: edd17b90c126
User: dan
Date: 2014-03-03 16:09:19
Summary: Use qualified url for biostar_url_redirect due to javascript always prepending galaxy base prefix (e.g. usually /) unless url contains //
Affected #: 1 file
diff -r 4da230085f06db45563ab3ee27d07848dd8950de -r edd17b90c126d73451fe0a080ba4c75cff5a92e5 templates/webapps/galaxy/galaxy.masthead.mako
--- a/templates/webapps/galaxy/galaxy.masthead.mako
+++ b/templates/webapps/galaxy/galaxy.masthead.mako
@@ -40,7 +40,7 @@
'enable_cloud_launch' : app.config.get_bool('enable_cloud_launch', False),
'lims_doc_url' : app.config.get("lims_doc_url", "http://main.g2.bx.psu.edu/u/rkchak/p/sts"),
'biostar_url' : app.config.biostar_url,
- 'biostar_url_redirect' : h.url_for(controller='biostar', action='biostar_redirect', biostar_action='show_tag_galaxy'),
+ 'biostar_url_redirect' : h.url_for( controller='biostar', action='biostar_redirect', biostar_action='show_tag_galaxy', qualified=True ),
'support_url' : app.config.get("support_url", "http://wiki.galaxyproject.org/Support"),
'search_url' : app.config.get("search_url", "http://galaxyproject.org/search/usegalaxy/"),
'mailing_lists' : app.config.get("mailing_lists", "http://wiki.galaxyproject.org/MailingLists"),
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Fixes for biostar show galaxy tag redirect
by commits-noreply@bitbucket.org 03 Mar '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4da230085f06/
Changeset: 4da230085f06
User: dan
Date: 2014-03-03 15:57:31
Summary: Fixes for biostar show galaxy tag redirect
Affected #: 2 files
diff -r 538745257362eebda5caf25c8c16793caa90fec7 -r 4da230085f06db45563ab3ee27d07848dd8950de lib/galaxy/webapps/galaxy/controllers/biostar.py
--- a/lib/galaxy/webapps/galaxy/controllers/biostar.py
+++ b/lib/galaxy/webapps/galaxy/controllers/biostar.py
@@ -17,8 +17,9 @@
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
BIOSTAR_ACTIONS = {
+ None: '',
'new': 'p/new/post/',
- None: ''
+ 'show_tag_galaxy': 't/galaxy/'
}
diff -r 538745257362eebda5caf25c8c16793caa90fec7 -r 4da230085f06db45563ab3ee27d07848dd8950de templates/webapps/galaxy/galaxy.masthead.mako
--- a/templates/webapps/galaxy/galaxy.masthead.mako
+++ b/templates/webapps/galaxy/galaxy.masthead.mako
@@ -40,7 +40,7 @@
'enable_cloud_launch' : app.config.get_bool('enable_cloud_launch', False),
'lims_doc_url' : app.config.get("lims_doc_url", "http://main.g2.bx.psu.edu/u/rkchak/p/sts"),
'biostar_url' : app.config.biostar_url,
- 'biostar_url_redirect' : h.url_for(controller='biostar', action='biostar_redirect', biostar_action='show/tag/galaxy'),
+ 'biostar_url_redirect' : h.url_for(controller='biostar', action='biostar_redirect', biostar_action='show_tag_galaxy'),
'support_url' : app.config.get("support_url", "http://wiki.galaxyproject.org/Support"),
'search_url' : app.config.get("search_url", "http://galaxyproject.org/search/usegalaxy/"),
'mailing_lists' : app.config.get("mailing_lists", "http://wiki.galaxyproject.org/MailingLists"),
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Update Biostar interaction to use new cookie-based method.
by commits-noreply@bitbucket.org 03 Mar '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/538745257362/
Changeset: 538745257362
User: dan
Date: 2014-03-03 15:11:38
Summary: Update Biostar interaction to use new cookie-based method.
Affected #: 1 file
diff -r eafb4208db665dfeed7d8f3e6f0d2b61ce2f4fc8 -r 538745257362eebda5caf25c8c16793caa90fec7 lib/galaxy/webapps/galaxy/controllers/biostar.py
--- a/lib/galaxy/webapps/galaxy/controllers/biostar.py
+++ b/lib/galaxy/webapps/galaxy/controllers/biostar.py
@@ -7,6 +7,7 @@
import base64
from galaxy.util import json
import hmac
+import urlparse
# Slugifying from Armin Ronacher (http://flask.pocoo.org/snippets/5/)
@@ -15,6 +16,11 @@
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
+BIOSTAR_ACTIONS = {
+ 'new': 'p/new/post/',
+ None: ''
+}
+
def slugify(text, delim=u'-'):
"""Generates an slightly worse ASCII-only slug."""
@@ -28,12 +34,9 @@
# Biostar requires all keys to be present, so we start with a template
DEFAULT_PAYLOAD = {
- 'email': "",
- 'title': "",
- 'tags': 'galaxy',
- 'tool_name': '',
- 'tool_version': '',
- 'tool_id': ''
+ 'title': '',
+ 'tag_val': 'galaxy',
+ 'content': '',
}
@@ -49,10 +52,32 @@
def tag_for_tool( tool ):
"""
- Generate a reasonavle biostar tag for a tool.
+ Generate a reasonable biostar tag for a tool.
"""
return slugify( unicode( tool.name ) )
+def determine_cookie_domain( galaxy_hostname, biostar_hostname ):
+ if galaxy_hostname == biostar_hostname:
+ return galaxy_hostname
+
+ sub_biostar_hostname = biostar_hostname.split( '.', 1 )[-1]
+ if sub_biostar_hostname == galaxy_hostname:
+ return galaxy_hostname
+
+ sub_galaxy_hostname = galaxy_hostname.split( '.', 1 )[-1]
+ if sub_biostar_hostname == sub_galaxy_hostname:
+ return sub_galaxy_hostname
+
+ return galaxy_hostname
+
+def create_cookie( trans, key_name, key, email ):
+ digest = hmac.new( key, email ).hexdigest()
+ value = "%s:%s" % (email, digest)
+ trans.set_cookie( value, name=key_name, path='/', age=90, version='1' )
+ #We need to explicitly set the domain here, in order to allow for biostar in a subdomain to work
+ galaxy_hostname = urlparse.urlsplit( url_for( '/', qualified=True ) ).hostname
+ biostar_hostname = urlparse.urlsplit( trans.app.config.biostar_url ).hostname
+ trans.response.cookies[ key_name ][ 'domain' ] = determine_cookie_domain( galaxy_hostname, biostar_hostname )
class BiostarController( BaseUIController ):
"""
@@ -65,26 +90,22 @@
Generate a redirect to a Biostar site using external authentication to
pass Galaxy user information and information about a specific tool.
"""
- payload = payload or {}
# Ensure biostar integration is enabled
if not trans.app.config.biostar_url:
return error( "Biostar integration is not enabled" )
+ if biostar_action not in BIOSTAR_ACTIONS:
+ return error( "Invalid action specified (%s)." % ( biostar_action ) )
+
# Start building up the payload
+ payload = payload or {}
payload = dict( DEFAULT_PAYLOAD, **payload )
# Do the best we can of providing user information for the payload
if trans.user:
- payload['username'] = "user-" + trans.security.encode_id( trans.user.id )
- payload['email'] = trans.user.email
- if trans.user.username:
- payload['display_name'] = trans.user.username
- else:
- payload['display_name'] = trans.user.email.split( "@" )[0]
+ email = trans.user.email
else:
- encoded = trans.security.encode_id( trans.galaxy_session.id )
- payload['username'] = "anon-" + encoded
- payload['display_name'] = "Anonymous Galaxy User"
- data, digest = encode_data( trans.app.config.biostar_key, payload )
- return trans.response.send_redirect( url_for( trans.app.config.biostar_url, data=data, digest=digest, name=trans.app.config.biostar_key_name, action=biostar_action ) )
+ email = "anon-%s" % ( trans.security.encode_id( trans.galaxy_session.id ) )
+ create_cookie( trans, trans.app.config.biostar_key_name, trans.app.config.biostar_key, email )
+ return trans.response.send_redirect( url_for( urlparse.urljoin( trans.app.config.biostar_url, BIOSTAR_ACTIONS[ biostar_action ] ), **payload ) )
@web.expose
def biostar_question_redirect( self, trans, payload=None ):
@@ -111,9 +132,8 @@
if not tool:
return error( "No tool found matching '%s'" % tool_id )
# Tool specific information for payload
- payload = { 'tool_name': tool.name,
- 'tool_version': tool.version,
- 'tool_id': tool.id,
- 'tags': 'galaxy ' + tag_for_tool( tool ) }
+ payload = { 'title':'Need help with "%s" tool' % ( tool.name ),
+ 'content': '<br /><hr /><p>Tool name: %s</br>Tool version: %s</br>Tool ID: %s</p>' % ( tool.name, tool.version, tool.id ),
+ 'tag_val': 'galaxy ' + tag_for_tool( tool ) }
# Pass on to regular question method
return self.biostar_question_redirect( trans, payload )
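
determine_cookie_domain looks for a domain visible to both the Galaxy and Biostar hosts so the shared authentication cookie works when Biostar lives on a subdomain, and otherwise falls back to the Galaxy hostname. A few illustrative calls (the hostnames are made up):

    from galaxy.webapps.galaxy.controllers.biostar import determine_cookie_domain

    print(determine_cookie_domain('usegalaxy.org', 'usegalaxy.org'))             # usegalaxy.org (same host)
    print(determine_cookie_domain('example.org', 'biostar.example.org'))         # example.org (Biostar under Galaxy's domain)
    print(determine_cookie_domain('galaxy.example.org', 'biostar.example.org'))  # example.org (shared parent domain)
    print(determine_cookie_domain('galaxy.org', 'biostar.net'))                  # galaxy.org (unrelated hosts; default to Galaxy)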
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.