2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/75ba4f814129/
Changeset: 75ba4f814129
Branch: next-stable
User: carlfeberhard
Date: 2013-03-25 18:33:30
Summary: api/histories, show: capture job states
Affected #: 2 files
diff -r c82a139db1c98a990760e767da00fe7aeaa03d28 -r
75ba4f814129bb24ef85feb5fde99c115a707d4d lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -833,7 +833,6 @@
Returns a dictionary with state counts for history's HDAs. Key is a
dataset state, value is the number of states in that count.
"""
-
# Build query to get (state, count) pairs.
cols_to_select = [ trans.app.model.Dataset.table.c.state, func.count( '*'
) ]
from_obj = trans.app.model.HistoryDatasetAssociation.table.join(
trans.app.model.Dataset.table )
@@ -864,6 +863,135 @@
return state_count_dict
+ def get_hda_summary_dicts( self, trans, history ):
+ """Returns a list of dictionaries containing summary information
+ for each HDA in the given history.
+ """
+ hda_model = trans.model.HistoryDatasetAssociation
+
+ # outer join with job output to get job_state or None
+ job_subq = ( trans.sa_session.query(
+ trans.model.Job.id.label( 'job_id' ),
+ trans.model.Job.state.label( 'job_state' ),
+ trans.model.JobToOutputDatasetAssociation.dataset_id.label(
'hda_id' ) )
+ .join( trans.model.JobToOutputDatasetAssociation ) ).subquery()
+
+ # get state, name, etc.
+ columns = ( hda_model.name, hda_model.hid, hda_model.id, hda_model.deleted,
+ trans.model.Dataset.state,
+ job_subq.c.job_state, job_subq.c.job_id )
+ column_keys = [ "name", "hid", "id",
"deleted", "state", "job_state", "job_id" ]
+
+ query = ( trans.sa_session.query( *columns )
+ .enable_eagerloads( False )
+ .filter( hda_model.history == history )
+ .join( trans.model.Dataset )
+ .outerjoin(( job_subq, job_subq.c.hda_id == hda_model.id ))
+ .order_by( hda_model.hid ) )
+
+ # build dictionaries, adding history id and encoding all ids
+ hda_dicts = []
+ for hda_tuple in query.all():
+ hda_dict = dict( zip( column_keys, hda_tuple ) )
+ #if hda_dict[ 'job_state' ] not in [ None, 'ok' ]:
+ # print hda_dict[ 'hid' ], hda_dict[ 'name' ], hda_dict[
'job_state' ]
+ hda_dict[ 'history_id' ] = history.id
+ trans.security.encode_dict_ids( hda_dict )
+ hda_dicts.append( hda_dict )
+ return hda_dicts
+
+ def _get_hda_state_summaries( self, trans, hda_dict_list ):
+ """Returns two dictionaries (in a tuple): state_counts and
state_ids.
+ Each is keyed according to the possible hda states:
+ _counts contains a sum of the datasets in each state
+ _ids contains a list of the encoded ids for each hda in that state
+
+ hda_dict_list should be a list of hda data in dictionary form.
+ """
+ #TODO: doc to rst
+ # init counts, ids for each state
+ state_counts = {}
+ state_ids = {}
+ for key, state in trans.app.model.Dataset.states.items():
+ state_counts[ state ] = 0
+ state_ids[ state ] = []
+
+ for hda_dict in hda_dict_list:
+ item_state = hda_dict['state']
+ if not hda_dict['deleted']:
+ state_counts[ item_state ] = state_counts[ item_state ] + 1
+ # needs to return all ids (no deleted check)
+ state_ids[ item_state ].append( hda_dict['id'] )
+
+ return ( state_counts, state_ids )
+
+ def _get_history_state_from_hdas( self, trans, history, hda_state_counts ):
+ """Returns the history state based on the states of the HDAs it
contains.
+ """
+ states = trans.app.model.Dataset.states
+
+ num_hdas = sum( hda_state_counts.values() )
+ # (default to ERROR)
+ state = states.ERROR
+ if num_hdas == 0:
+ state = states.NEW
+
+ else:
+ if( ( hda_state_counts[ states.RUNNING ] > 0 )
+ or ( hda_state_counts[ states.SETTING_METADATA ] > 0 )
+ or ( hda_state_counts[ states.UPLOAD ] > 0 ) ):
+ state = states.RUNNING
+
+ elif hda_state_counts[ states.QUEUED ] > 0:
+ state = states.QUEUED
+
+ elif( ( hda_state_counts[ states.ERROR ] > 0 )
+ or ( hda_state_counts[ states.FAILED_METADATA ] > 0 ) ):
+ state = states.ERROR
+
+ elif hda_state_counts[ states.OK ] == num_hdas:
+ state = states.OK
+
+ return state
+
+ def _are_jobs_still_running( self, trans, hda_summary_list ):
+ """Determine whether any jobs are running from the given
+ list of hda summary dictionaries.
+ """
+ job_states = trans.model.Job.states
+ def is_job_running( job_state ):
+ return ( ( job_state == job_states.NEW )
+ or( job_state == job_states.UPLOAD )
+ or( job_state == job_states.WAITING )
+ or( job_state == job_states.QUEUED )
+ or( job_state == job_states.RUNNING ) )
+
+ return len( filter( lambda hda: is_job_running( hda['job_state'] ),
hda_summary_list ) )
+
+ def get_history_dict( self, trans, history ):
+ """Returns history data in the form of a dictionary.
+ """
+ history_dict = history.get_api_value( view='element', value_mapper={
'id':trans.security.encode_id })
+
+ history_dict[ 'nice_size' ] = history.get_disk_size( nice_size=True )
+
+ #TODO: separate, move to annotation api, fill on the client
+ history_dict[ 'annotation' ] = history.get_item_annotation_str(
trans.sa_session, trans.user, history )
+ if not history_dict[ 'annotation' ]:
+ history_dict[ 'annotation' ] = ''
+
+ #TODO: allow passing as arg
+ hda_summaries = self.get_hda_summary_dicts( trans, history )
+ #TODO remove the following in v2
+ ( state_counts, state_ids ) = self._get_hda_state_summaries( trans, hda_summaries
)
+ history_dict[ 'state_details' ] = state_counts
+ history_dict[ 'state_ids' ] = state_ids
+ history_dict[ 'state' ] = self._get_history_state_from_hdas( trans,
history, state_counts )
+
+ history_dict[ 'jobs_running' ] = self._are_jobs_still_running( trans,
hda_summaries )
+
+ return history_dict
+
class UsesFormDefinitionsMixin:
"""Mixin for controllers that use Galaxy form
objects."""
diff -r c82a139db1c98a990760e767da00fe7aeaa03d28 -r
75ba4f814129bb24ef85feb5fde99c115a707d4d lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -62,26 +62,6 @@
params = util.Params( kwd )
deleted = util.string_as_bool( deleted )
- states = trans.app.model.Dataset.states
-
- def get_dataset_state_summaries( datasets ):
- # cycles through the history's datasets, building counts and id lists for
each possible ds state
- state_counts = {}
- state_ids = {}
-
- # init counts, ids for each state
- for key, state in states.items():
- state_counts[state] = 0
- state_ids[state] = []
-
- # cycle through datasets saving each ds' state
- for dataset in datasets:
- item_state = dataset.state
- if not dataset.deleted:
- state_counts[ item_state ] = state_counts[ item_state ] + 1
- state_ids[ item_state ].append( trans.security.encode_id( dataset.id ) )
- return ( state_counts, state_ids )
-
# try to load the history, by most_recently_used or the given id
try:
if history_id == "most_recently_used":
@@ -94,42 +74,7 @@
history = self.get_history( trans, history_id, check_ownership=False,
check_accessible=True, deleted=deleted )
- history_data = history.get_api_value( view='element',
value_mapper={'id':trans.security.encode_id} )
- history_data[ 'nice_size' ] = history.get_disk_size( nice_size=True
)
-
- #TODO: separate, move to annotation api, fill on the client
- history_data[ 'annotation' ] = history.get_item_annotation_str(
trans.sa_session, trans.user, history )
- if not history_data[ 'annotation' ]:
- history_data[ 'annotation' ] = ''
-
- # get the history state using the state summaries of it's datasets
(default to ERROR)
- num_sets = len([ hda.id for hda in history.datasets if not hda.deleted ])
- state = states.ERROR
-
- ( state_counts, state_ids ) = get_dataset_state_summaries( history.datasets
)
-
- if num_sets == 0:
- state = states.NEW
-
- else:
- if( ( state_counts[ states.RUNNING ] > 0 )
- or ( state_counts[ states.SETTING_METADATA ] > 0 )
- or ( state_counts[ states.UPLOAD ] > 0 ) ):
- state = states.RUNNING
-
- elif state_counts[ states.QUEUED ] > 0:
- state = states.QUEUED
-
- elif( ( state_counts[ states.ERROR ] > 0 )
- or ( state_counts[ states.FAILED_METADATA ] > 0 ) ):
- state = states.ERROR
-
- elif state_counts[ states.OK ] == num_sets:
- state = states.OK
-
- history_data[ 'state' ] = state
- history_data[ 'state_details' ] = state_counts
- history_data[ 'state_ids' ] = state_ids
+ history_data = self.get_history_dict( trans, history )
history_data[ 'contents_url' ] = url_for( 'history_contents',
history_id=history_id )
except Exception, e:
https://bitbucket.org/galaxy/galaxy-central/commits/cf74f0879bfe/
Changeset: cf74f0879bfe
User: carlfeberhard
Date: 2013-03-25 18:34:27
Summary: merge next-stable
Affected #: 2 files
diff -r 6e09f0398ddc0f85b0776f7c98d21a6b3458b1d6 -r
cf74f0879bfe111b9a6763dfaff715154d4e1693 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -833,7 +833,6 @@
Returns a dictionary with state counts for history's HDAs. Key is a
dataset state, value is the number of states in that count.
"""
-
# Build query to get (state, count) pairs.
cols_to_select = [ trans.app.model.Dataset.table.c.state, func.count( '*'
) ]
from_obj = trans.app.model.HistoryDatasetAssociation.table.join(
trans.app.model.Dataset.table )
@@ -864,6 +863,135 @@
return state_count_dict
+ def get_hda_summary_dicts( self, trans, history ):
+ """Returns a list of dictionaries containing summary information
+ for each HDA in the given history.
+ """
+ hda_model = trans.model.HistoryDatasetAssociation
+
+ # outer join with job output to get job_state or None
+ job_subq = ( trans.sa_session.query(
+ trans.model.Job.id.label( 'job_id' ),
+ trans.model.Job.state.label( 'job_state' ),
+ trans.model.JobToOutputDatasetAssociation.dataset_id.label(
'hda_id' ) )
+ .join( trans.model.JobToOutputDatasetAssociation ) ).subquery()
+
+ # get state, name, etc.
+ columns = ( hda_model.name, hda_model.hid, hda_model.id, hda_model.deleted,
+ trans.model.Dataset.state,
+ job_subq.c.job_state, job_subq.c.job_id )
+ column_keys = [ "name", "hid", "id",
"deleted", "state", "job_state", "job_id" ]
+
+ query = ( trans.sa_session.query( *columns )
+ .enable_eagerloads( False )
+ .filter( hda_model.history == history )
+ .join( trans.model.Dataset )
+ .outerjoin(( job_subq, job_subq.c.hda_id == hda_model.id ))
+ .order_by( hda_model.hid ) )
+
+ # build dictionaries, adding history id and encoding all ids
+ hda_dicts = []
+ for hda_tuple in query.all():
+ hda_dict = dict( zip( column_keys, hda_tuple ) )
+ #if hda_dict[ 'job_state' ] not in [ None, 'ok' ]:
+ # print hda_dict[ 'hid' ], hda_dict[ 'name' ], hda_dict[
'job_state' ]
+ hda_dict[ 'history_id' ] = history.id
+ trans.security.encode_dict_ids( hda_dict )
+ hda_dicts.append( hda_dict )
+ return hda_dicts
+
+ def _get_hda_state_summaries( self, trans, hda_dict_list ):
+ """Returns two dictionaries (in a tuple): state_counts and
state_ids.
+ Each is keyed according to the possible hda states:
+ _counts contains a sum of the datasets in each state
+ _ids contains a list of the encoded ids for each hda in that state
+
+ hda_dict_list should be a list of hda data in dictionary form.
+ """
+ #TODO: doc to rst
+ # init counts, ids for each state
+ state_counts = {}
+ state_ids = {}
+ for key, state in trans.app.model.Dataset.states.items():
+ state_counts[ state ] = 0
+ state_ids[ state ] = []
+
+ for hda_dict in hda_dict_list:
+ item_state = hda_dict['state']
+ if not hda_dict['deleted']:
+ state_counts[ item_state ] = state_counts[ item_state ] + 1
+ # needs to return all ids (no deleted check)
+ state_ids[ item_state ].append( hda_dict['id'] )
+
+ return ( state_counts, state_ids )
+
+ def _get_history_state_from_hdas( self, trans, history, hda_state_counts ):
+ """Returns the history state based on the states of the HDAs it
contains.
+ """
+ states = trans.app.model.Dataset.states
+
+ num_hdas = sum( hda_state_counts.values() )
+ # (default to ERROR)
+ state = states.ERROR
+ if num_hdas == 0:
+ state = states.NEW
+
+ else:
+ if( ( hda_state_counts[ states.RUNNING ] > 0 )
+ or ( hda_state_counts[ states.SETTING_METADATA ] > 0 )
+ or ( hda_state_counts[ states.UPLOAD ] > 0 ) ):
+ state = states.RUNNING
+
+ elif hda_state_counts[ states.QUEUED ] > 0:
+ state = states.QUEUED
+
+ elif( ( hda_state_counts[ states.ERROR ] > 0 )
+ or ( hda_state_counts[ states.FAILED_METADATA ] > 0 ) ):
+ state = states.ERROR
+
+ elif hda_state_counts[ states.OK ] == num_hdas:
+ state = states.OK
+
+ return state
+
+ def _are_jobs_still_running( self, trans, hda_summary_list ):
+ """Determine whether any jobs are running from the given
+ list of hda summary dictionaries.
+ """
+ job_states = trans.model.Job.states
+ def is_job_running( job_state ):
+ return ( ( job_state == job_states.NEW )
+ or( job_state == job_states.UPLOAD )
+ or( job_state == job_states.WAITING )
+ or( job_state == job_states.QUEUED )
+ or( job_state == job_states.RUNNING ) )
+
+ return len( filter( lambda hda: is_job_running( hda['job_state'] ),
hda_summary_list ) )
+
+ def get_history_dict( self, trans, history ):
+ """Returns history data in the form of a dictionary.
+ """
+ history_dict = history.get_api_value( view='element', value_mapper={
'id':trans.security.encode_id })
+
+ history_dict[ 'nice_size' ] = history.get_disk_size( nice_size=True )
+
+ #TODO: separate, move to annotation api, fill on the client
+ history_dict[ 'annotation' ] = history.get_item_annotation_str(
trans.sa_session, trans.user, history )
+ if not history_dict[ 'annotation' ]:
+ history_dict[ 'annotation' ] = ''
+
+ #TODO: allow passing as arg
+ hda_summaries = self.get_hda_summary_dicts( trans, history )
+ #TODO remove the following in v2
+ ( state_counts, state_ids ) = self._get_hda_state_summaries( trans, hda_summaries
)
+ history_dict[ 'state_details' ] = state_counts
+ history_dict[ 'state_ids' ] = state_ids
+ history_dict[ 'state' ] = self._get_history_state_from_hdas( trans,
history, state_counts )
+
+ history_dict[ 'jobs_running' ] = self._are_jobs_still_running( trans,
hda_summaries )
+
+ return history_dict
+
class UsesFormDefinitionsMixin:
"""Mixin for controllers that use Galaxy form
objects."""
diff -r 6e09f0398ddc0f85b0776f7c98d21a6b3458b1d6 -r
cf74f0879bfe111b9a6763dfaff715154d4e1693 lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -62,26 +62,6 @@
params = util.Params( kwd )
deleted = util.string_as_bool( deleted )
- states = trans.app.model.Dataset.states
-
- def get_dataset_state_summaries( datasets ):
- # cycles through the history's datasets, building counts and id lists for
each possible ds state
- state_counts = {}
- state_ids = {}
-
- # init counts, ids for each state
- for key, state in states.items():
- state_counts[state] = 0
- state_ids[state] = []
-
- # cycle through datasets saving each ds' state
- for dataset in datasets:
- item_state = dataset.state
- if not dataset.deleted:
- state_counts[ item_state ] = state_counts[ item_state ] + 1
- state_ids[ item_state ].append( trans.security.encode_id( dataset.id ) )
- return ( state_counts, state_ids )
-
# try to load the history, by most_recently_used or the given id
try:
if history_id == "most_recently_used":
@@ -94,42 +74,7 @@
history = self.get_history( trans, history_id, check_ownership=False,
check_accessible=True, deleted=deleted )
- history_data = history.get_api_value( view='element',
value_mapper={'id':trans.security.encode_id} )
- history_data[ 'nice_size' ] = history.get_disk_size( nice_size=True
)
-
- #TODO: separate, move to annotation api, fill on the client
- history_data[ 'annotation' ] = history.get_item_annotation_str(
trans.sa_session, trans.user, history )
- if not history_data[ 'annotation' ]:
- history_data[ 'annotation' ] = ''
-
- # get the history state using the state summaries of it's datasets
(default to ERROR)
- num_sets = len([ hda.id for hda in history.datasets if not hda.deleted ])
- state = states.ERROR
-
- ( state_counts, state_ids ) = get_dataset_state_summaries( history.datasets
)
-
- if num_sets == 0:
- state = states.NEW
-
- else:
- if( ( state_counts[ states.RUNNING ] > 0 )
- or ( state_counts[ states.SETTING_METADATA ] > 0 )
- or ( state_counts[ states.UPLOAD ] > 0 ) ):
- state = states.RUNNING
-
- elif state_counts[ states.QUEUED ] > 0:
- state = states.QUEUED
-
- elif( ( state_counts[ states.ERROR ] > 0 )
- or ( state_counts[ states.FAILED_METADATA ] > 0 ) ):
- state = states.ERROR
-
- elif state_counts[ states.OK ] == num_sets:
- state = states.OK
-
- history_data[ 'state' ] = state
- history_data[ 'state_details' ] = state_counts
- history_data[ 'state_ids' ] = state_ids
+ history_data = self.get_history_dict( trans, history )
history_data[ 'contents_url' ] = url_for( 'history_contents',
history_id=history_id )
except Exception, e:
Repository URL:
https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit-notification service enabled
and this message is addressed to you.