commit/galaxy-central: 6 new changesets
6 new commits in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/ed45cbef6347/ Changeset: ed45cbef6347 Branch: model-info User: Kyle Ellrott Date: 2014-02-26 23:28:40 Summary: Patch to expose the actual dataset id in the LDDA and HDA to_dict calls (in addition to the instance id). Affected #: 1 file diff -r 1c91163d6c768452d106bdc8d843406fd7696ac7 -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1823,6 +1823,7 @@ rval = dict( id = hda.id, hda_ldda = 'hda', uuid = ( lambda uuid: str( uuid ) if uuid else None )( hda.dataset.uuid ), + dataset_id = hda.dataset.id, hid = hda.hid, file_ext = hda.ext, peek = ( lambda hda: hda.display_peek() if hda.peek and hda.peek != 'no peek' else None )( hda ), @@ -2251,6 +2252,7 @@ file_size = 0 rval = dict( id = ldda.id, hda_ldda = 'ldda', + dataset_id = ldda.dataset.id, model_class = self.__class__.__name__, name = ldda.name, deleted = ldda.deleted, https://bitbucket.org/galaxy/galaxy-central/commits/95050c5d33b9/ Changeset: 95050c5d33b9 Branch: model-info User: kellrott Date: 2014-03-04 22:04:28 Summary: Default merge Affected #: 69 files diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa config/plugins/visualizations/charts/static/app.js --- a/config/plugins/visualizations/charts/static/app.js +++ b/config/plugins/visualizations/charts/static/app.js @@ -61,9 +61,6 @@ // events var self = this; - this.config.on('change:current_view', function() { - self._showCurrent(); - }); this.config.on('change:title', function() { self._refreshTitle(); }); @@ -86,11 +83,6 @@ this.portlet.title('Charts' + title); }, - // current view - _showCurrent: function() { - - }, - // execute command execute: function(options) { }, diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa config/plugins/visualizations/charts/static/library/table.js --- a/config/plugins/visualizations/charts/static/library/table.js +++ b/config/plugins/visualizations/charts/static/library/table.js @@ -33,7 +33,15 @@ this.options = Utils.merge(options, this.optionsDefault); // create new element - this.setElement(this._template(options)); + var $el = $(this._template(options)); + + // link sub-elements + this.$thead = $el.find('thead'); + this.$tbody = $el.find('tbody'); + this.$tmessage = $el.find('tmessage'); + + // set element + this.setElement($el); // initialize row this.row = $('<tr></tr>'); @@ -49,7 +57,7 @@ // header appendHeader: function() { // append header row - $(this.el).find('thead').append(this.row); + this.$thead.append(this.row); // row this.row = $('<tr></tr>'); @@ -74,14 +82,14 @@ // remove remove: function(id) { - $(this.el).find('#' + id).remove(); + this.$tbody.find('#' + id).remove(); this.row_count--; this._refresh(); }, // remove removeAll: function() { - $(this.el).find('tbody').html(''); + this.$tbody.html(''); this.row_count = 0; this._refresh(); }, @@ -89,18 +97,18 @@ // value value: function(new_value) { // get current id/value - this.before = this.$el.find('.current').attr('id'); + this.before = this.$tbody.find('.current').attr('id'); // check if new_value is defined if (new_value !== undefined) { - this.$el.find('tr').removeClass('current'); + this.$tbody.find('tr').removeClass('current'); if (new_value) { - this.$el.find('#' + new_value).addClass('current'); + this.$tbody.find('#' + new_value).addClass('current'); } } // get current id/value - var after = 
this.$el.find('.current').attr('id'); + var after = this.$tbody.find('.current').attr('id'); if(after === undefined) { return null; } else { @@ -114,6 +122,11 @@ } }, + // size + size: function() { + return this.$tbody.find('tr').length; + }, + // commit _commit: function(id, prepend) { // add @@ -121,9 +134,9 @@ // add row if (prepend) { - $(this.el).find('tbody').prepend(this.row); + this.$tbody.prepend(this.row); } else { - $(this.el).find('tbody').append(this.row); + this.$tbody.append(this.row); } // row @@ -161,9 +174,9 @@ // refresh _refresh: function() { if (this.row_count == 0) { - this.$el.find('tmessage').show(); + this.$tmessage.show(); } else { - this.$el.find('tmessage').hide(); + this.$tmessage.hide(); } }, diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa config/plugins/visualizations/charts/static/library/ui.js --- a/config/plugins/visualizations/charts/static/library/ui.js +++ b/config/plugins/visualizations/charts/static/library/ui.js @@ -111,7 +111,7 @@ if (options.title) { str += '<div style="margin-right: 5px; margin-left: 5px;">' + '<i class="icon fa ' + options.icon + '"/> ' + - '<span style="position: relative; font-size: 0.8em; top: -1px;">' + options.title + '</span>' + + '<span style="position: relative; font-size: 0.8em; font-weight: normal; top: -1px;">' + options.title + '</span>' + '</div>'; } else { str += '<i class="icon fa ' + options.icon + '"/>'; diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa config/plugins/visualizations/charts/static/views/chart.js --- a/config/plugins/visualizations/charts/static/views/chart.js +++ b/config/plugins/visualizations/charts/static/views/chart.js @@ -96,6 +96,10 @@ // save chart self._saveChart(); + + // show viewport + self.hide(); + self.app.charts_view.$el.show(); } }), 'back' : new Ui.ButtonIcon({ @@ -103,7 +107,7 @@ tooltip : 'Return to Viewer', title : 'Return', onclick : function() { - self.$el.hide(); + self.hide(); self.app.charts_view.$el.show(); } }) @@ -161,7 +165,7 @@ self.tabs.showOperation('back'); }); this.app.charts.on('remove', function(chart) { - if (self.app.charts.length == 1) { + if (self.app.charts.length == 0) { self.tabs.hideOperation('back'); } }); @@ -187,6 +191,12 @@ this._resetChart(); }, + // hide + hide: function() { + $('.tooltip').hide(); + this.$el.hide(); + }, + // update _refreshLabels: function() { var self = this; @@ -275,12 +285,6 @@ // update chart model current.copy(this.chart); - - // hide - this.$el.hide(); - - // update main - this.app.charts_view.$el.show(); } }); diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa config/plugins/visualizations/charts/static/views/charts.js --- a/config/plugins/visualizations/charts/static/views/charts.js +++ b/config/plugins/visualizations/charts/static/views/charts.js @@ -19,7 +19,7 @@ self.app.chart.copy(chart); // hide this element - self.$el.hide(); + self.hide(); // show chart view self.app.chart_view.$el.show(); @@ -44,17 +44,19 @@ height : 100, operations : { 'new' : new Ui.ButtonIcon({ - icon : 'fa-plus', - tooltip: 'Create', + icon : 'fa-magic', + tooltip: 'Create a new Chart', + title: 'New', onclick: function() { - self.$el.hide(); + self.hide(); self.app.chart.reset(); self.app.chart_view.$el.show(); } }), 'delete' : new Ui.ButtonIcon({ - icon : 'fa-minus', - tooltip: 'Delete', + icon : 'fa-trash-o', + tooltip: 'Delete this Chart', + title: 'Delete', onclick: function() { // check if element has been 
selected @@ -109,6 +111,12 @@ }); }, + // hide + hide: function() { + $('.tooltip').hide(); + this.$el.hide(); + }, + // append append : function($el) { this.$el.append(Utils.wrap('')); @@ -134,6 +142,15 @@ _removeChart: function(chart) { // remove from to table this.table.remove(chart.id); + + // check if table is empty + if (this.table.size() == 0) { + this.hide(); + this.app.chart_view.$el.show(); + } else { + // select available chart + this.table.value(this.app.charts.last().id); + } } }); diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa config/plugins/visualizations/charts/static/views/viewport.js --- a/config/plugins/visualizations/charts/static/views/viewport.js +++ b/config/plugins/visualizations/charts/static/views/viewport.js @@ -31,7 +31,8 @@ operations : { 'edit' : new Ui.ButtonIcon({ icon : 'fa-gear', - tooltip : 'Configure' + tooltip : 'Customize Chart', + title : 'Customize' }) } }); @@ -80,7 +81,7 @@ self.app.chart.copy(chart); // show edit - self.app.charts_view.$el.hide(); + self.app.charts_view.hide(); self.app.chart_view.$el.show(); }); @@ -89,6 +90,12 @@ } }, + // hide + hide: function() { + $('.tooltip').hide(); + this.$el.hide(); + }, + // add _addChart: function(chart) { // link this diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/config.py --- a/lib/galaxy/config.py +++ b/lib/galaxy/config.py @@ -207,6 +207,7 @@ self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) ) self.log_events = string_as_bool( kwargs.get( 'log_events', 'False' ) ) self.sanitize_all_html = string_as_bool( kwargs.get( 'sanitize_all_html', True ) ) + self.serve_xss_vulnerable_mimetypes = string_as_bool( kwargs.get( 'serve_xss_vulnerable_mimetypes', False ) ) self.enable_old_display_applications = string_as_bool( kwargs.get( "enable_old_display_applications", "True" ) ) self.ucsc_display_sites = kwargs.get( 'ucsc_display_sites', "main,test,archaea,ucla" ).lower().split(",") self.gbrowse_display_sites = kwargs.get( 'gbrowse_display_sites', "modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225" ).lower().split(",") diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/datatypes/data.py --- a/lib/galaxy/datatypes/data.py +++ b/lib/galaxy/datatypes/data.py @@ -21,6 +21,12 @@ eggs.require( "Paste" ) import paste +XSS_VULNERABLE_MIME_TYPES = [ + 'image/svg+xml', # Unfiltered by Galaxy and may contain JS that would be executed by some browsers. + 'application/xml', # Some browsers will evalute SVG embedded JS in such XML documents. +] +DEFAULT_MIME_TYPE = 'text/plain' # Vulnerable mime types will be replaced with this. + log = logging.getLogger(__name__) comptypes=[] # Is this being used anywhere, why was this here? -JohnC @@ -334,11 +340,12 @@ mime = trans.app.datatypes_registry.get_mimetype_by_extension( ".".split( file_path )[-1] ) except: mime = "text/plain" - trans.response.set_content_type( mime ) + self._clean_and_set_mime_type( trans, mime ) return open( file_path ) else: return trans.show_error_message( "Could not find '%s' on the extra files path %s." 
% ( filename, file_path ) ) - trans.response.set_content_type(data.get_mime()) + self._clean_and_set_mime_type( trans, data.get_mime() ) + trans.log_event( "Display dataset id: %s" % str( data.id ) ) from galaxy import datatypes #DBTODO REMOVE THIS AT REFACTOR if to_ext or isinstance(data.datatype, datatypes.binary.Binary): # Saving the file, or binary file @@ -624,6 +631,12 @@ dataset_source = dataproviders.dataset.DatasetDataProvider( dataset ) return dataproviders.chunk.Base64ChunkDataProvider( dataset_source, **settings ) + def _clean_and_set_mime_type(self, trans, mime): + if mime.lower() in XSS_VULNERABLE_MIME_TYPES: + if not getattr( trans.app.config, "serve_xss_vulnerable_mimetypes", True ): + mime = DEFAULT_MIME_TYPE + trans.response.set_content_type( mime ) + @dataproviders.decorators.has_dataproviders class Text( Data ): diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/jobs/__init__.py --- a/lib/galaxy/jobs/__init__.py +++ b/lib/galaxy/jobs/__init__.py @@ -181,7 +181,7 @@ if tools is not None: for tool in self.__findall_with_required(tools, 'tool'): # There can be multiple definitions with identical ids, but different params - id = tool.get('id').lower() + id = tool.get('id').lower().rstrip('/') if id not in self.tools: self.tools[id] = list() self.tools[id].append(JobToolConfiguration(**dict(tool.items()))) diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/jobs/runners/cli_job/torque.py --- a/lib/galaxy/jobs/runners/cli_job/torque.py +++ b/lib/galaxy/jobs/runners/cli_job/torque.py @@ -131,4 +131,5 @@ def __get_job_state(self, state): return { 'E' : job_states.RUNNING, 'R' : job_states.RUNNING, - 'Q' : job_states.QUEUED }.get(state, state) + 'Q' : job_states.QUEUED, + 'C' : job_states.OK }.get(state, state) diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -19,6 +19,7 @@ import time from string import Template from itertools import ifilter +from itertools import chain import galaxy.datatypes import galaxy.datatypes.registry @@ -964,24 +965,37 @@ """ Fetch filtered list of contents of history. 
""" - python_filter = None + default_contents_types = [ + 'dataset', + ] + types = kwds.get('types', default_contents_types) + iters = [] + if 'dataset' in types: + iters.append( self.__dataset_contents_iter( **kwds ) ) + return galaxy.util.merge_sorted_iterables( operator.attrgetter( "hid" ), *iters ) + + def __dataset_contents_iter(self, **kwds): + return self.__filter_contents( HistoryDatasetAssociation, **kwds ) + + def __filter_contents( self, content_class, **kwds ): db_session = object_session( self ) assert db_session != None - query = db_session.query( HistoryDatasetAssociation ).filter( HistoryDatasetAssociation.table.c.history_id == self.id ) - query = query.order_by( HistoryDatasetAssociation.table.c.hid.asc() ) + query = db_session.query( content_class ).filter( content_class.table.c.history_id == self.id ) + query = query.order_by( content_class.table.c.hid.asc() ) + python_filter = None deleted = galaxy.util.string_as_bool_or_none( kwds.get( 'deleted', None ) ) if deleted is not None: - query = query.filter( HistoryDatasetAssociation.deleted == deleted ) + query = query.filter( content_class.deleted == deleted ) visible = galaxy.util.string_as_bool_or_none( kwds.get( 'visible', None ) ) if visible is not None: - query = query.filter( HistoryDatasetAssociation.visible == visible ) + query = query.filter( content_class.visible == visible ) if 'ids' in kwds: ids = kwds['ids'] max_in_filter_length = kwds.get('max_in_filter_length', MAX_IN_FILTER_LENGTH) if len(ids) < max_in_filter_length: - query = query.filter( HistoryDatasetAssociation.id.in_(ids) ) + query = query.filter( content_class.id.in_(ids) ) else: - python_filter = lambda hda: hda.id in ids + python_filter = lambda content: content.id in ids if python_filter: return ifilter(python_filter, query) else: @@ -2128,6 +2142,7 @@ parent_library_id = self.folder.parent_library.id, folder_id = self.folder_id, model_class = self.__class__.__name__, + state = ldda.state, name = ldda.name, file_name = ldda.file_name, uploaded_by = ldda.user.email, diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/tools/parameters/basic.py --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -1644,18 +1644,7 @@ # Also collect children via association object dataset_collector( hda.children, hid ) dataset_collector( history.active_datasets_children_and_roles, None ) - - set_selected = field.get_selected( return_label=True, return_value=True, multi=False ) is not None - # Ensure than an item is always selected - if self.optional: - if set_selected: - field.add_option( "Selection is Optional", 'None', False ) - else: - field.add_option( "Selection is Optional", 'None', True ) - elif not set_selected and bool( field.options ): - # Select the last item - a, b, c = field.options[-1] - field.options[-1] = a, b, True + self._ensure_selection( field ) return field def get_initial_value( self, trans, context, history=None ): @@ -1835,6 +1824,19 @@ assert history is not None, "%s requires a history" % class_name return history + def _ensure_selection( self, field ): + set_selected = field.get_selected( return_label=True, return_value=True, multi=False ) is not None + # Ensure than an item is always selected + if self.optional: + if set_selected: + field.add_option( "Selection is Optional", 'None', False ) + else: + field.add_option( "Selection is Optional", 'None', True ) + elif not set_selected and bool( field.options ): + # Select the last item + a, b, c = 
field.options[-1] + field.options[-1] = a, b, True + class HiddenDataToolParameter( HiddenToolParameter, DataToolParameter ): """ diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/util/__init__.py --- a/lib/galaxy/util/__init__.py +++ b/lib/galaxy/util/__init__.py @@ -427,6 +427,59 @@ return os.path.commonprefix( [ file, directory ] ) == directory +def merge_sorted_iterables( operator, *iterables ): + """ + + >>> operator = lambda x: x + >>> list( merge_sorted_iterables( operator, [1,2,3], [4,5] ) ) + [1, 2, 3, 4, 5] + >>> list( merge_sorted_iterables( operator, [4, 5], [1,2,3] ) ) + [1, 2, 3, 4, 5] + >>> list( merge_sorted_iterables( operator, [1, 4, 5], [2], [3] ) ) + [1, 2, 3, 4, 5] + """ + first_iterable = iterables[ 0 ] + if len( iterables ) == 1: + for el in first_iterable: + yield el + else: + for el in __merge_two_sorted_iterables( + operator, + iter( first_iterable ), + merge_sorted_iterables( operator, *iterables[ 1: ] ) + ): + yield el + + +def __merge_two_sorted_iterables( operator, iterable1, iterable2 ): + unset = object() + continue_merge = True + next_1 = unset + next_2 = unset + while continue_merge: + try: + if next_1 is unset: + next_1 = next( iterable1 ) + if next_2 is unset: + next_2 = next( iterable2 ) + if operator( next_2 ) < operator( next_1 ): + yield next_2 + next_2 = unset + else: + yield next_1 + next_1 = unset + except StopIteration: + continue_merge = False + if next_1 is not unset: + yield next_1 + if next_2 is not unset: + yield next_2 + for el in iterable1: + yield el + for el in iterable2: + yield el + + class Params( object ): """ Stores and 'sanitizes' parameters. Alphanumeric characters and the diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/visualization/genomes.py --- a/lib/galaxy/visualization/genomes.py +++ b/lib/galaxy/visualization/genomes.py @@ -210,7 +210,7 @@ rval = self.genomes[ dbkey ] return rval - def get_dbkeys( self, trans, chrom_info=False ): + def get_dbkeys( self, trans, chrom_info=False, **kwd ): """ Returns all known dbkeys. If chrom_info is True, only dbkeys with chromosome lengths are returned. """ dbkeys = [] diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/webapps/galaxy/api/history_contents.py --- a/lib/galaxy/webapps/galaxy/api/history_contents.py +++ b/lib/galaxy/webapps/galaxy/api/history_contents.py @@ -33,6 +33,9 @@ :param history_id: encoded id string of the HDA's History :type ids: str :param ids: (optional) a comma separated list of encoded `HDA` ids + :param types: (optional) kinds of contents to index (currently just + dataset, but dataset_collection will be added shortly). + :type types: str :rtype: list :returns: dictionaries containing summary or detailed HDA information @@ -51,7 +54,16 @@ else: history = self.get_history( trans, history_id, check_ownership=True, check_accessible=True ) - contents_kwds = {} + # Allow passing in type or types - for continuity rest of methods + # take in type - but this one can be passed multiple types and + # type=dataset,dataset_collection is a bit silly. 
+ types = kwd.get( 'type', kwd.get( 'types', None ) ) or [] + if types: + types = util.listify(types) + else: + types = [ 'dataset' ] + + contents_kwds = {'types': types} if ids: ids = map( lambda id: trans.security.decode_id( id ), ids.split( ',' ) ) contents_kwds[ 'ids' ] = ids @@ -66,13 +78,14 @@ if details and details != 'all': details = util.listify( details ) - for hda in history.contents_iter( **contents_kwds ): - encoded_hda_id = trans.security.encode_id( hda.id ) - detailed = details == 'all' or ( encoded_hda_id in details ) - if detailed: - rval.append( self._detailed_hda_dict( trans, hda ) ) - else: - rval.append( self._summary_hda_dict( trans, history_id, hda ) ) + for content in history.contents_iter( **contents_kwds ): + if isinstance(content, trans.app.model.HistoryDatasetAssociation): + encoded_content_id = trans.security.encode_id( content.id ) + detailed = details == 'all' or ( encoded_content_id in details ) + if detailed: + rval.append( self._detailed_hda_dict( trans, content ) ) + else: + rval.append( self._summary_hda_dict( trans, history_id, content ) ) except Exception, e: # for errors that are not specific to one hda (history lookup or summary list) rval = "Error in history API at listing contents: " + str( e ) @@ -138,6 +151,13 @@ :returns: dictionary containing detailed HDA information .. seealso:: :func:`galaxy.web.base.controller.UsesHistoryDatasetAssociationMixin.get_hda_dict` """ + contents_type = kwd.get('type', 'dataset') + if contents_type == 'dataset': + return self.__show_dataset( trans, id, history_id, **kwd ) + else: + return self.__handle_unknown_contents_type( trans, contents_type ) + + def __show_dataset( self, trans, id, history_id, **kwd ): try: hda = self.get_history_dataset_association_from_ids( trans, id, history_id ) hda_dict = self.get_hda_dict( trans, hda ) @@ -178,11 +198,6 @@ #TODO: copy existing, accessible hda - dataset controller, copy_datasets #TODO: convert existing, accessible hda - model.DatasetInstance(or hda.datatype).get_converter_types # check parameters - source = payload.get('source', None) - content = payload.get('content', None) - if source not in ['library', 'hda'] or content is None: - trans.response.status = 400 - return "Please define the source ('library' or 'hda') and the content." # retrieve history try: history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False ) @@ -190,6 +205,18 @@ # no way to tell if it failed bc of perms or other (all MessageExceptions) trans.response.status = 500 return str( e ) + type = payload.get('type', 'dataset') + if type == 'dataset': + return self.__create_dataset( trans, history, payload, **kwd ) + else: + return self.__handle_unknown_contents_type( trans, type ) + + def __create_dataset( self, trans, history, payload, **kwd ): + source = payload.get('source', None) + content = payload.get('content', None) + if source not in ['library', 'hda'] or content is None: + trans.response.status = 400 + return "Please define the source ('library' or 'hda') and the content." 
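(Editorial aside, not part of the changeset: a minimal usage sketch of the reworked dataset-create endpoint handled by __create_dataset above, assuming the third-party requests library; the Galaxy URL, API key, and encoded ids are placeholders, while the 'type', 'source' and 'content' payload keys follow the handling shown in this hunk.)

    import json
    import requests  # assumed HTTP client, not part of the changeset

    GALAXY_URL = "http://localhost:8080"    # placeholder Galaxy instance
    API_KEY = "0123456789abcdef"            # placeholder API key
    history_id = "f2db41e1fa331b3e"         # placeholder encoded history id
    hda_id = "417e33144b294c21"             # placeholder encoded HDA id

    # Copy an existing HDA into the history. 'type' defaults to 'dataset';
    # 'source' must be 'library' or 'hda' and 'content' must be set, as
    # validated by __create_dataset in the diff above.
    payload = { "type": "dataset", "source": "hda", "content": hda_id }
    resp = requests.post(
        "%s/api/histories/%s/contents?key=%s" % ( GALAXY_URL, history_id, API_KEY ),
        data=json.dumps( payload ),
        headers={ "Content-Type": "application/json" },
    )
    print resp.status_code, resp.text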
# copy from library dataset if source == 'library': # get library data set @@ -221,7 +248,7 @@ return str( msg_exc ) except Exception, exc: trans.response.status = 500 - log.exception( "history: %s, source: %s, content: %s", history_id, source, content ) + log.exception( "history: %s, source: %s, content: %s", trans.security.encode_id(history.id), source, content ) return str( exc ) data_copy=hda.copy( copy_children=True ) result=history.add_dataset( data_copy ) @@ -255,6 +282,13 @@ any values that were different from the original and, therefore, updated """ #TODO: PUT /api/histories/{encoded_history_id} payload = { rating: rating } (w/ no security checks) + contents_type = kwd.get('type', 'dataset') + if contents_type == "dataset": + return self.__update_dataset( trans, history_id, id, payload, **kwd ) + else: + return self.__handle_unknown_contents_type( contents_type ) + + def __update_dataset( self, trans, history_id, id, payload, **kwd ): changed = {} try: # anon user @@ -323,6 +357,13 @@ * deleted: if the history was marked as deleted, * purged: if the history was purged """ + contents_type = kwd.get('type', 'dataset') + if contents_type == "dataset": + return self.__delete_dataset( trans, history_id, id, purge=purge, **kwd ) + else: + return self.__handle_unknown_contents_type( trans, contents_type ) + + def __delete_dataset( self, trans, history_id, id, purge, **kwd ): # get purge from the query or from the request body payload (a request body is optional here) purge = util.string_as_bool( purge ) if kwd.get( 'payload', None ): @@ -407,3 +448,8 @@ pass #log.warn( 'unknown key: %s', str( key ) ) return validated_payload + + def __handle_unknown_contents_type( self, trans, contents_type ): + # TODO: raise a message exception instead of setting status and returning dict. + trans.response.status = 400 + return { 'error': 'Unknown contents type %s' % type } diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/webapps/galaxy/api/jobs.py --- a/lib/galaxy/webapps/galaxy/api/jobs.py +++ b/lib/galaxy/webapps/galaxy/api/jobs.py @@ -12,6 +12,7 @@ from galaxy.web.base.controller import UsesHistoryDatasetAssociationMixin from galaxy.web.base.controller import UsesLibraryMixinItems from galaxy import exceptions +from galaxy import util import logging log = logging.getLogger( __name__ ) @@ -22,7 +23,7 @@ @expose_api def index( self, trans, **kwd ): """ - index( trans, state=None ) + index( trans, state=None, tool_id=None, history_id=None ) * GET /api/jobs: return jobs for current user @@ -31,6 +32,12 @@ Valid Galaxy job states include: 'new', 'upload', 'waiting', 'queued', 'running', 'ok', 'error', 'paused', 'deleted', 'deleted_new' + :type tool_id: string or list + :param tool_id: limit listing of jobs to those that match one of the included tool_ids. If none, all are returned. + + :type history_id: string + :param history_id: limit listing of jobs to those that match the history_id. If none, all are returned. 
+ :rtype: list :returns: list of dictionaries containing summary job information """ @@ -39,14 +46,30 @@ query = trans.sa_session.query( trans.app.model.Job ).filter( trans.app.model.Job.user == trans.user ) - if state is not None: - if isinstance( state, basestring ): - query = query.filter( trans.app.model.Job.state == state ) - elif isinstance( state, list ): - t = [] - for s in state: - t.append( trans.app.model.Job.state == s ) - query = query.filter( or_( *t ) ) + + def build_and_apply_filters( query, objects, filter_func ): + if objects is not None: + if isinstance( objects, basestring ): + query = query.filter( filter_func( objects ) ) + elif isinstance( objects, list ): + t = [] + for obj in objects: + t.append( filter_func( obj ) ) + query = query.filter( or_( *t ) ) + return query + + query = build_and_apply_filters( query, state, lambda s: trans.app.model.Job.state == s ) + + query = build_and_apply_filters( query, kwd.get( 'tool_id', None ), lambda t: trans.app.model.Job.tool_id == t ) + query = build_and_apply_filters( query, kwd.get( 'tool_id_like', None ), lambda t: trans.app.model.Job.tool_id.like(t) ) + + history_id = kwd.get( 'history_id', None ) + if history_id is not None: + try: + decoded_history_id = trans.security.decode_id(history_id) + query = query.filter( trans.app.model.Job.history_id == decoded_history_id ) + except: + raise exceptions.ObjectAttributeInvalidException() out = [] for job in query.order_by( diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/webapps/galaxy/api/libraries.py --- a/lib/galaxy/webapps/galaxy/api/libraries.py +++ b/lib/galaxy/webapps/galaxy/api/libraries.py @@ -130,6 +130,7 @@ new_library['description'] = description new_library['synopsis'] = synopsis new_library['id'] = encoded_id + new_library['root_folder_id'] = trans.security.encode_id( root_folder.id ) return new_library def edit( self, trans, encoded_id, payload, **kwd ): diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/webapps/galaxy/api/workflows.py --- a/lib/galaxy/webapps/galaxy/api/workflows.py +++ b/lib/galaxy/webapps/galaxy/api/workflows.py @@ -79,7 +79,7 @@ user=trans.user ).join( 'stored_workflow' ).filter( trans.app.model.StoredWorkflow.deleted == False ).order_by( desc( trans.app.model.StoredWorkflow.update_time ) ).all(): - item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id }) + item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id } ) encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id) item['url'] = url_for( 'workflow', id=encoded_id ) rval.append(item) @@ -127,6 +127,8 @@ steps[step.id] = {'id': step.id, 'type': step.type, 'tool_id': step.tool_id, + 'tool_version': step.tool_version, + 'tool_inputs': step.tool_inputs, 'input_steps': {}} for conn in step.input_connections: steps[step.id]['input_steps'][conn.input_name] = {'source_step': conn.output_step_id, @@ -139,17 +141,38 @@ """ POST /api/workflows - We're not creating workflows from the api. Just execute for now. + We're not creating workflows from the api. Just execute for now. - However, we will import them if installed_repository_file is specified + However, we will import them if installed_repository_file is specified. + + :param installed_repository_file The path of a workflow to import. 
Either workflow_id or installed_repository_file must be specified + :type installed_repository_file str + + :param workflow_id: an existing workflow id. Either workflow_id or installed_repository_file must be specified + :type workflow_id: str + + :param parameters: See _update_step_parameters() + :type parameters: dict + + :param ds_map: A dictionary mapping each input step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld' or 'hda') and 'id' (which should be the id of a LibraryDatasetDatasetAssociation, LibraryDataset or HistoryDatasetAssociation respectively) + :type ds_map: dict + + :param no_add_to_history: if present in the payload with any value, the input datasets will not be added to the selected history + :type no_add_to_history: str + + :param history: Either the name of a new history or "hist_id=HIST_ID" where HIST_ID is the id of an existing history + :type history: str + + :param replacement_params: A dictionary used when renaming datasets + :type replacement_params: dict """ # Pull parameters out of payload. - workflow_id = payload['workflow_id'] + workflow_id = payload.get('workflow_id', None) param_map = payload.get('parameters', {}) - ds_map = payload['ds_map'] + ds_map = payload.get('ds_map', {}) add_to_history = 'no_add_to_history' not in payload - history_param = payload['history'] + history_param = payload.get('history', '') # Get/create workflow. if not workflow_id: @@ -175,6 +198,20 @@ return("Workflow is not owned by or shared with current user") workflow = stored_workflow.latest_workflow + # Sanity checks. + if not workflow: + trans.response.status = 400 + return "Workflow not found." + if len( workflow.steps ) == 0: + trans.response.status = 400 + return "Workflow cannot be run because it does not have any steps" + if workflow.has_cycles: + trans.response.status = 400 + return "Workflow cannot be run because it contains cycles" + if workflow.has_errors: + trans.response.status = 400 + return "Workflow cannot be run because of validation errors in some steps" + # Get target history. if history_param.startswith('hist_id='): #Passing an existing history to use. @@ -210,7 +247,7 @@ else: trans.response.status = 400 return "Unknown dataset source '%s' specified." % ds_map[k]['src'] - if add_to_history and hda.history != history: + if add_to_history and hda.history != history: hda = hda.copy() history.add_dataset(hda) ds_map[k]['hda'] = hda @@ -218,22 +255,7 @@ trans.response.status = 400 return "Invalid Dataset '%s' Specified" % ds_map[k]['id'] - # Sanity checks. - if not workflow: - trans.response.status = 400 - return "Workflow not found." - if len( workflow.steps ) == 0: - trans.response.status = 400 - return "Workflow cannot be run because it does not have any steps" - if workflow.has_cycles: - trans.response.status = 400 - return "Workflow cannot be run because it contains cycles" - if workflow.has_errors: - trans.response.status = 400 - return "Workflow cannot be run because of validation errors in some steps" - # Build the state for each step - rval = {} for step in workflow.steps: step_errors = None input_connections_by_name = {} @@ -260,7 +282,7 @@ trans.response.status = 400 return "Workflow cannot be run because of step upgrade messages: %s" % step.upgrade_messages else: - # This is an input step. Make sure we have an available input. + # This is an input step. Make sure we have an available input. 
if step.type == 'data_input' and str(step.id) not in ds_map: trans.response.status = 400 return "Workflow cannot be run because an expected input step '%s' has no input dataset." % step.id @@ -268,12 +290,7 @@ step.state = step.module.get_runtime_state() # Run each step, connecting outputs to inputs - outputs = util.odict.odict() - rval['history'] = trans.security.encode_id(history.id) - rval['outputs'] = [] - replacement_dict = payload.get('replacement_params', {}) - outputs = invoke( trans=trans, workflow=workflow, @@ -285,6 +302,9 @@ # Build legacy output - should probably include more information from # outputs. + rval = {} + rval['history'] = trans.security.encode_id(history.id) + rval['outputs'] = [] for step in workflow.steps: if step.type == 'tool' or step.type is None: for v in outputs[ step.id ].itervalues(): @@ -387,7 +407,7 @@ """ # Pull parameters out of payload. workflow_id = payload.get('workflow_id', None) - if workflow_id == None: + if workflow_id is None: raise exceptions.ObjectAttributeMissingException( "Missing required parameter 'workflow_id'." ) try: stored_workflow = self.get_stored_workflow( trans, workflow_id, check_ownership=False ) @@ -452,7 +472,7 @@ if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0: raise exceptions.ItemOwnershipException() results = trans.sa_session.query(self.app.model.WorkflowInvocation).filter(self.app.model.WorkflowInvocation.workflow_id==stored_workflow.latest_workflow_id) - results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id)) + results = results.filter(self.app.model.WorkflowInvocation.id == trans.security.decode_id(usage_id)) out = results.first() if out is not None: return self.encode_all_ids( trans, out.to_dict('element'), True) diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/webapps/galaxy/buildapp.py --- a/lib/galaxy/webapps/galaxy/buildapp.py +++ b/lib/galaxy/webapps/galaxy/buildapp.py @@ -75,6 +75,19 @@ webapp.add_api_controllers( 'galaxy.webapps.galaxy.api', app ) + valid_history_contents_types = [ + 'dataset', + ] + # This must come before history contents below. 
+ # Accesss HDA details via histories/:history_id/contents/datasets/:hda_id + webapp.mapper.resource( "typed_content", + "{type:%s}s" % "|".join( valid_history_contents_types ), + name_prefix="history_content_", + controller='history_contents', + path_prefix='/api/histories/:history_id/contents', + parent_resources=dict( member_name='history', collection_name='histories' ), + ) + # Legacy access to HDA details via histories/:history_id/contents/:hda_id webapp.mapper.resource( 'content', 'contents', controller='history_contents', diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/webapps/galaxy/controllers/biostar.py --- a/lib/galaxy/webapps/galaxy/controllers/biostar.py +++ b/lib/galaxy/webapps/galaxy/controllers/biostar.py @@ -7,6 +7,7 @@ import base64 from galaxy.util import json import hmac +import urlparse # Slugifying from Armin Ronacher (http://flask.pocoo.org/snippets/5/) @@ -15,6 +16,12 @@ _punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+') +BIOSTAR_ACTIONS = { + None: '', + 'new': 'p/new/post/', + 'show_tag_galaxy': 't/galaxy/' +} + def slugify(text, delim=u'-'): """Generates an slightly worse ASCII-only slug.""" @@ -28,12 +35,9 @@ # Biostar requires all keys to be present, so we start with a template DEFAULT_PAYLOAD = { - 'email': "", - 'title': "", - 'tags': 'galaxy', - 'tool_name': '', - 'tool_version': '', - 'tool_id': '' + 'title': '', + 'tag_val': 'galaxy', + 'content': '', } @@ -49,10 +53,32 @@ def tag_for_tool( tool ): """ - Generate a reasonavle biostar tag for a tool. + Generate a reasonable biostar tag for a tool. """ return slugify( unicode( tool.name ) ) +def determine_cookie_domain( galaxy_hostname, biostar_hostname ): + if galaxy_hostname == biostar_hostname: + return galaxy_hostname + + sub_biostar_hostname = biostar_hostname.split( '.', 1 )[-1] + if sub_biostar_hostname == galaxy_hostname: + return galaxy_hostname + + sub_galaxy_hostname = galaxy_hostname.split( '.', 1 )[-1] + if sub_biostar_hostname == sub_galaxy_hostname: + return sub_galaxy_hostname + + return galaxy_hostname + +def create_cookie( trans, key_name, key, email ): + digest = hmac.new( key, email ).hexdigest() + value = "%s:%s" % (email, digest) + trans.set_cookie( value, name=key_name, path='/', age=90, version='1' ) + #We need to explicitly set the domain here, in order to allow for biostar in a subdomain to work + galaxy_hostname = urlparse.urlsplit( url_for( '/', qualified=True ) ).hostname + biostar_hostname = urlparse.urlsplit( trans.app.config.biostar_url ).hostname + trans.response.cookies[ key_name ][ 'domain' ] = determine_cookie_domain( galaxy_hostname, biostar_hostname ) class BiostarController( BaseUIController ): """ @@ -65,26 +91,22 @@ Generate a redirect to a Biostar site using external authentication to pass Galaxy user information and information about a specific tool. """ - payload = payload or {} # Ensure biostar integration is enabled if not trans.app.config.biostar_url: return error( "Biostar integration is not enabled" ) + if biostar_action not in BIOSTAR_ACTIONS: + return error( "Invalid action specified (%s)." 
% ( biostar_action ) ) + # Start building up the payload + payload = payload or {} payload = dict( DEFAULT_PAYLOAD, **payload ) # Do the best we can of providing user information for the payload if trans.user: - payload['username'] = "user-" + trans.security.encode_id( trans.user.id ) - payload['email'] = trans.user.email - if trans.user.username: - payload['display_name'] = trans.user.username - else: - payload['display_name'] = trans.user.email.split( "@" )[0] + email = trans.user.email else: - encoded = trans.security.encode_id( trans.galaxy_session.id ) - payload['username'] = "anon-" + encoded - payload['display_name'] = "Anonymous Galaxy User" - data, digest = encode_data( trans.app.config.biostar_key, payload ) - return trans.response.send_redirect( url_for( trans.app.config.biostar_url, data=data, digest=digest, name=trans.app.config.biostar_key_name, action=biostar_action ) ) + email = "anon-%s" % ( trans.security.encode_id( trans.galaxy_session.id ) ) + create_cookie( trans, trans.app.config.biostar_key_name, trans.app.config.biostar_key, email ) + return trans.response.send_redirect( url_for( urlparse.urljoin( trans.app.config.biostar_url, BIOSTAR_ACTIONS[ biostar_action ] ), **payload ) ) @web.expose def biostar_question_redirect( self, trans, payload=None ): @@ -111,9 +133,8 @@ if not tool: return error( "No tool found matching '%s'" % tool_id ) # Tool specific information for payload - payload = { 'tool_name': tool.name, - 'tool_version': tool.version, - 'tool_id': tool.id, - 'tags': 'galaxy ' + tag_for_tool( tool ) } + payload = { 'title':'Need help with "%s" tool' % ( tool.name ), + 'content': '<br /><hr /><p>Tool name: %s</br>Tool version: %s</br>Tool ID: %s</p>' % ( tool.name, tool.version, tool.id ), + 'tag_val': ','.join( [ 'galaxy', tag_for_tool( tool ) ] ) } # Pass on to regular question method return self.biostar_question_redirect( trans, payload ) diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/webapps/galaxy/controllers/tag.py --- a/lib/galaxy/webapps/galaxy/controllers/tag.py +++ b/lib/galaxy/webapps/galaxy/controllers/tag.py @@ -89,6 +89,7 @@ item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) ) user = trans.user item_class = self.get_class( item_class ) + q = '' if q is None else q q = q.encode( 'utf-8' ) if q.find( ":" ) == -1: return self._get_tag_autocomplete_names( trans, q, limit, timestamp, user, item, item_class ) diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/galaxy/webapps/galaxy/controllers/workflow.py --- a/lib/galaxy/webapps/galaxy/controllers/workflow.py +++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py @@ -1557,23 +1557,28 @@ m.stored_workflow = q.get( id ) user.stored_workflow_menu_entries.append( m ) sess.flush() - return trans.show_message( "Menu updated", refresh_frames=['tools'] ) + message = "Menu updated" + refresh_frames = ['tools'] else: - user = trans.get_user() - ids_in_menu = set( [ x.stored_workflow_id for x in user.stored_workflow_menu_entries ] ) - workflows = trans.sa_session.query( model.StoredWorkflow ) \ - .filter_by( user=user, deleted=False ) \ - .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \ - .all() - shared_by_others = trans.sa_session \ - .query( model.StoredWorkflowUserShareAssociation ) \ - .filter_by( user=user ) \ - .filter( model.StoredWorkflow.deleted == False ) \ - .all() - return trans.fill_template( "workflow/configure_menu.mako", - workflows=workflows, - 
shared_by_others=shared_by_others, - ids_in_menu=ids_in_menu ) + message = None + refresh_frames = [] + user = trans.get_user() + ids_in_menu = set( [ x.stored_workflow_id for x in user.stored_workflow_menu_entries ] ) + workflows = trans.sa_session.query( model.StoredWorkflow ) \ + .filter_by( user=user, deleted=False ) \ + .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \ + .all() + shared_by_others = trans.sa_session \ + .query( model.StoredWorkflowUserShareAssociation ) \ + .filter_by( user=user ) \ + .filter( model.StoredWorkflow.deleted == False ) \ + .all() + return trans.fill_template( "workflow/configure_menu.mako", + workflows=workflows, + shared_by_others=shared_by_others, + ids_in_menu=ids_in_menu, + message=message, + refresh_frames=['tools'] ) def _workflow_to_svg_canvas( self, trans, stored ): workflow = stored.latest_workflow diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py @@ -3,16 +3,21 @@ import logging import os +import Queue import shutil +import stat +import subprocess import sys import tempfile import td_common_util +import threading import time import shlex -from subprocess import PIPE, Popen -from threading import Thread from contextlib import contextmanager +from galaxy.util import DATABASE_MAX_STRING_SIZE +from galaxy.util import DATABASE_MAX_STRING_SIZE_PRETTY +from galaxy.util import shrink_string_by_size from galaxy.util import unicodify from galaxy.util.template import fill_template from galaxy import eggs @@ -25,7 +30,6 @@ from fabric.api import env from fabric.api import hide from fabric.api import lcd -from fabric.api import local from fabric.api import settings from fabric.api import prefix from fabric.operations import _AttributeString @@ -33,20 +37,46 @@ log = logging.getLogger( __name__ ) INSTALLATION_LOG = 'INSTALLATION.log' -NO_CMD_OUTPUT_TIMEOUT = 600 VIRTUALENV_URL = 'https://pypi.python.org/packages/source/v/virtualenv/virtualenv-1.9.1.tar.gz' +class AsynchronousReader( threading.Thread ): + """ + A helper class to implement asynchronous reading of a stream in a separate thread. Read lines are pushed + onto a queue to be consumed in another thread. 
+ """ + + def __init__( self, fd, queue ): + threading.Thread.__init__( self ) + self._fd = fd + self._queue = queue + self.lines = [] + + def run( self ): + """Read lines and put them on the queue.""" + thread_lock = threading.Lock() + thread_lock.acquire() + for line in iter( self._fd.readline, '' ): + stripped_line = line.rstrip() + self.lines.append( stripped_line ) + self._queue.put( stripped_line ) + thread_lock.release() + + def installation_complete( self ): + """Make sure there is more installation and compilation logging content expected.""" + return not self.is_alive() and self._queue.empty() + + class EnvFileBuilder( object ): def __init__( self, install_dir ): self.install_dir = install_dir self.return_code = 0 - def append_line( self, skip_if_contained=True, make_executable=True, **kwd ): + def append_line( self, make_executable=True, **kwd ): env_var_dict = dict( **kwd ) env_entry, env_file = self.create_or_update_env_shell_file( self.install_dir, env_var_dict ) - return_code = file_append( env_entry, env_file, skip_if_contained=skip_if_contained, make_executable=make_executable ) + return_code = file_append( env_entry, env_file, make_executable=make_executable ) self.return_code = self.return_code or return_code return self.return_code @@ -128,62 +158,93 @@ if int( version.split( "." )[ 0 ] ) < 1: raise NotImplementedError( "Install Fabric version 1.0 or later." ) +def close_file_descriptor( fd ): + """Attempt to close a file descriptor.""" + start_timer = time.time() + error = '' + while True: + try: + fd.close() + break + except IOError, e: + # Undoubtedly close() was called during a concurrent operation on the same file object. + log.debug( 'Error closing file descriptor: %s' % str( e ) ) + time.sleep( .5 ) + current_wait_time = time.time() - start_timer + if current_wait_time >= 600: + error = 'Error closing file descriptor: %s' % str( e ) + break + return error + def enqueue_output( stdout, stdout_queue, stderr, stderr_queue ): - ''' - This method places streamed stdout and stderr into a threaded IPC queue target, defined as follows: - - stdio_thread = Thread( target=enqueue_output, args=( process_handle.stdout, stdout_queue, process_handle.stderr, stderr_queue ) ) - - When input is received, it logs that input to the defined logger, and saves it to that thread's queue. The calling thread - can then retrieve that data using thread.stdout and thread.stderr. - ''' + """ + This method places streamed stdout and stderr into a threaded IPC queue target. Received data + is printed and saved to that thread's queue. The calling thread can then retrieve the data using + thread.stdout and thread.stderr. + """ stdout_logger = logging.getLogger( 'fabric_util.STDOUT' ) stderr_logger = logging.getLogger( 'fabric_util.STDERR' ) - for line in iter( stdout.readline, b'' ): + for line in iter( stdout.readline, '' ): output = line.rstrip() stdout_logger.debug( output ) stdout_queue.put( output ) - stdout.close() stdout_queue.put( None ) - for line in iter( stderr.readline, b'' ): + for line in iter( stderr.readline, '' ): output = line.rstrip() stderr_logger.debug( output ) stderr_queue.put( output ) - stderr.close() stderr_queue.put( None ) -def file_append( text, file_path, skip_if_contained=True, make_executable=True ): - ''' - Append a line to a file unless skip_if_contained is True and the line already exists in the file. This method creates the file - if it doesn't exist. If make_executable is True, the permissions on the file are set to executable by the owner. 
This method - is similar to a local version of fabric.contrib.files.append. - ''' - if not os.path.exists( file_path ): - local( 'touch %s' % file_path ) +def file_append( text, file_path, make_executable=True ): + """ + Append a line to a file unless the line already exists in the file. This method creates the file if + it doesn't exist. If make_executable is True, the permissions on the file are set to executable by + the owner. + """ + file_dir = os.path.dirname( file_path ) + if not os.path.exists( file_dir ): + try: + os.makedirs( file_dir ) + except Exception, e: + log.exception( str( e ) ) + return 1 + if os.path.exists( file_path ): + try: + new_env_file_contents = [] + env_file_contents = file( file_path, 'r' ).readlines() + # Clean out blank lines from the env.sh file. + for line in env_file_contents: + line = line.rstrip() + if line: + new_env_file_contents.append( line ) + env_file_contents = new_env_file_contents + except Exception, e: + log.exception( str( e ) ) + return 1 + else: + env_file_handle = open( file_path, 'w' ) + env_file_handle.close() + env_file_contents = [] if make_executable: - # Explicitly set the file to the received mode if valid. - with settings( hide( 'everything' ), warn_only=True ): - local( 'chmod +x %s' % file_path ) + # Explicitly set the file's executable bits. + try: + os.chmod( file_path, int( '111', base=8) | os.stat( file_path )[ stat.ST_MODE ] ) + except Exception, e: + log.exception( str( e ) ) + return 1 return_code = 0 # Convert the received text to a list, in order to support adding one or more lines to the file. if isinstance( text, basestring ): text = [ text ] for line in text: - # Build a regex to search for the relevant line in env.sh. - regex = td_common_util.egrep_escape( line ) - if skip_if_contained: - # If the line exists in the file, egrep will return a success. - with settings( hide( 'everything' ), warn_only=True ): - egrep_cmd = 'egrep "^%s$" %s' % ( regex, file_path ) - contains_line = local( egrep_cmd ).succeeded - if contains_line: - continue - # Append the current line to the file, escaping any single quotes in the line. - line = line.replace( "'", r"'\\''" ) - return_code = local( "echo '%s' >> %s" % ( line, file_path ) ).return_code - if return_code: - # Return upon the first error encountered. - return return_code + line = line.rstrip() + if line not in env_file_contents: + env_file_contents.append( line ) + try: + file( file_path, 'w' ).write( '\n'.join( env_file_contents ) ) + except Exception, e: + log.exception( str( e ) ) + return 1 return return_code def filter_actions_after_binary_installation( actions ): @@ -202,23 +263,122 @@ def handle_command( app, tool_dependency, install_dir, cmd, return_output=False ): context = app.install_model.context - with settings( warn_only=True ): - output = local( cmd, capture=True ) + output = handle_complex_command( cmd ) log_results( cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) ) - if output.return_code: + stdout = output.stdout + stderr = output.stderr + if len( stdout ) > DATABASE_MAX_STRING_SIZE: + print "Length of stdout > %s, so only a portion will be saved in the database." % str( DATABASE_MAX_STRING_SIZE_PRETTY ) + stdout = shrink_string_by_size( stdout, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True ) + if len( stderr ) > DATABASE_MAX_STRING_SIZE: + print "Length of stderr > %s, so only a portion will be saved in the database." 
% str( DATABASE_MAX_STRING_SIZE_PRETTY ) + stderr = shrink_string_by_size( stderr, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True ) + if output.return_code not in [ 0 ]: tool_dependency.status = app.install_model.ToolDependency.installation_status.ERROR - if output.stderr: - tool_dependency.error_message = unicodify( str( output.stderr )[ :32768 ] ) - elif output.stdout: - tool_dependency.error_message = unicodify( str( output.stdout )[ :32768 ] ) + if stderr: + tool_dependency.error_message = unicodify( stderr ) + elif stdout: + tool_dependency.error_message = unicodify( stdout ) else: - tool_dependency.error_message = "Unknown error occurred executing shell command %s, return_code: %s" % ( str( cmd ), str( output.return_code ) ) + # We have a problem if there was no stdout and no stderr. + tool_dependency.error_message = "Unknown error occurred executing shell command %s, return_code: %s" % \ + ( str( cmd ), str( output.return_code ) ) context.add( tool_dependency ) context.flush() if return_output: return output return output.return_code +def handle_complex_command( command ): + """ + Wrap subprocess.Popen in such a way that the stderr and stdout from running a shell command will + be captured and logged in nearly real time. This is similar to fabric.local, but allows us to + retain control over the process. This method is named "complex" because it uses queues and + threads to execute a command while capturing and displaying the output. + """ + wrapped_command = shlex.split( '/bin/sh -c "%s"' % str( command ) ) + # Launch the command as subprocess. A bufsize of 1 means line buffered. + process_handle = subprocess.Popen( wrapped_command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=1, + close_fds=False, + cwd=state.env[ 'lcwd' ] ) + pid = process_handle.pid + # Launch the asynchronous readers of the process' stdout and stderr. + stdout_queue = Queue.Queue() + stdout_reader = AsynchronousReader( process_handle.stdout, stdout_queue ) + stdout_reader.start() + stderr_queue = Queue.Queue() + stderr_reader = AsynchronousReader( process_handle.stderr, stderr_queue ) + stderr_reader.start() + # Place streamed stdout and stderr into a threaded IPC queue target so it can + # be printed and stored for later retrieval when generating the INSTALLATION.log. + stdio_thread = threading.Thread( target=enqueue_output, + args=( process_handle.stdout, + stdout_queue, + process_handle.stderr, + stderr_queue ) ) + thread_lock = threading.Lock() + thread_lock.acquire() + stdio_thread.start() + # Check the queues for output until there is nothing more to get. + start_timer = time.time() + while not stdout_reader.installation_complete() or not stderr_reader.installation_complete(): + # Show what we received from standard output. + while not stdout_queue.empty(): + try: + line = stdout_queue.get() + except Queue.Empty: + line = None + break + if line: + print line + start_timer = time.time() + else: + break + # Show what we received from standard error. + while not stderr_queue.empty(): + try: + line = stderr_queue.get() + except Queue.Empty: + line = None + break + if line: + print line + start_timer = time.time() + else: + stderr_queue.task_done() + break + # Sleep a bit before asking the readers again. 
+ time.sleep( .1 ) + current_wait_time = time.time() - start_timer + if stdout_queue.empty() and stderr_queue.empty() and current_wait_time > td_common_util.NO_OUTPUT_TIMEOUT: + err_msg = "\nShutting down process id %s because it generated no output for the defined timeout period of %.1f seconds.\n" % \ + ( pid, td_common_util.NO_OUTPUT_TIMEOUT ) + stderr_reader.lines.append( err_msg ) + process_handle.kill() + break + thread_lock.release() + # Wait until each of the threads we've started terminate. The following calls will block each thread + # until it terminates either normally, through an unhandled exception, or until the timeout occurs. + stdio_thread.join( td_common_util.NO_OUTPUT_TIMEOUT ) + stdout_reader.join( td_common_util.NO_OUTPUT_TIMEOUT ) + stderr_reader.join( td_common_util.NO_OUTPUT_TIMEOUT ) + # Close subprocess' file descriptors. + error = close_file_descriptor( process_handle.stdout ) + error = close_file_descriptor( process_handle.stderr ) + stdout = '\n'.join( stdout_reader.lines ) + stderr = '\n'.join( stderr_reader.lines ) + # Handle error condition (deal with stdout being None, too) + output = _AttributeString( stdout.strip() if stdout else "" ) + errors = _AttributeString( stderr.strip() if stderr else "" ) + # Make sure the process has finished. + process_handle.poll() + output.return_code = process_handle.returncode + output.stderr = errors + return output + def handle_environment_variables( app, tool_dependency, install_dir, env_var_dict, set_prior_environment_commands ): """ This method works with with a combination of three tool dependency definition tag sets, which are defined in the tool_dependencies.xml file in the @@ -447,7 +607,7 @@ with settings( warn_only=True ): for tarball_name in tarball_names: cmd = '''PATH=$PATH:$R_HOME/bin; export PATH; R_LIBS=$INSTALL_DIR; export R_LIBS; - Rscript -e "install.packages(c('%s'),lib='$INSTALL_DIR', repos=NULL, dependencies=FALSE)"''' % ( str( tarball_name ) ) + Rscript -e \\"install.packages(c('%s'),lib='$INSTALL_DIR', repos=NULL, dependencies=FALSE)\\"''' % ( str( tarball_name ) ) cmd = install_environment.build_command( td_common_util.evaluate_template( cmd, install_dir ) ) return_code = handle_command( app, tool_dependency, install_dir, cmd ) if return_code: @@ -591,8 +751,6 @@ # </action> filtered_actions = [ a for a in actions ] dir = install_dir - # We need to be careful in determining if the value of dir is a valid directory because we're dealing with 2 environments, the fabric local - # environment and the python environment. Checking the path as follows should work. full_path_to_dir = os.path.abspath( os.path.join( work_dir, dir ) ) if not os.path.exists( full_path_to_dir ): os.makedirs( full_path_to_dir ) @@ -763,8 +921,7 @@ def log_results( command, fabric_AttributeString, file_path ): """ - Write attributes of fabric.operations._AttributeString (which is the output of executing command using fabric's local() method) - to a specified log file. + Write attributes of fabric.operations._AttributeString to a specified log file. """ if os.path.exists( file_path ): logfile = open( file_path, 'ab' ) @@ -785,33 +942,10 @@ work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-mtd" ) yield work_dir if os.path.exists( work_dir ): - local( 'rm -rf %s' % work_dir ) - -def run_local_command( command, capture_output=True, stream_output=True ): - # TODO: Overhaul this method. 
- import Queue - wrapped_command = shlex.split( "/bin/sh -c '%s'" % command ) - stdout_queue = Queue() - stderr_queue = Queue() - process_handle = Popen( wrapped_command, stdout=PIPE, stderr=PIPE, bufsize=1, close_fds=False, cwd=state.env[ 'lcwd' ] ) - stdio_thread = Thread( target=enqueue_output, args=( process_handle.stdout, stdout_queue, process_handle.stderr, stderr_queue ) ) - stdio_thread.daemon = True - stdio_thread.start() - stdout, stderr = wait_for_process( process_handle, stream_output, stdout_queue, stderr_queue ) - # Handle error condition (deal with stdout being None, too) - output = _AttributeString( stdout.strip() if stdout else "" ) - errors = _AttributeString( stderr.strip() if stderr else "" ) - output.failed = False - output.return_code = process_handle.returncode - output.stderr = errors - if process_handle.returncode not in env.ok_ret_codes: - output.failed = True - message = "Encountered an error (return code %s) while executing '%s'" % ( process_handle.returncode, command ) - log.error( message ) - output.succeeded = not output.failed - # If we were capturing, this will be a string; otherwise it will be None. - if capture_output: - return output + try: + shutil.rmtree( work_dir ) + except Exception, e: + log.exception( str( e ) ) def set_galaxy_environment( galaxy_user, tool_dependency_dir, host='localhost', shell='/bin/bash -l -c' ): """General Galaxy environment configuration. This method is not currently used.""" @@ -820,52 +954,4 @@ env.host_string = host env.shell = shell env.use_sudo = False - env.safe_cmd = local return env - -def wait_for_process( process_handle, stream_output, stdout_queue, stderr_queue ): - # TODO: Overhaul this method. - import Queue - pid = process_handle.pid - standard_out = [] - standard_err = [] - process_handle.wait() - # Generate stdout. - while True: - try: - line = stdout_queue.get( timeout=NO_CMD_OUTPUT_TIMEOUT ) - except Queue.Empty: - err_msg = "\nShutting down process id %s because it generated no stdout for the defined timout period of %d seconds.\n" % \ - ( pid, NO_CMD_OUTPUT_TIMEOUT ) - print err_msg - standard_out.append( err_msg ) - stdout_queue.task_done() - process_handle.kill() - break - else: - if line: - standard_out.append( line ) - else: - stdout_queue.task_done() - break - # Generate stderr. - while True: - try: - line = stderr_queue.get( timeout=NO_CMD_OUTPUT_TIMEOUT ) - except Queue.Empty: - err_msg = "\nShutting down process id %s because it generated no stderr for the defined timout period of %d seconds.\n" % \ - ( pid, NO_CMD_OUTPUT_TIMEOUT ) - print err_msg - standard_err.append( err_msg ) - stderr_queue.task_done() - process_handle.kill() - break - else: - if line: - standard_err.append( line ) - else: - stderr_queue.task_done() - break - stdout = '\n'.join( standard_out ) - stderr = '\n'.join( standard_err ) - return stdout, stderr diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/tool_shed/galaxy_install/tool_dependencies/install_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py @@ -350,7 +350,7 @@ tool_dependency_type='package' ) if not can_install_tool_dependency: log.debug( "Tool dependency %s version %s cannot be installed (it was probably previously installed), so returning it." 
% \ - ( str( tool_dependency.name, str( tool_dependency.version ) ) ) ) + ( str( tool_dependency.name ), str( tool_dependency.version ) ) ) return tool_dependency else: can_install_tool_dependency = True @@ -884,7 +884,7 @@ if env_var_version == '1.0': # Create this tool dependency's env.sh file. env_file_builder = fabric_util.EnvFileBuilder( install_dir ) - return_code = env_file_builder.append_line( skip_if_contained=True, make_executable=True, **env_var_dict ) + return_code = env_file_builder.append_line( make_executable=True, **env_var_dict ) if return_code: error_message = 'Error creating env.sh file for tool dependency %s, return_code: %s' % \ ( str( tool_dependency.name ), str( return_code ) ) diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py @@ -4,6 +4,7 @@ import shutil import sys import tarfile +import time import traceback import urllib2 import zipfile @@ -13,9 +14,12 @@ log = logging.getLogger( __name__ ) +# Set no activity timeout to 20 minutes. +NO_OUTPUT_TIMEOUT = 1200.0 + class CompressedFile( object ): - + def __init__( self, file_path, mode='r' ): if istar( file_path ): self.file_type = 'tar' @@ -30,7 +34,7 @@ self.archive = getattr( self, method )( file_path, mode ) else: raise NameError( 'File type %s specified, no open method found.' % self.file_type ) - + def extract( self, path ): '''Determine the path to which the archive should be extracted.''' contents = self.getmembers() @@ -58,36 +62,36 @@ os.makedirs( extraction_path ) self.archive.extractall( os.path.join( extraction_path ) ) return os.path.abspath( extraction_path ) - + def getmembers_tar( self ): return self.archive.getmembers() - + def getmembers_zip( self ): return self.archive.infolist() - + def getname_tar( self, item ): return item.name - + def getname_zip( self, item ): return item.filename - + def getmember( self, name ): for member in self.getmembers(): if self.getname( member ) == name: return member - + def getmembers( self ): return getattr( self, 'getmembers_%s' % self.type )() - + def getname( self, member ): return getattr( self, 'getname_%s' % self.type )( member ) - + def isdir( self, member ): return getattr( self, 'isdir_%s' % self.type )( member ) - + def isdir_tar( self, member ): return member.isdir() - + def isdir_zip( self, member ): if member.filename.endswith( os.sep ): return True @@ -97,7 +101,7 @@ if not self.isdir( member ): return True return False - + def open_tar( self, filepath, mode ): return tarfile.open( filepath, mode, errorlevel=0 ) @@ -146,9 +150,7 @@ return None def download_binary( url, work_dir ): - ''' - Download a pre-compiled binary from the specified URL. - ''' + """Download a pre-compiled binary from the specified URL.""" downloaded_filename = os.path.split( url )[ -1 ] dir = url_download( work_dir, downloaded_filename, url, extract=False ) return downloaded_filename @@ -179,7 +181,6 @@ return os.path.abspath( os.path.join( root, name ) ) return None - def get_env_shell_file_paths( app, elem ): # Currently only the following tag set is supported. # <repository toolshed="http://localhost:9009/" name="package_numpy_1_7" owner="test" changeset_revision="c84c6a8be056"> @@ -455,6 +456,8 @@ file_path = os.path.join( install_dir, downloaded_file_name ) src = None dst = None + # Set a timer so we don't sit here forever. 
+ start_time = time.time() try: src = urllib2.urlopen( download_url ) dst = open( file_path, 'wb' ) @@ -464,8 +467,14 @@ dst.write( chunk ) else: break - except: - raise + time_taken = time.time() - start_time + if time_taken > NO_OUTPUT_TIMEOUT: + err_msg = 'Downloading from URL %s took longer than the defined timeout period of %.1f seconds.' % \ + ( str( download_url ), NO_OUTPUT_TIMEOUT ) + raise Exception( err_msg ) + except Exception, e: + err_msg = err_msg = 'Error downloading from URL\n%s:\n%s' % ( str( download_url ), str( e ) ) + raise Exception( err_msg ) finally: if src: src.close() diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/tool_shed/util/commit_util.py --- a/lib/tool_shed/util/commit_util.py +++ b/lib/tool_shed/util/commit_util.py @@ -274,7 +274,11 @@ return False, None, error_message def handle_repository_dependency_elem( trans, elem, unpopulate=False ): + """Populate or unpopulate repository tags.""" # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" /> + # <repository changeset_revision="xxx" name="package_xorg_macros_1_17_1" owner="test" toolshed="yyy"> + # <package name="xorg_macros" version="1.17.1" /> + # </repository> error_message = '' name = elem.get( 'name' ) owner = elem.get( 'owner' ) @@ -285,6 +289,23 @@ revised = False toolshed = elem.get( 'toolshed' ) changeset_revision = elem.get( 'changeset_revision' ) + sub_elems = [ child_elem for child_elem in list( elem ) ] + if len( sub_elems ) > 0: + # At this point, a <repository> tag will point only to a package. + # <package name="xorg_macros" version="1.17.1" /> + # Coerce the list to an odict(). + sub_elements = odict() + packages = [] + for sub_elem in sub_elems: + sub_elem_type = sub_elem.tag + sub_elem_name = sub_elem.get( 'name' ) + sub_elem_version = sub_elem.get( 'version' ) + if sub_elem_type and sub_elem_name and sub_elem_version: + packages.append( ( sub_elem_name, sub_elem_version ) ) + sub_elements[ 'packages' ] = packages + else: + # Set to None. + sub_elements = None if unpopulate: # We're exporting the repository, so eliminate all toolshed and changeset_revision attributes from the <repository> tag. if toolshed or changeset_revision: @@ -292,7 +313,7 @@ attributes[ 'name' ] = name attributes[ 'owner' ] = owner attributes[ 'prior_installation_required' ] = elem.get( 'prior_installation_required', 'False' ) - elem = xml_util.create_element( 'repository', attributes=attributes, sub_elements=None ) + elem = xml_util.create_element( 'repository', attributes=attributes, sub_elements=sub_elements ) revised = True return revised, elem, error_message # From here on we're populating the toolshed and changeset_revisions if necessary. @@ -321,10 +342,12 @@ return revised, elem, error_message def handle_repository_dependency_sub_elem( trans, package_altered, altered, actions_elem, action_index, action_elem, unpopulate=False ): - # This method populates the toolshed and changeset_revision attributes for each of the following. - # <action type="set_environment_for_install"> - # <action type="setup_r_environment"> - # <action type="setup_ruby_environment"> + """ + Populate or unpopulate the toolshed and changeset_revision attributes for each of the following tag sets. 
+ <action type="set_environment_for_install"> + <action type="setup_r_environment"> + <action type="setup_ruby_environment"> + """ error_message = '' for repo_index, repo_elem in enumerate( action_elem ): # Make sure to skip comments and tags that are not <repository>. @@ -343,7 +366,8 @@ def handle_tool_dependencies_definition( trans, tool_dependencies_config, unpopulate=False ): """ - Populate or unpopulate the tooshed and changeset_revision attributes of each <repository> tag defined within a tool_dependencies.xml file. + Populate or unpopulate the tooshed and changeset_revision attributes of each <repository> + tag defined within a tool_dependencies.xml file. """ altered = False error_message = '' diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/tool_shed/util/tool_util.py --- a/lib/tool_shed/util/tool_util.py +++ b/lib/tool_shed/util/tool_util.py @@ -1017,7 +1017,8 @@ replacement_tool_version = None # Since we are going to remove the tool from the section, replace it with the newest loaded version of the tool. for available_tool_version in available_tool_versions: - if available_tool_version.id in tool_section.elems.keys(): + available_tool_section_id, available_tool_section_name = available_tool_version.get_panel_section() + if available_tool_version.id in tool_section.elems.keys() or section_key == available_tool_section_id: replacement_tool_key = 'tool_%s' % str( available_tool_version.id ) replacement_tool_version = available_tool_version break @@ -1057,7 +1058,8 @@ replacement_tool_version = None # Since we are going to remove the tool from the section, replace it with the newest loaded version of the tool. for available_tool_version in available_tool_versions: - if available_tool_version.id in trans.app.toolbox.tool_panel.keys(): + available_tool_section_id, available_tool_section_name = available_tool_version.get_panel_section() + if available_tool_version.id in trans.app.toolbox.tool_panel.keys() or not available_tool_section_id: replacement_tool_key = 'tool_%s' % str( available_tool_version.id ) replacement_tool_version = available_tool_version break diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa lib/tool_shed/util/xml_util.py --- a/lib/tool_shed/util/xml_util.py +++ b/lib/tool_shed/util/xml_util.py @@ -47,7 +47,7 @@ def create_element( tag, attributes=None, sub_elements=None ): """ Create a new element whose tag is the value of the received tag, and whose attributes are all - key / value pairs in the received the attributes and sub_elements. + key / value pairs in the received attributes and sub_elements. """ if tag: elem = XmlET.Element( tag ) @@ -56,14 +56,22 @@ for k, v in attributes.items(): elem.set( k, v ) if sub_elements: - # The received attributes is an odict as well. These handle information that tends to be + # The received attributes is an odict. These handle information that tends to be # long text including paragraphs (e.g., description and long_description. for k, v in sub_elements.items(): # Don't include fields that are blank. if v: - sub_elem = XmlET.SubElement( elem, k ) - if isinstance( v, list ): - # If the sub_elem is a list, then it must be a list of tuples where the first + if k == 'packages': + # The received sub_elements is an odict whose key is 'packages' and whose + # value is a list of ( name, version ) tuples. 
+ for v_tuple in v: + sub_elem = XmlET.SubElement( elem, 'package' ) + sub_elem_name, sub_elem_version = v_tuple + sub_elem.set( 'name', sub_elem_name ) + sub_elem.set( 'version', sub_elem_version ) + elif isinstance( v, list ): + sub_elem = XmlET.SubElement( elem, k ) + # If v is a list, then it must be a list of tuples where the first # item is the tag and the second item is the text value. for v_tuple in v: if len( v_tuple ) == 2: @@ -74,6 +82,7 @@ v_elem = XmlET.SubElement( sub_elem, v_tag ) v_elem.text = v_text else: + sub_elem = XmlET.SubElement( elem, k ) sub_elem.text = v return elem return None diff -r ed45cbef6347a1803ecb53cb99c8c07bad1d14aa -r 95050c5d33b95fd5479a10679875e51609ff8bfa static/scripts/galaxy.pages.js --- a/static/scripts/galaxy.pages.js +++ b/static/scripts/galaxy.pages.js @@ -452,14 +452,14 @@ // item_class='History'). var item_elt_id = item_info.iclass + "-" + item_id; var item_embed_html = - "<p><div id='" + item_elt_id + "' class='embedded-item " + item_info.singular.toLowerCase() + + "<div id='" + item_elt_id + "' class='embedded-item " + item_info.singular.toLowerCase() + " placeholder'> \ <p class='title'>Embedded Galaxy " + item_info.singular + " '" + item_name + "'</p> \ <p class='content'> \ [Do not edit this block; Galaxy will fill it in with the annotated " + item_info.singular.toLowerCase() + " when it is displayed.] \ </p> \ - </div></p>"; + </div>"; // Insert embedded item into document. wym.insert(" "); // Needed to prevent insertion from occurring in child element in webkit browsers. @@ -468,18 +468,18 @@ // TODO: can we fix this? // Due to oddities of wym.insert() [likely due to inserting a <div> and/or a complete paragraph], an // empty paragraph (or two!) may be included either before an embedded item. Remove these paragraphs. - $("#" + item_elt_id, wym._doc.body).each( function() { - // Remove previous empty paragraphs. - var removing = true; - while (removing) - { - var prev_elt = $(this).prev(); - if ( prev_elt.length != 0 && jQuery.trim(prev_elt.text()) == "" ) - prev_elt.remove(); - else - removing = false; - } - }); + //$("#" + item_elt_id, wym._doc.body).each( function() { + // // Remove previous empty paragraphs. + // var removing = true; + // while (removing) + // { + // var prev_elt = $(this).prev(); + // if ( prev_elt.length != 0 && jQuery.trim(prev_elt.text()) == "" ) + // prev_elt.remove(); + // else + // removing = false; + // } + //}); }); hide_modal(); @@ -706,4 +706,4 @@ // editor.dialog(Galaxy.DIALOG_EMBED_PAGE); //} }); -}); \ No newline at end of file +}); This diff is so big that we needed to truncate the remainder. 
https://bitbucket.org/galaxy/galaxy-central/commits/4b2ac910ca82/ Changeset: 4b2ac910ca82 Branch: model-info User: kellrott Date: 2014-03-04 22:40:14 Summary: Moving unique dataset id from dataset_id to uuid Affected #: 1 file diff -r 95050c5d33b95fd5479a10679875e51609ff8bfa -r 4b2ac910ca82deda745ade68f162c9d2ca8aee1c lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -17,6 +17,7 @@ import json import socket import time +from uuid import UUID, uuid4 from string import Template from itertools import ifilter from itertools import chain @@ -1172,7 +1173,7 @@ file_path = "/tmp/" object_store = None # This get initialized in mapping.py (method init) by app.py engine = None - def __init__( self, id=None, state=None, external_filename=None, extra_files_path=None, file_size=None, purgable=True ): + def __init__( self, id=None, state=None, external_filename=None, extra_files_path=None, file_size=None, purgable=True, uuid=None ): self.id = id self.state = state self.deleted = False @@ -1181,7 +1182,11 @@ self.external_filename = external_filename self._extra_files_path = extra_files_path self.file_size = file_size - self.uuid = None + if uuid is None: + self.uuid = uuid4() + else: + self.uuid = UUID(str(uuid)) + print "dataset uuid", self.uuid def get_file_name( self ): if not self.external_filename: @@ -1837,7 +1842,6 @@ rval = dict( id = hda.id, hda_ldda = 'hda', uuid = ( lambda uuid: str( uuid ) if uuid else None )( hda.dataset.uuid ), - dataset_id = hda.dataset.id, hid = hda.hid, file_ext = hda.ext, peek = ( lambda hda: hda.display_peek() if hda.peek and hda.peek != 'no peek' else None )( hda ), @@ -2267,7 +2271,6 @@ file_size = 0 rval = dict( id = ldda.id, hda_ldda = 'ldda', - dataset_id = ldda.dataset.id, model_class = self.__class__.__name__, name = ldda.name, deleted = ldda.deleted, https://bitbucket.org/galaxy/galaxy-central/commits/b3a0ba812dba/ Changeset: b3a0ba812dba Branch: model-info User: kellrott Date: 2014-03-05 00:46:10 Summary: Adding script to generate UUIDs for datasets where the value is null. This is to backfill previously generated records (from now on UUID will be generated during creation) Affected #: 2 files diff -r 4b2ac910ca82deda745ade68f162c9d2ca8aee1c -r b3a0ba812dba3208b5e24a625b0133d1456a7b1b scripts/cleanup_datasets/populate_uuid.py --- /dev/null +++ b/scripts/cleanup_datasets/populate_uuid.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python + +""" +Populates blank uuid fields in datasets with randomly generated values + +Going forward, these ids will be generated for all new datasets. This +script fixes datasets that were generated before the change. 
+""" + +import sys, os, ConfigParser +import galaxy.app +from galaxy.util.bunch import Bunch +import galaxy.datatypes.tabular +from galaxy.model.orm.scripts import get_config +from galaxy import eggs +from galaxy.model import mapping +import uuid + +eggs.require( "SQLAlchemy" ) + +from sqlalchemy import * + +assert sys.version_info[:2] >= ( 2, 4 ) + +def main(): + ini_file = sys.argv.pop(1) + config = get_config(ini_file) + + model = mapping.init( ini_file, config['db_url'], create_tables = False ) + + for row in model.context.query( model.Dataset ): + if row.uuid is None: + row.uuid = uuid.uuid4() + print "Setting dataset:", row.id, " UUID to ", row.uuid + model.context.flush() + + +if __name__ == "__main__": + main() \ No newline at end of file diff -r 4b2ac910ca82deda745ade68f162c9d2ca8aee1c -r b3a0ba812dba3208b5e24a625b0133d1456a7b1b scripts/cleanup_datasets/populate_uuid.sh --- /dev/null +++ b/scripts/cleanup_datasets/populate_uuid.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +cd `dirname $0`/../.. +export PYTHONPATH=./lib/ +python ./scripts/cleanup_datasets/populate_uuid.py ./universe_wsgi.ini $@ \ No newline at end of file https://bitbucket.org/galaxy/galaxy-central/commits/61719e07f398/ Changeset: 61719e07f398 Branch: model-info User: kellrott Date: 2014-03-05 00:50:40 Summary: Deleting rouge print statement Affected #: 1 file diff -r b3a0ba812dba3208b5e24a625b0133d1456a7b1b -r 61719e07f39897f017cb5036f96ffbb7081a5c25 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1186,7 +1186,6 @@ self.uuid = uuid4() else: self.uuid = UUID(str(uuid)) - print "dataset uuid", self.uuid def get_file_name( self ): if not self.external_filename: https://bitbucket.org/galaxy/galaxy-central/commits/54a2ec26ae5f/ Changeset: 54a2ec26ae5f User: dannon Date: 2014-03-13 18:48:00 Summary: Merged in kellrott/galaxy-central/model-info (pull request #338) Patch to expose the actual dataset id in the LDDA and HDA to_dict calls (in addition to the instance id). Affected #: 3 files diff -r b194d6a4797fc91a8ad9a1901465865d884d612d -r 54a2ec26ae5f8f2bf78b8b964d4279533e4a905a lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -17,6 +17,7 @@ import json import socket import time +from uuid import UUID, uuid4 from string import Template from itertools import ifilter from itertools import chain @@ -1172,7 +1173,7 @@ file_path = "/tmp/" object_store = None # This get initialized in mapping.py (method init) by app.py engine = None - def __init__( self, id=None, state=None, external_filename=None, extra_files_path=None, file_size=None, purgable=True ): + def __init__( self, id=None, state=None, external_filename=None, extra_files_path=None, file_size=None, purgable=True, uuid=None ): self.id = id self.state = state self.deleted = False @@ -1181,7 +1182,10 @@ self.external_filename = external_filename self._extra_files_path = extra_files_path self.file_size = file_size - self.uuid = None + if uuid is None: + self.uuid = uuid4() + else: + self.uuid = UUID(str(uuid)) def get_file_name( self ): if not self.external_filename: diff -r b194d6a4797fc91a8ad9a1901465865d884d612d -r 54a2ec26ae5f8f2bf78b8b964d4279533e4a905a scripts/cleanup_datasets/populate_uuid.py --- /dev/null +++ b/scripts/cleanup_datasets/populate_uuid.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python + +""" +Populates blank uuid fields in datasets with randomly generated values + +Going forward, these ids will be generated for all new datasets. 
This +script fixes datasets that were generated before the change. +""" + +import sys, os, ConfigParser +import galaxy.app +from galaxy.util.bunch import Bunch +import galaxy.datatypes.tabular +from galaxy.model.orm.scripts import get_config +from galaxy import eggs +from galaxy.model import mapping +import uuid + +eggs.require( "SQLAlchemy" ) + +from sqlalchemy import * + +assert sys.version_info[:2] >= ( 2, 4 ) + +def main(): + ini_file = sys.argv.pop(1) + config = get_config(ini_file) + + model = mapping.init( ini_file, config['db_url'], create_tables = False ) + + for row in model.context.query( model.Dataset ): + if row.uuid is None: + row.uuid = uuid.uuid4() + print "Setting dataset:", row.id, " UUID to ", row.uuid + model.context.flush() + + +if __name__ == "__main__": + main() \ No newline at end of file diff -r b194d6a4797fc91a8ad9a1901465865d884d612d -r 54a2ec26ae5f8f2bf78b8b964d4279533e4a905a scripts/cleanup_datasets/populate_uuid.sh --- /dev/null +++ b/scripts/cleanup_datasets/populate_uuid.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +cd `dirname $0`/../.. +export PYTHONPATH=./lib/ +python ./scripts/cleanup_datasets/populate_uuid.py ./universe_wsgi.ini $@ \ No newline at end of file Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
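The merged UUID change works in two parts: Dataset.__init__ now generates an identifier when none is supplied and normalizes anything else through uuid.UUID, while populate_uuid.py backfills rows created before the change. A stripped-down sketch of that constructor pattern follows; the Record class is illustrative only and not part of Galaxy.

# Illustrative stand-in for the uuid handling added to Dataset.__init__; not Galaxy code.
from uuid import UUID, uuid4

class Record( object ):

    def __init__( self, uuid=None ):
        if uuid is None:
            # New objects get a freshly generated identifier.
            self.uuid = uuid4()
        else:
            # Accept a UUID object or its string form and normalize it.
            self.uuid = UUID( str( uuid ) )

if __name__ == '__main__':
    fresh = Record()
    copied = Record( uuid=str( fresh.uuid ) )
    assert fresh.uuid == copied.uuid
    print fresh.uuid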