commit/galaxy-central: 4 new changesets
4 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/f2b125ad393e/
Changeset: f2b125ad393e
User: nsoranzo
Date: 2014-11-04 15:46:42+00:00
Summary: Remove unused and buggy function.
Affected #: 2 files

diff -r f9e8de1c84b2d60cc727ec5b64da6fe67616b7ed -r f2b125ad393e0a1a0a72ddfd5497855576b5d1dd lib/galaxy/web/framework/webapp.py
--- a/lib/galaxy/web/framework/webapp.py
+++ b/lib/galaxy/web/framework/webapp.py
@@ -504,9 +504,9 @@
         for char in filter( lambda x: x not in string.ascii_lowercase + string.digits + '-', username ):
             username = username.replace( char, '-' )
         # Find a unique username - user can change it later
-        if ( self.sa_session.query( self.app.model.User ).filter_by( username=username ).first() ):
+        if self.sa_session.query( self.app.model.User ).filter_by( username=username ).first():
             i = 1
-            while ( self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first() ):
+            while self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first():
                 i += 1
             username += '-' + str(i)
         user.username = username
@@ -701,11 +701,6 @@
     def template_context( self ):
         return dict()
 
-    def make_form_data( self, name, **kwargs ):
-        rval = self.template_context[name] = FormData()
-        rval.values.update( kwargs )
-        return rval
-
     def set_message( self, message, type=None ):
         """
         Convenience method for setting the 'message' and 'message_type'

diff -r f9e8de1c84b2d60cc727ec5b64da6fe67616b7ed -r f2b125ad393e0a1a0a72ddfd5497855576b5d1dd lib/galaxy/webapps/demo_sequencer/framework/__init__.py
--- a/lib/galaxy/webapps/demo_sequencer/framework/__init__.py
+++ b/lib/galaxy/webapps/demo_sequencer/framework/__init__.py
@@ -116,10 +116,7 @@
     @galaxy.web.framework.base.lazy_property
     def template_context( self ):
         return dict()
-    def make_form_data( self, name, **kwargs ):
-        rval = self.template_context[name] = FormData()
-        rval.values.update( kwargs )
-        return rval
+
     def set_message( self, message, type=None ):
         """
         Convenience method for setting the 'message' and 'message_type'

https://bitbucket.org/galaxy/galaxy-central/commits/636f0fff0061/
Changeset: 636f0fff0061
User: nsoranzo
Date: 2014-11-05 13:28:14+00:00
Summary: Add keys() and values() methods to Bunch.
Affected #: 1 file

diff -r f2b125ad393e0a1a0a72ddfd5497855576b5d1dd -r 636f0fff006148e875f55bab684821cc60b0630f lib/galaxy/util/bunch.py
--- a/lib/galaxy/util/bunch.py
+++ b/lib/galaxy/util/bunch.py
@@ -17,6 +17,12 @@
     def items(self):
         return self.__dict__.items()
 
+    def keys(self):
+        return self.__dict__.keys()
+
+    def values(self):
+        return self.__dict__.values()
+
     def __str__(self):
         return '%s' % self.__dict__
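The two small Bunch additions are what let call sites iterate a Bunch like a dict: several loops in the next changeset switch from items() to values() on exactly this basis. A minimal, self-contained sketch (a simplified stand-in for galaxy.util.bunch.Bunch, not the full class):

    # Simplified Bunch with the methods added by this changeset.
    class Bunch(object):
        def __init__(self, **kwargs):
            self.__dict__.update(kwargs)

        def items(self):
            return self.__dict__.items()

        def keys(self):                    # added by this changeset
            return self.__dict__.keys()

        def values(self):                  # added by this changeset
            return self.__dict__.values()

        def __str__(self):
            return '%s' % self.__dict__

    # Dataset.states is a Bunch; with values() available, loops that ignore
    # the key no longer need to unpack items():
    states = Bunch(NEW='new', UPLOAD='upload', OK='ok')
    state_count = dict((state, 0) for state in states.values())
    assert sorted(state_count) == ['new', 'ok', 'upload']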
https://bitbucket.org/galaxy/galaxy-central/commits/e14c29154b8a/
Changeset: e14c29154b8a
User: nsoranzo
Date: 2014-11-04 15:48:33+00:00
Summary: Bugs, doc and pylint fixes.
Affected #: 6 files

diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -13,7 +13,6 @@
 from paste.httpexceptions import HTTPBadRequest, HTTPInternalServerError
 from paste.httpexceptions import HTTPNotImplemented, HTTPRequestRangeNotSatisfiable
 
-from galaxy import exceptions
 from galaxy.exceptions import ItemAccessibilityException, ItemDeletionException, ItemOwnershipException
 from galaxy.exceptions import MessageException
 
@@ -27,7 +26,7 @@
 from galaxy.web.form_builder import AddressField, CheckboxField, SelectField, TextArea, TextField
 from galaxy.web.form_builder import build_select_field, HistoryField, PasswordField, WorkflowField, WorkflowMappingField
 from galaxy.workflow.modules import module_factory, WorkflowModuleInjector, MissingToolException
-from galaxy.model.orm import eagerload, eagerload_all, desc, not_
+from galaxy.model.orm import eagerload, eagerload_all, desc
 from galaxy.security.validate_user_input import validate_publicname
 from galaxy.util.sanitize_html import sanitize_html
 from galaxy.model.item_attrs import Dictifiable, UsesAnnotations
@@ -85,7 +84,7 @@
         Convenience method to get a model object with the specified checks.
         """
         return managers_base.get_object( trans, id, class_name, check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted )
-    
+
     # this should be here - but catching errors from sharable item controllers that *should* have SharableItemMixin
     # but *don't* then becomes difficult
     #def security_check( self, trans, item, check_ownership=False, check_accessible=False ):
@@ -322,7 +321,7 @@
 
         # Initialize count dict with all states.
         state_count_dict = {}
-        for k, state in trans.app.model.Dataset.states.items():
+        for state in trans.app.model.Dataset.states.values():
             state_count_dict[ state ] = 0
 
         # Process query results, adding to count dict.
@@ -370,7 +369,7 @@
         # init counts, ids for each state
         state_counts = {}
         state_ids = {}
-        for key, state in trans.app.model.Dataset.states.items():
+        for state in trans.app.model.Dataset.states.values():
             state_counts[ state ] = 0
             state_ids[ state ] = []
 
@@ -566,7 +565,7 @@
             # DEPRECATION: We still support unencoded ids for backward compatibility
             try:
                 dataset_id = int( dataset_id )
-            except ValueError, v_err:
+            except ValueError:
                 raise HTTPBadRequest( "Invalid dataset id: %s." % str( dataset_id ) )
             try:
@@ -589,7 +588,7 @@
             error( "You are not allowed to access this dataset" )
 
         if check_state and data.state == trans.model.Dataset.states.UPLOAD:
-            return trans.show_error_message( "Please wait until this dataset finishes uploading "
+            return trans.show_error_message( "Please wait until this dataset finishes uploading "
                                              + "before attempting to view it." )
         return data
@@ -651,7 +650,7 @@
                                             check_ownership=check_ownership,
                                             check_accessible=check_accessible,
                                             check_state=check_state )
-            except Exception, exception:
+            except Exception:
                 pass
             hdas.append( hda )
         return hdas
@@ -711,7 +710,7 @@
 
         # ---- return here if deleted AND purged OR can't access
         purged = ( hda.purged or hda.dataset.purged )
-        if ( hda.deleted and purged ):
+        if hda.deleted and purged:
             #TODO: to_dict should really go AFTER this - only summary data
             return trans.security.encode_dict_ids( hda_dict )
 
@@ -747,10 +746,6 @@
         #TODO: it may also be wiser to remove from here and add as API call that loads the visualizations
         #   when the visualizations button is clicked (instead of preloading/pre-checking)
 
-        # ---- return here if deleted
-        if hda.deleted and not purged:
-            return trans.security.encode_dict_ids( hda_dict )
-
         return trans.security.encode_dict_ids( hda_dict )
 
     def get_inaccessible_hda_dict( self, trans, hda ):
@@ -892,7 +887,8 @@
         #        or ( trans.app.security_agent.can_add_library_item( user.all_roles(), item ) ) )
 
     def can_current_user_add_to_library_item( self, trans, item ):
-        if not trans.user: return False
+        if not trans.user:
+            return False
         return ( ( trans.user_is_admin() )
                  or ( trans.app.security_agent.can_add_library_item( trans.get_current_user_roles(), item ) ) )
 
@@ -1411,11 +1407,6 @@
         # Get data provider.
         track_data_provider = trans.app.data_provider_registry.get_data_provider( trans, original_dataset=dataset )
 
-        if isinstance( dataset, trans.app.model.HistoryDatasetAssociation ):
-            hda_ldda = "hda"
-        elif isinstance( dataset, trans.app.model.LibraryDatasetDatasetAssociation ):
-            hda_ldda = "ldda"
-
         # Get track definition.
         return {
             "track_type": dataset.datatype.track_type,
@@ -1703,7 +1694,7 @@
         data['name'] = workflow.name
         data['annotation'] = annotation_str
         if workflow.uuid is not None:
-            data['uuid'] = str(workflow.uuid)
+            data['uuid'] = str(workflow.uuid)
         data['steps'] = {}
         # For each step, rebuild the form and encode the state
         for step in workflow.steps:
@@ -1741,18 +1732,16 @@
             step_dict['inputs'] = module.get_runtime_input_dicts( annotation_str )
             # User outputs
             step_dict['user_outputs'] = []
-            """
-            module_outputs = module.get_data_outputs()
-            step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step )
-            for output in step_outputs:
-                name = output.output_name
-                annotation = ""
-                for module_output in module_outputs:
-                    if module_output.get( 'name', None ) == name:
-                        output_type = module_output.get( 'extension', '' )
-                        break
-                data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type }
-            """
+#             module_outputs = module.get_data_outputs()
+#             step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step )
+#             for output in step_outputs:
+#                 name = output.output_name
+#                 annotation = ""
+#                 for module_output in module_outputs:
+#                     if module_output.get( 'name', None ) == name:
+#                         output_type = module_output.get( 'extension', '' )
+#                         break
+#                 data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type }
 
             # All step outputs
             step_dict['outputs'] = []
@@ -2137,7 +2126,7 @@
             # We need the type of each template field widget
             widgets = item.get_template_widgets( trans )
             # The list of widgets may include an AddressField which we need to save if it is new
-            for index, widget_dict in enumerate( widgets ):
+            for widget_dict in widgets:
                 widget = widget_dict[ 'widget' ]
                 if isinstance( widget, AddressField ):
                     value = util.restore_text( params.get( widget.name, '' ) )
@@ -2218,7 +2207,7 @@
                             trans.sa_session.flush()
                         info_association = sra.run
                     else:
-                        info_association = assoc.run
+                        info_association = assoc.run
                 else:
                     info_association = None
         if info_association:
@@ -2362,7 +2351,7 @@
     def widget_fields_have_contents( self, widgets ):
         # Return True if any of the fields in widgets contain contents, widgets is a list of dictionaries that looks something like:
         # [{'widget': <galaxy.web.form_builder.TextField object at 0x10867aa10>, 'helptext': 'Field 0 help (Optional)', 'label': 'Field 0'}]
-        for i, field in enumerate( widgets ):
+        for field in widgets:
            if ( isinstance( field[ 'widget' ], TextArea ) or isinstance( field[ 'widget' ], TextField ) ) and field[ 'widget' ].value:
                return True
            if isinstance( field[ 'widget' ], SelectField ) and field[ 'widget' ].options:
@@ -2383,7 +2372,7 @@
     def clean_field_contents( self, widgets, **kwd ):
         field_contents = {}
-        for index, widget_dict in enumerate( widgets ):
+        for widget_dict in widgets:
             widget = widget_dict[ 'widget' ]
             value = kwd.get( widget.name, '' )
             if isinstance( widget, CheckboxField ):
@@ -2432,7 +2421,7 @@
         '''
         params = util.Params( kwd )
         values = {}
-        for index, field in enumerate( form_definition.fields ):
+        for field in form_definition.fields:
             field_type = field[ 'type' ]
             field_name = field[ 'name' ]
             input_value = params.get( field_name, '' )
@@ -2584,7 +2573,7 @@
         if message:
             return trans.fill_template( '/sharing_base.mako', item=self.get_item( trans, id ), message=message, status='error' )
         user.username = username
-        trans.sa_session.flush
+        trans.sa_session.flush()
         return self.sharing( trans, id, **kwargs )
 
     @web.expose
@@ -2646,34 +2635,34 @@
     @web.require_login( "share Galaxy items" )
     def sharing( self, trans, id, **kwargs ):
         """ Handle item sharing. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     @web.expose
     @web.require_login( "share Galaxy items" )
     def share( self, trans, id=None, email="", **kwd ):
         """ Handle sharing an item with a particular user. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     @web.expose
     def display_by_username_and_slug( self, trans, username, slug ):
         """ Display item by username and slug. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     @web.json
     @web.require_login( "get item name and link" )
     def get_name_and_link_async( self, trans, id=None ):
         """ Returns item's name and link. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     @web.expose
     @web.require_login("get item content asynchronously")
     def get_item_content_async( self, trans, id ):
         """ Returns item content in HTML format. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     def get_item( self, trans, id ):
         """ Return item based on id. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
 
 class UsesQuotaMixin( object ):
@@ -2690,7 +2679,7 @@
     def _get_user_tags( self, trans, item_class_name, id ):
         user = trans.user
         tagged_item = self._get_tagged_item( trans, item_class_name, id )
-        return [ tag for tag in tagged_item.tags if ( tag.user == user ) ]
+        return [ tag for tag in tagged_item.tags if tag.user == user ]
 
     def _get_tagged_item( self, trans, item_class_name, id, check_ownership=True ):
         tagged_item = self.get_object( trans, id, item_class_name, check_ownership=check_ownership, check_accessible=True )
""" @@ -2844,10 +2832,10 @@ yield prefix, ("%s" % (meta)).encode("utf8", errors='replace') -""" -Deprecated: `BaseController` used to be available under the name `Root` -""" class ControllerUnavailable( Exception ): + """ + Deprecated: `BaseController` used to be available under the name `Root` + """ pass ## ---- Utility methods ------------------------------------------------------- diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/api/histories.py --- a/lib/galaxy/webapps/galaxy/api/histories.py +++ b/lib/galaxy/webapps/galaxy/api/histories.py @@ -92,7 +92,7 @@ :rtype: dictionary :returns: detailed history information from - :func:`galaxy.web.base.controller.UsesHistoryDatasetAssociationMixin.get_history_dict` + :func:`galaxy.web.base.controller.UsesHistoryMixin.get_history_dict` """ history_id = id deleted = string_as_bool( deleted ) diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/api/lda_datasets.py --- a/lib/galaxy/webapps/galaxy/api/lda_datasets.py +++ b/lib/galaxy/webapps/galaxy/api/lda_datasets.py @@ -480,7 +480,7 @@ tool.update_state( trans, tool.inputs_by_page[ 0 ], state.inputs, kwd ) tool_params = state.inputs dataset_upload_inputs = [] - for input_name, input in tool.inputs.iteritems(): + for input in tool.inputs.itervalues(): if input.type == "upload_dataset": dataset_upload_inputs.append( input ) library_bunch = upload_common.handle_library_params( trans, {}, trans.security.encode_id( folder.id ) ) @@ -536,7 +536,7 @@ * POST /api/libraries/datasets/download/{format} Downloads requested datasets (identified by encoded IDs) in requested format. - example: ``GET localhost:8080/api/libraries/datasets/download/tbz?ldda_ids%255B%255D=a0d84b45643a2678&ldda_ids%255B%255D=fe38c84dcd46c828`` + example: ``GET localhost:8080/api/libraries/datasets/download/tbz?ld_ids%255B%255D=a0d84b45643a2678&ld_ids%255B%255D=fe38c84dcd46c828`` .. note:: supported format values are: 'zip', 'tgz', 'tbz', 'uncompressed' @@ -554,7 +554,7 @@ datasets_to_download = kwd.get( 'ld_ids%5B%5D', None ) if datasets_to_download is None: datasets_to_download = kwd.get( 'ld_ids', None ) - if ( datasets_to_download is not None ): + if datasets_to_download is not None: datasets_to_download = util.listify( datasets_to_download ) for dataset_id in datasets_to_download: try: @@ -570,128 +570,128 @@ raise exceptions.RequestParameterMissingException( 'Request has to contain a list of dataset ids to download.' ) if format in [ 'zip', 'tgz', 'tbz' ]: - # error = False - killme = string.punctuation + string.whitespace - trantab = string.maketrans( killme, '_'*len( killme ) ) - try: - outext = 'zip' - if format == 'zip': - # Can't use mkstemp - the file must not exist first - tmpd = tempfile.mkdtemp() - util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, self.app.config.gid ) - tmpf = os.path.join( tmpd, 'library_download.' + format ) - if trans.app.config.upstream_gzip: - archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True ) + # error = False + killme = string.punctuation + string.whitespace + trantab = string.maketrans( killme, '_'*len( killme ) ) + try: + outext = 'zip' + if format == 'zip': + # Can't use mkstemp - the file must not exist first + tmpd = tempfile.mkdtemp() + util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, self.app.config.gid ) + tmpf = os.path.join( tmpd, 'library_download.' 
diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -92,7 +92,7 @@
 
         :rtype:     dictionary
         :returns:   detailed history information from
-            :func:`galaxy.web.base.controller.UsesHistoryDatasetAssociationMixin.get_history_dict`
+            :func:`galaxy.web.base.controller.UsesHistoryMixin.get_history_dict`
         """
         history_id = id
         deleted = string_as_bool( deleted )

diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/api/lda_datasets.py
--- a/lib/galaxy/webapps/galaxy/api/lda_datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/lda_datasets.py
@@ -480,7 +480,7 @@
         tool.update_state( trans, tool.inputs_by_page[ 0 ], state.inputs, kwd )
         tool_params = state.inputs
         dataset_upload_inputs = []
-        for input_name, input in tool.inputs.iteritems():
+        for input in tool.inputs.itervalues():
             if input.type == "upload_dataset":
                 dataset_upload_inputs.append( input )
         library_bunch = upload_common.handle_library_params( trans, {}, trans.security.encode_id( folder.id ) )
@@ -536,7 +536,7 @@
         * POST /api/libraries/datasets/download/{format}
             Downloads requested datasets (identified by encoded IDs) in requested format.
 
-        example: ``GET localhost:8080/api/libraries/datasets/download/tbz?ldda_ids%255B%255D=a0d84b45643a2678&ldda_ids%255B%255D=fe38c84dcd46c828``
+        example: ``GET localhost:8080/api/libraries/datasets/download/tbz?ld_ids%255B%255D=a0d84b45643a2678&ld_ids%255B%255D=fe38c84dcd46c828``
 
         .. note:: supported format values are: 'zip', 'tgz', 'tbz', 'uncompressed'
 
@@ -554,7 +554,7 @@
         datasets_to_download = kwd.get( 'ld_ids%5B%5D', None )
         if datasets_to_download is None:
             datasets_to_download = kwd.get( 'ld_ids', None )
-        if ( datasets_to_download is not None ):
+        if datasets_to_download is not None:
             datasets_to_download = util.listify( datasets_to_download )
             for dataset_id in datasets_to_download:
                 try:
@@ -570,128 +570,128 @@
             raise exceptions.RequestParameterMissingException( 'Request has to contain a list of dataset ids to download.' )
 
         if format in [ 'zip', 'tgz', 'tbz' ]:
-                # error = False
-                killme = string.punctuation + string.whitespace
-                trantab = string.maketrans( killme, '_'*len( killme ) )
-                try:
-                    outext = 'zip'
-                    if format == 'zip':
-                        # Can't use mkstemp - the file must not exist first
-                        tmpd = tempfile.mkdtemp()
-                        util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, self.app.config.gid )
-                        tmpf = os.path.join( tmpd, 'library_download.' + format )
-                        if trans.app.config.upstream_gzip:
-                            archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True )
+            # error = False
+            killme = string.punctuation + string.whitespace
+            trantab = string.maketrans( killme, '_'*len( killme ) )
+            try:
+                outext = 'zip'
+                if format == 'zip':
+                    # Can't use mkstemp - the file must not exist first
+                    tmpd = tempfile.mkdtemp()
+                    util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, self.app.config.gid )
+                    tmpf = os.path.join( tmpd, 'library_download.' + format )
+                    if trans.app.config.upstream_gzip:
+                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True )
+                    else:
+                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
+                    archive.add = lambda x, y: archive.write( x, y.encode( 'CP437' ) )
+                elif format == 'tgz':
+                    if trans.app.config.upstream_gzip:
+                        archive = StreamBall( 'w|' )
+                        outext = 'tar'
+                    else:
+                        archive = StreamBall( 'w|gz' )
+                        outext = 'tgz'
+                elif format == 'tbz':
+                    archive = StreamBall( 'w|bz2' )
+                    outext = 'tbz2'
+            except ( OSError, zipfile.BadZipfile ):
+                log.exception( "Unable to create archive for download" )
+                raise exceptions.InternalServerError( "Unable to create archive for download." )
+            except Exception:
+                log.exception( "Unexpected error %s in create archive for download" % sys.exc_info()[ 0 ] )
+                raise exceptions.InternalServerError( "Unable to create archive for download." )
+            composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
+            seen = []
+            for ld in library_datasets:
+                ldda = ld.library_dataset_dataset_association
+                ext = ldda.extension
+                is_composite = ext in composite_extensions
+                path = ""
+                parent_folder = ldda.library_dataset.folder
+                while parent_folder is not None:
+                    # Exclude the now-hidden "root folder"
+                    if parent_folder.parent is None:
+                        path = os.path.join( parent_folder.library_root[ 0 ].name, path )
+                        break
+                    path = os.path.join( parent_folder.name, path )
+                    parent_folder = parent_folder.parent
+                path += ldda.name
+                while path in seen:
+                    path += '_'
+                seen.append( path )
+                zpath = os.path.split(path)[ -1 ]  # comes as base_name/fname
+                outfname, zpathext = os.path.splitext( zpath )
+
+                if is_composite:  # need to add all the components from the extra_files_path to the zip
+                    if zpathext == '':
+                        zpath = '%s.html' % zpath  # fake the real nature of the html file
+                    try:
+                        if format == 'zip':
+                            archive.add( ldda.dataset.file_name, zpath )  # add the primary of a composite set
                         else:
-                            archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
-                        archive.add = lambda x, y: archive.write( x, y.encode( 'CP437' ) )
-                    elif format == 'tgz':
-                        if trans.app.config.upstream_gzip:
-                            archive = StreamBall( 'w|' )
-                            outext = 'tar'
-                        else:
-                            archive = StreamBall( 'w|gz' )
-                            outext = 'tgz'
-                    elif format == 'tbz':
-                        archive = StreamBall( 'w|bz2' )
-                        outext = 'tbz2'
-                except ( OSError, zipfile.BadZipfile ):
-                    log.exception( "Unable to create archive for download" )
-                    raise exceptions.InternalServerError( "Unable to create archive for download." )
-                except Exception:
-                    log.exception( "Unexpected error %s in create archive for download" % sys.exc_info()[ 0 ] )
-                    raise exceptions.InternalServerError( "Unable to create archive for download." )
-                composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
-                seen = []
-                for ld in library_datasets:
-                    ldda = ld.library_dataset_dataset_association
-                    ext = ldda.extension
-                    is_composite = ext in composite_extensions
-                    path = ""
-                    parent_folder = ldda.library_dataset.folder
-                    while parent_folder is not None:
-                        # Exclude the now-hidden "root folder"
-                        if parent_folder.parent is None:
-                            path = os.path.join( parent_folder.library_root[ 0 ].name, path )
-                            break
-                        path = os.path.join( parent_folder.name, path )
-                        parent_folder = parent_folder.parent
-                    path += ldda.name
-                    while path in seen:
-                        path += '_'
-                    seen.append( path )
-                    zpath = os.path.split(path)[ -1 ]  # comes as base_name/fname
-                    outfname, zpathext = os.path.splitext( zpath )
+                            archive.add( ldda.dataset.file_name, zpath, check_file=True )  # add the primary of a composite set
+                    except IOError:
+                        log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
+                        raise exceptions.InternalServerError( "Unable to create archive for download." )
+                    except ObjectNotFound:
+                        log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+                        raise exceptions.ObjectNotFound( "Requested dataset not found. " )
+                    except Exception, e:
+                        log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
+                        raise exceptions.InternalServerError( "Unable to add composite parent to temporary library download archive. " + str( e ) )
 
-                    if is_composite: # need to add all the components from the extra_files_path to the zip
-                        if zpathext == '':
-                            zpath = '%s.html' % zpath # fake the real nature of the html file
+                    flist = glob.glob(os.path.join(ldda.dataset.extra_files_path, '*.*'))  # glob returns full paths
+                    for fpath in flist:
+                        efp, fname = os.path.split(fpath)
+                        if fname > '':
+                            fname = fname.translate(trantab)
                         try:
                             if format == 'zip':
-                                archive.add( ldda.dataset.file_name, zpath ) # add the primary of a composite set
+                                archive.add( fpath, fname )
                             else:
-                                archive.add( ldda.dataset.file_name, zpath, check_file=True ) # add the primary of a composite set
+                                archive.add( fpath, fname, check_file=True )
                         except IOError:
-                            log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
+                            log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname) )
                             raise exceptions.InternalServerError( "Unable to create archive for download." )
                         except ObjectNotFound:
-                            log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
-                            raise exceptions.ObjectNotFound( "Requested dataset not found. " )
-                        except Exception, e:
-                            log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
-                            raise exceptions.InternalServerError( "Unable to add composite parent to temporary library download archive. " + str( e ) )
-
-                        flist = glob.glob(os.path.join(ldda.dataset.extra_files_path, '*.*')) # glob returns full paths
-                        for fpath in flist:
-                            efp, fname = os.path.split(fpath)
-                            if fname > '':
-                                fname = fname.translate(trantab)
-                            try:
-                                if format == 'zip':
-                                    archive.add( fpath, fname )
-                                else:
-                                    archive.add( fpath, fname, check_file=True )
-                            except IOError:
-                                log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname) )
                                 raise exceptions.InternalServerError( "Unable to create archive for download." )
-                            except ObjectNotFound:
-                                log.exception( "Requested dataset %s does not exist on the host." % fpath )
-                                raise exceptions.ObjectNotFound( "Requested dataset not found." )
-                            except Exception, e:
-                                log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) )
-                                raise exceptions.InternalServerError( "Unable to add dataset to temporary library download archive . " + str( e ) )
-
-                    else: # simple case
-                        try:
-                            if format == 'zip':
-                                archive.add( ldda.dataset.file_name, path )
-                            else:
-                                archive.add( ldda.dataset.file_name, path, check_file=True )
-                        except IOError:
-                            log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name )
-                            raise exceptions.InternalServerError( "Unable to create archive for download" )
-                        except ObjectNotFound:
-                            log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+                            log.exception( "Requested dataset %s does not exist on the host." % fpath )
                             raise exceptions.ObjectNotFound( "Requested dataset not found." )
                         except Exception, e:
                             log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) )
-                            raise exceptions.InternalServerError( "Unknown error. " + str( e ) )
-                lname = 'selected_dataset'
-                fname = lname.replace( ' ', '_' ) + '_files'
-                if format == 'zip':
-                    archive.close()
-                    trans.response.set_content_type( "application/octet-stream" )
-                    trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext )
-                    archive = util.streamball.ZipBall( tmpf, tmpd )
-                    archive.wsgi_status = trans.response.wsgi_status()
-                    archive.wsgi_headeritems = trans.response.wsgi_headeritems()
-                    return archive.stream
-                else:
-                    trans.response.set_content_type( "application/x-tar" )
-                    trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext )
-                    archive.wsgi_status = trans.response.wsgi_status()
-                    archive.wsgi_headeritems = trans.response.wsgi_headeritems()
-                    return archive.stream
+                            raise exceptions.InternalServerError( "Unable to add dataset to temporary library download archive . " + str( e ) )
+
+                else:  # simple case
+                    try:
+                        if format == 'zip':
+                            archive.add( ldda.dataset.file_name, path )
+                        else:
+                            archive.add( ldda.dataset.file_name, path, check_file=True )
+                    except IOError:
+                        log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name )
+                        raise exceptions.InternalServerError( "Unable to create archive for download" )
+                    except ObjectNotFound:
+                        log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+                        raise exceptions.ObjectNotFound( "Requested dataset not found." )
+                    except Exception, e:
+                        log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) )
" + str( e ) ) + lname = 'selected_dataset' + fname = lname.replace( ' ', '_' ) + '_files' + if format == 'zip': + archive.close() + trans.response.set_content_type( "application/octet-stream" ) + trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext ) + archive = util.streamball.ZipBall( tmpf, tmpd ) + archive.wsgi_status = trans.response.wsgi_status() + archive.wsgi_headeritems = trans.response.wsgi_headeritems() + return archive.stream + else: + trans.response.set_content_type( "application/x-tar" ) + trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext ) + archive.wsgi_status = trans.response.wsgi_status() + archive.wsgi_headeritems = trans.response.wsgi_headeritems() + return archive.stream elif format == 'uncompressed': if len(library_datasets) != 1: raise exceptions.RequestParameterInvalidException( "You can download only one uncompressed file at once." ) diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/api/library_contents.py --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -146,17 +146,21 @@ the HDA's encoded id in ``from_hda_id`` (and optionally ``ldda_message``). :type library_id: str - :param library_id: encoded id string of the library that contains this item + :param library_id: encoded id string of the library where to create the new item :type payload: dict :param payload: dictionary structure containing: * folder_id: the parent folder of the new item - * create_type: the type of item to create ('file' or 'folder') + * create_type: the type of item to create ('file', 'folder' or 'collection') * from_hda_id: (optional) the id of an accessible HDA to copy into the library * ldda_message: (optional) the new message attribute of the LDDA created * extended_metadata: (optional) dub-dictionary containing any extended metadata to associate with the item + * link_data_only: (optional) either 'copy_files' (default) or 'link_to_files' + * upload_option: (optional) one of 'upload_file' (default), 'upload_directory' or 'upload_paths' + * server_dir: (optional) only if upload_option is 'upload_directory' + * filesystem_paths: (optional) only if upload_option is 'upload_paths' and the user is an admin :rtype: dict :returns: a dictionary containing the id, name, @@ -217,11 +221,9 @@ return output else: rval = [] - for k, v in output.items(): + for v in output.values(): if ex_meta_payload is not None: - """ - If there is extended metadata, store it, attach it to the dataset, and index it - """ + # If there is extended metadata, store it, attach it to the dataset, and index it ex_meta = ExtendedMetadata(ex_meta_payload) trans.sa_session.add( ex_meta ) v.extended_metadata = ex_meta @@ -343,9 +345,9 @@ trans.sa_session.flush() def __decode_library_content_id( self, trans, content_id ): - if ( len( content_id ) % 16 == 0 ): + if len( content_id ) % 16 == 0: return 'LibraryDataset', content_id - elif ( content_id.startswith( 'F' ) ): + elif content_id.startswith( 'F' ): return 'LibraryFolder', content_id[ 1: ] else: raise HTTPBadRequest( 'Malformed library content id ( %s ) specified, unable to decode.' 
diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/api/library_contents.py
--- a/lib/galaxy/webapps/galaxy/api/library_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/library_contents.py
@@ -146,17 +146,21 @@
         the HDA's encoded id in ``from_hda_id`` (and optionally ``ldda_message``).
 
         :type   library_id: str
-        :param  library_id: encoded id string of the library that contains this item
+        :param  library_id: encoded id string of the library where to create the new item
 
         :type   payload:    dict
         :param  payload:    dictionary structure containing:
 
             * folder_id:    the parent folder of the new item
-            * create_type:  the type of item to create ('file' or 'folder')
+            * create_type:  the type of item to create ('file', 'folder' or 'collection')
             * from_hda_id:  (optional) the id of an accessible HDA to copy into the library
             * ldda_message: (optional) the new message attribute of the LDDA created
             * extended_metadata: (optional) dub-dictionary containing any extended metadata to associate with the item
+            * link_data_only: (optional) either 'copy_files' (default) or 'link_to_files'
+            * upload_option: (optional) one of 'upload_file' (default), 'upload_directory' or 'upload_paths'
+            * server_dir: (optional) only if upload_option is 'upload_directory'
+            * filesystem_paths: (optional) only if upload_option is 'upload_paths' and the user is an admin
 
         :rtype:     dict
         :returns:   a dictionary containing the id, name,
@@ -217,11 +221,9 @@
             return output
         else:
             rval = []
-            for k, v in output.items():
+            for v in output.values():
                 if ex_meta_payload is not None:
-                    """
-                    If there is extended metadata, store it, attach it to the dataset, and index it
-                    """
+                    # If there is extended metadata, store it, attach it to the dataset, and index it
                     ex_meta = ExtendedMetadata(ex_meta_payload)
                     trans.sa_session.add( ex_meta )
                     v.extended_metadata = ex_meta
@@ -343,9 +345,9 @@
         trans.sa_session.flush()
 
     def __decode_library_content_id( self, trans, content_id ):
-        if ( len( content_id ) % 16 == 0 ):
+        if len( content_id ) % 16 == 0:
             return 'LibraryDataset', content_id
-        elif ( content_id.startswith( 'F' ) ):
+        elif content_id.startswith( 'F' ):
             return 'LibraryFolder', content_id[ 1: ]
         else:
             raise HTTPBadRequest( 'Malformed library content id ( %s ) specified, unable to decode.' % str( content_id ) )
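The expanded docstring maps to a payload along these lines; a hypothetical sketch for a POST to this library-contents create endpoint, with all ids made up ('F'-prefixed folder ids match what __decode_library_content_id() above expects):

    # Hypothetical payload built from the documented parameters.
    payload = {
        "folder_id": "F0123456789abcdef",   # placeholder encoded folder id
        "create_type": "file",              # 'file', 'folder' or 'collection'
        "upload_option": "upload_file",     # default per the docstring
        "link_data_only": "copy_files",     # default per the docstring
        "ldda_message": "uploaded via the API",
    }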
diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -1,8 +1,6 @@
 import logging
 import os
-import tempfile
 import urllib
-import zipfile
 
 from galaxy import datatypes, eggs, model, util, web
 from galaxy.datatypes.display_applications.util import decode_dataset_user, encode_dataset_user
@@ -43,8 +41,8 @@
         accepted_filter_labels_and_vals = { "Active" : "False", "Deleted" : "True", "All": "All" }
         accepted_filters = []
         for label, val in accepted_filter_labels_and_vals.items():
-           args = { self.key: val }
-           accepted_filters.append( grids.GridColumnFilter( label, args) )
+            args = { self.key: val }
+            accepted_filters.append( grids.GridColumnFilter( label, args) )
         return accepted_filters
 
 # Grid definition
@@ -147,7 +145,7 @@
         trans.response.set_content_type( 'text/plain' )
         exit_code = ""
         try:
-            job = self._get_job_for_dataset( dataset_id )
+            job = self._get_job_for_dataset( trans, dataset_id )
             exit_code = job.exit_code
         except:
             exit_code = "Invalid dataset ID or you are not allowed to access this dataset"
@@ -323,40 +321,38 @@
         if params.annotation:
             annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' )
             self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation )
-        """
         # This block on controller code is inactive until the 'extended_metadata' edit box is added back into the UI
         # Add or delete extended metadata
-        if params.extended_metadata:
-            em_string = params.extended_metadata
-            if len(em_string):
-                em_payload = None
-                try:
-                    em_payload = loads(em_string)
-                except Exception, e:
-                    message = 'Invalid JSON input'
-                    error = True
-                if em_payload is not None:
-                    if data is not None:
-                        ex_obj = self.get_item_extended_metadata_obj(trans, data)
-                        if ex_obj is not None:
-                            self.unset_item_extended_metadata_obj(trans, data)
-                            self.delete_extended_metadata(trans, ex_obj)
-                        ex_obj = self.create_extended_metadata(trans, em_payload)
-                        self.set_item_extended_metadata_obj(trans, data, ex_obj)
-                        message = "Updated Extended metadata '%s'." % data.name
-                        status = 'done'
-                    else:
-                        message = "data not found"
-                        error = True
-            else:
-                if data is not None:
-                    ex_obj = self.get_item_extended_metadata_obj(trans, data)
-                    if ex_obj is not None:
-                        self.unset_item_extended_metadata_obj(trans, data)
-                        self.delete_extended_metadata(trans, ex_obj)
-                message = "Deleted Extended metadata '%s'." % data.name
-                status = 'done'
-        """
+#         if params.extended_metadata:
+#             em_string = params.extended_metadata
+#             if len(em_string):
+#                 em_payload = None
+#                 try:
+#                     em_payload = loads(em_string)
+#                 except Exception, e:
+#                     message = 'Invalid JSON input'
+#                     error = True
+#                 if em_payload is not None:
+#                     if data is not None:
+#                         ex_obj = self.get_item_extended_metadata_obj(trans, data)
+#                         if ex_obj is not None:
+#                             self.unset_item_extended_metadata_obj(trans, data)
+#                             self.delete_extended_metadata(trans, ex_obj)
+#                         ex_obj = self.create_extended_metadata(trans, em_payload)
+#                         self.set_item_extended_metadata_obj(trans, data, ex_obj)
+#                         message = "Updated Extended metadata '%s'." % data.name
+#                         status = 'done'
+#                     else:
+#                         message = "data not found"
+#                         error = True
+#             else:
+#                 if data is not None:
+#                     ex_obj = self.get_item_extended_metadata_obj(trans, data)
+#                     if ex_obj is not None:
+#                         self.unset_item_extended_metadata_obj(trans, data)
+#                         self.delete_extended_metadata(trans, ex_obj)
+#                 message = "Deleted Extended metadata '%s'." % data.name
+#                 status = 'done'
 
         # If setting metadata previously failed and all required elements have now been set, clear the failed state.
         if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta():
@@ -954,13 +950,11 @@
         has_parameter_errors = False
         inherit_chain = hda.source_dataset_chain
         if inherit_chain:
-            job_dataset_association, dataset_association_container_name = inherit_chain[-1]
+            job_dataset_association = inherit_chain[-1][0]
         else:
             job_dataset_association = hda
         if job_dataset_association.creating_job_associations:
-            for assoc in job_dataset_association.creating_job_associations:
-                job = assoc.job
-                break
+            job = job_dataset_association.creating_job_associations[0].job
         if job:
             # Get the tool object
             try:
@@ -1024,7 +1018,7 @@
                 trans.sa_session.flush()
                 target_history_ids.append( new_history.id )
         if user:
-            target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if ( hist is not None and hist.user == user )]
+            target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if hist is not None and hist.user == user ]
        else:
            target_histories = [ history ]
        if len( target_histories ) != len( target_history_ids ):
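Besides the missing trans argument now passed to _get_job_for_dataset(), the hunks above replace a loop-and-break idiom with direct indexing. The two forms are equivalent whenever the list is non-empty, which the surrounding if already guarantees; a tiny stand-alone demonstration:

    from collections import namedtuple

    Assoc = namedtuple("Assoc", "job")      # stand-in for the ORM association
    creating_job_associations = [Assoc(job="job-1"), Assoc(job="job-2")]

    # Old idiom: loop that breaks after the first element.
    for assoc in creating_job_associations:
        job = assoc.job
        break

    # New form: index the first element directly -- same result, clearer intent.
    assert job == creating_job_associations[0].job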
diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/controllers/library_common.py
--- a/lib/galaxy/webapps/galaxy/controllers/library_common.py
+++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py
@@ -162,6 +162,7 @@
                                                           default_action=default_action,
                                                           message=util.sanitize_text( message ),
                                                           status=status ) )
+
     @web.expose
     def library_info( self, trans, cntrller, **kwd ):
         params = util.Params( kwd )
@@ -222,6 +223,7 @@
                                     inherited=inherited,
                                     message=message,
                                     status=status )
+
     @web.expose
     def library_permissions( self, trans, cntrller, **kwd ):
         params = util.Params( kwd )
@@ -269,6 +271,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def create_folder( self, trans, cntrller, parent_id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -346,6 +349,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def folder_info( self, trans, cntrller, id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -403,6 +407,7 @@
                                     inherited=inherited,
                                     message=message,
                                     status=status )
+
     @web.expose
     def folder_permissions( self, trans, cntrller, id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -453,6 +458,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def ldda_edit_info( self, trans, cntrller, library_id, folder_id, id, **kwd ):
         params = util.Params( kwd )
@@ -606,6 +612,7 @@
                                     inherited=inherited,
                                     message=message,
                                     status=status )
+
     @web.expose
     def ldda_info( self, trans, cntrller, library_id, folder_id, id, **kwd ):
         params = util.Params( kwd )
@@ -655,6 +662,7 @@
                                     inherited=inherited,
                                     message=message,
                                     status=status )
+
     @web.expose
     def ldda_permissions( self, trans, cntrller, library_id, folder_id, id, **kwd ):
         params = util.Params( kwd )
@@ -793,6 +801,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def upload_library_dataset( self, trans, cntrller, library_id, folder_id, **kwd ):
         params = util.Params( kwd )
@@ -1042,6 +1051,7 @@
                                     ldda_message=ldda_message,
                                     message=message,
                                     status=status )
+
     def upload_dataset( self, trans, cntrller, library_id, folder_id, replace_dataset=None, **kwd ):
         # Set up the traditional tool state/params
         tool_id = 'upload1'
@@ -1132,6 +1142,7 @@
         trans.sa_session.add( job )
         trans.sa_session.flush()
         return output
+
     def make_library_uploaded_dataset( self, trans, cntrller, params, name, path, type, library_bunch, in_folder=None ):
         link_data_only = params.get( 'link_data_only', 'copy_files' )
         uuid_str = params.get( 'uuid', None )
@@ -1166,6 +1177,7 @@
         trans.sa_session.add_all( ( uploaded_dataset.data, uploaded_dataset.data.dataset ) )
         trans.sa_session.flush()
         return uploaded_dataset
+
     def get_server_dir_uploaded_datasets( self, trans, cntrller, params, full_dir, import_dir_desc, library_bunch, response_code, message ):
         dir_response = self._get_server_dir_files(params, full_dir, import_dir_desc)
         files = dir_response[0]
@@ -1176,6 +1188,7 @@
             name = os.path.basename( file )
             uploaded_datasets.append( self.make_library_uploaded_dataset( trans, cntrller, params, name, file, 'server_dir', library_bunch ) )
         return uploaded_datasets, 200, None
+
     def _get_server_dir_files( self, params, full_dir, import_dir_desc ):
         files = []
         try:
@@ -1212,6 +1225,7 @@
             response_code = 400
             return None, response_code, message
         return files, None, None
+
     def get_path_paste_uploaded_datasets( self, trans, cntrller, params, library_bunch, response_code, message ):
         preserve_dirs = util.string_as_bool( params.get( 'preserve_dirs', False ) )
         uploaded_datasets = []
@@ -1246,6 +1260,7 @@
                 in_folder = None
             files_and_folders.append((file_path, file, in_folder))
         return files_and_folders
+
     def _paths_list(self, params):
         return [ (l.strip(), os.path.abspath(l.strip())) for l in params.filesystem_paths.splitlines() if l.strip() ]
 
@@ -1463,6 +1478,7 @@
                                     ldda_message=ldda_message,
                                     message=message,
                                     status=status )
+
     def _build_roles_select_list( self, trans, cntrller, library, selected_role_ids=[] ):
         # Get the list of legitimate roles to display on the upload form. If the library is public,
         # all active roles are legitimate. If the library is restricted by the LIBRARY_ACCESS permission, only
@@ -1478,10 +1494,11 @@
             return roles_select_list
         else:
             return None
+
     def _build_upload_option_select_list( self, trans, upload_option, is_admin, do_not_include_values=[] ):
         # Build the upload_option select list. The do_not_include_values param can contain options that
         # should not be included in the list. For example, the 'upload_directory' option should not be
-        # included if uploading a new version of a librar dataset.
+        # included if uploading a new version of a library dataset.
         upload_refresh_on_change_values = []
         for option_value, option_label in trans.model.LibraryDataset.upload_options:
             if option_value not in do_not_include_values:
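The _paths_list() helper shown a few hunks up packs the pasted filesystem_paths text into (raw, absolute) tuples, one per non-blank line. A quick stand-alone illustration of that one-liner (the paths are made up):

    import os

    filesystem_paths = "  /data/a.txt\n\n/data/b.txt  "
    paths = [ (l.strip(), os.path.abspath(l.strip()))
              for l in filesystem_paths.splitlines() if l.strip() ]
    # [('/data/a.txt', '/data/a.txt'), ('/data/b.txt', '/data/b.txt')]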
@@ -1508,6 +1525,7 @@
                 continue
             upload_option_select_list.add_option( option_label, option_value, selected=option_value==upload_option )
         return upload_option_select_list
+
     def _get_populated_widgets( self, folder ):
         # See if we have any inherited templates.
         info_association, inherited = folder.get_info_association( inherited=True )
@@ -1517,6 +1535,7 @@
             return self.populate_widgets_from_kwd( trans, widgets, **kwd )
         else:
             return []
+
     @web.expose
     def download_dataset_from_folder( self, trans, cntrller, id, library_id=None, **kwd ):
         """Catches the dataset id and displays file contents as directed"""
@@ -1557,6 +1576,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status='error' ) )
+
     @web.expose
     def library_dataset_info( self, trans, cntrller, id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -1606,6 +1626,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def library_dataset_permissions( self, trans, cntrller, id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -1654,6 +1675,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def make_library_item_public( self, trans, cntrller, library_id, item_type, id, **kwd ):
         params = util.Params( kwd )
@@ -1696,6 +1718,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status=status ) )
+
     @web.expose
     def act_on_multiple_datasets( self, trans, cntrller, library_id=None, ldda_ids='', **kwd ):
         # This method is called from 1 of 3 places:
@@ -2113,6 +2136,7 @@
                                     use_panels=use_panels,
                                     message=message,
                                     status=status )
+
     @web.expose
     def manage_template_inheritance( self, trans, cntrller, item_type, library_id, folder_id=None, ldda_id=None, **kwd ):
         params = util.Params( kwd )
@@ -2159,6 +2183,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status='done' ) )
+
     @web.expose
     def move_library_item( self, trans, cntrller, item_type, item_id, source_library_id='', make_target_current=True, **kwd ):
         # This method is called from one of the following places:
@@ -2374,6 +2399,7 @@
                                     use_panels=use_panels,
                                     message=message,
                                     status=status )
+
     @web.expose
     def delete_library_item( self, trans, cntrller, library_id, item_id, item_type, **kwd ):
         # This action will handle deleting all types of library items. State is saved for libraries and
@@ -2441,6 +2467,7 @@
                                                           show_deleted=show_deleted,
                                                           message=message,
                                                           status=status ) )
+
     @web.expose
     def undelete_library_item( self, trans, cntrller, library_id, item_id, item_type, **kwd ):
         # This action will handle undeleting all types of library items
@@ -2509,6 +2536,7 @@
                                                           show_deleted=show_deleted,
                                                           message=message,
                                                           status=status ) )
+
     def _check_access( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         can_access = True
         if isinstance( item, trans.model.HistoryDatasetAssociation ):
@@ -2551,6 +2579,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status='error' ) )
+
     def _check_add( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission.
         if not ( is_admin or trans.app.security_agent.can_add_library_item( current_user_roles, item ) ):
@@ -2566,6 +2595,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status='error' ) )
+
     def _check_manage( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         if isinstance( item, trans.model.LibraryDataset ):
             # Deny access if the user is not an admin and does not have the LIBRARY_MANAGE and DATASET_MANAGE_PERMISSIONS permissions.
@@ -2594,6 +2624,7 @@
                                                           use_panels=use_panels,
                                                           message=util.sanitize_text( message ),
                                                           status='error' ) )
+
     def _check_modify( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         # Deny modification if the user is not an admin and does not have the LIBRARY_MODIFY permission.
         if not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ):
@@ -2619,6 +2650,7 @@
                           .options( eagerload_all( "actions" ) ) \
                           .order_by( trans.app.model.LibraryFolder.table.c.name ) \
                           .all()
+
 def activatable_folders( trans, folder ):
     return trans.sa_session.query( trans.app.model.LibraryFolder ) \
                            .filter_by( parent=folder, purged=False ) \
@@ -2685,6 +2717,7 @@
     if folder.parent:
         return branch_deleted( folder.parent )
     return False
+
 def get_containing_library_from_library_dataset( trans, library_dataset ):
     """Given a library_dataset, get the containing library"""
     folder = library_dataset.folder
@@ -2698,6 +2731,7 @@
         if library.root_folder == folder:
             return library
     return None
+
 def get_comptypes( trans ):
     comptypes_t = comptypes
     if trans.app.config.nginx_x_archive_files_base:
@@ -2710,6 +2744,7 @@
     except:
         pass
     return comptypes_t
+
 def get_sorted_accessible_library_items( trans, cntrller, items, sort_attr ):
     is_admin = trans.user_is_admin() and cntrller == 'library_admin'
     if is_admin:
@@ -2723,6 +2758,7 @@
             accessible_items.append( item )
     # Sort by name
     return sort_by_attr( [ item for item in accessible_items ], sort_attr )
+
 def sort_by_attr( seq, attr ):
     """
     Sort the sequence of objects by object's attribute
@@ -2738,6 +2774,7 @@
     intermed = map( None, map( getattr, seq, ( attr, ) * len( seq ) ), xrange( len( seq ) ), seq )
     intermed.sort()
     return map( operator.getitem, intermed, ( -1, ) * len( intermed ) )
+
 def lucene_search( trans, cntrller, search_term, search_url, **kwd ):
     """Return display of results from a full-text lucene search of data libraries."""
     params = util.Params( kwd )
@@ -2749,6 +2786,7 @@
     response.close()
     lddas = [ trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id ) for ldda_id in ldda_ids ]
     return status, message, get_sorted_accessible_library_items( trans, cntrller, lddas, 'name' )
+
 def whoosh_search( trans, cntrller, search_term, **kwd ):
     """Return display of results from a full-text whoosh search of data libraries."""
     params = util.Params( kwd )
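sort_by_attr() above uses the classic decorate-sort-undecorate idiom, building (value, index, object) tuples so the objects themselves are never compared. Since Python 2.4 the same guarantee comes from sorted() with a key function; a sketch of the modern equivalent, not part of this changeset:

    import operator

    def sort_by_attr(seq, attr):
        # Stable sort on one attribute; as in the decorate-sort-undecorate
        # version, only the attribute values are compared, never the objects.
        return sorted(seq, key=operator.attrgetter(attr))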
https://bitbucket.org/galaxy/galaxy-central/commits/a3ebaac5d312/
Changeset: a3ebaac5d312
User: jmchilton
Date: 2014-11-10 16:04:02+00:00
Summary: Merged in nsoranzo/galaxy-central (pull request #551)

Small bugs, doc and pylint fixes.
Affected #: 9 files
" + str( e ) ) - if is_composite: # need to add all the components from the extra_files_path to the zip - if zpathext == '': - zpath = '%s.html' % zpath # fake the real nature of the html file + flist = glob.glob(os.path.join(ldda.dataset.extra_files_path, '*.*')) # glob returns full paths + for fpath in flist: + efp, fname = os.path.split(fpath) + if fname > '': + fname = fname.translate(trantab) try: if format == 'zip': - archive.add( ldda.dataset.file_name, zpath ) # add the primary of a composite set + archive.add( fpath, fname ) else: - archive.add( ldda.dataset.file_name, zpath, check_file=True ) # add the primary of a composite set + archive.add( fpath, fname, check_file=True ) except IOError: - log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name ) + log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname) ) raise exceptions.InternalServerError( "Unable to create archive for download." ) except ObjectNotFound: - log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name ) - raise exceptions.ObjectNotFound( "Requested dataset not found. " ) - except Exception, e: - log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name ) - raise exceptions.InternalServerError( "Unable to add composite parent to temporary library download archive. " + str( e ) ) - - flist = glob.glob(os.path.join(ldda.dataset.extra_files_path, '*.*')) # glob returns full paths - for fpath in flist: - efp, fname = os.path.split(fpath) - if fname > '': - fname = fname.translate(trantab) - try: - if format == 'zip': - archive.add( fpath, fname ) - else: - archive.add( fpath, fname, check_file=True ) - except IOError: - log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname) ) - raise exceptions.InternalServerError( "Unable to create archive for download." ) - except ObjectNotFound: - log.exception( "Requested dataset %s does not exist on the host." % fpath ) - raise exceptions.ObjectNotFound( "Requested dataset not found." ) - except Exception, e: - log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) ) - raise exceptions.InternalServerError( "Unable to add dataset to temporary library download archive . " + str( e ) ) - - else: # simple case - try: - if format == 'zip': - archive.add( ldda.dataset.file_name, path ) - else: - archive.add( ldda.dataset.file_name, path, check_file=True ) - except IOError: - log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name ) - raise exceptions.InternalServerError( "Unable to create archive for download" ) - except ObjectNotFound: - log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name ) + log.exception( "Requested dataset %s does not exist on the host." % fpath ) raise exceptions.ObjectNotFound( "Requested dataset not found." ) except Exception, e: log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) ) - raise exceptions.InternalServerError( "Unknown error. 
" + str( e ) ) - lname = 'selected_dataset' - fname = lname.replace( ' ', '_' ) + '_files' - if format == 'zip': - archive.close() - trans.response.set_content_type( "application/octet-stream" ) - trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext ) - archive = util.streamball.ZipBall( tmpf, tmpd ) - archive.wsgi_status = trans.response.wsgi_status() - archive.wsgi_headeritems = trans.response.wsgi_headeritems() - return archive.stream - else: - trans.response.set_content_type( "application/x-tar" ) - trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext ) - archive.wsgi_status = trans.response.wsgi_status() - archive.wsgi_headeritems = trans.response.wsgi_headeritems() - return archive.stream + raise exceptions.InternalServerError( "Unable to add dataset to temporary library download archive . " + str( e ) ) + + else: # simple case + try: + if format == 'zip': + archive.add( ldda.dataset.file_name, path ) + else: + archive.add( ldda.dataset.file_name, path, check_file=True ) + except IOError: + log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name ) + raise exceptions.InternalServerError( "Unable to create archive for download" ) + except ObjectNotFound: + log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name ) + raise exceptions.ObjectNotFound( "Requested dataset not found." ) + except Exception, e: + log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) ) + raise exceptions.InternalServerError( "Unknown error. " + str( e ) ) + lname = 'selected_dataset' + fname = lname.replace( ' ', '_' ) + '_files' + if format == 'zip': + archive.close() + trans.response.set_content_type( "application/octet-stream" ) + trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext ) + archive = util.streamball.ZipBall( tmpf, tmpd ) + archive.wsgi_status = trans.response.wsgi_status() + archive.wsgi_headeritems = trans.response.wsgi_headeritems() + return archive.stream + else: + trans.response.set_content_type( "application/x-tar" ) + trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext ) + archive.wsgi_status = trans.response.wsgi_status() + archive.wsgi_headeritems = trans.response.wsgi_headeritems() + return archive.stream elif format == 'uncompressed': if len(library_datasets) != 1: raise exceptions.RequestParameterInvalidException( "You can download only one uncompressed file at once." ) diff -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e -r a3ebaac5d31258a02bc7f037721f898c2c1e80e3 lib/galaxy/webapps/galaxy/api/library_contents.py --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -146,17 +146,21 @@ the HDA's encoded id in ``from_hda_id`` (and optionally ``ldda_message``). 
:type library_id: str - :param library_id: encoded id string of the library that contains this item + :param library_id: encoded id string of the library where to create the new item :type payload: dict :param payload: dictionary structure containing: * folder_id: the parent folder of the new item - * create_type: the type of item to create ('file' or 'folder') + * create_type: the type of item to create ('file', 'folder' or 'collection') * from_hda_id: (optional) the id of an accessible HDA to copy into the library * ldda_message: (optional) the new message attribute of the LDDA created * extended_metadata: (optional) dub-dictionary containing any extended metadata to associate with the item + * link_data_only: (optional) either 'copy_files' (default) or 'link_to_files' + * upload_option: (optional) one of 'upload_file' (default), 'upload_directory' or 'upload_paths' + * server_dir: (optional) only if upload_option is 'upload_directory' + * filesystem_paths: (optional) only if upload_option is 'upload_paths' and the user is an admin :rtype: dict :returns: a dictionary containing the id, name, @@ -217,11 +221,9 @@ return output else: rval = [] - for k, v in output.items(): + for v in output.values(): if ex_meta_payload is not None: - """ - If there is extended metadata, store it, attach it to the dataset, and index it - """ + # If there is extended metadata, store it, attach it to the dataset, and index it ex_meta = ExtendedMetadata(ex_meta_payload) trans.sa_session.add( ex_meta ) v.extended_metadata = ex_meta @@ -343,9 +345,9 @@ trans.sa_session.flush() def __decode_library_content_id( self, trans, content_id ): - if ( len( content_id ) % 16 == 0 ): + if len( content_id ) % 16 == 0: return 'LibraryDataset', content_id - elif ( content_id.startswith( 'F' ) ): + elif content_id.startswith( 'F' ): return 'LibraryFolder', content_id[ 1: ] else: raise HTTPBadRequest( 'Malformed library content id ( %s ) specified, unable to decode.' 
% str( content_id ) ) diff -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e -r a3ebaac5d31258a02bc7f037721f898c2c1e80e3 lib/galaxy/webapps/galaxy/controllers/dataset.py --- a/lib/galaxy/webapps/galaxy/controllers/dataset.py +++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py @@ -1,8 +1,6 @@ import logging import os -import tempfile import urllib -import zipfile from galaxy import datatypes, eggs, model, util, web from galaxy.datatypes.display_applications.util import decode_dataset_user, encode_dataset_user @@ -43,8 +41,8 @@ accepted_filter_labels_and_vals = { "Active" : "False", "Deleted" : "True", "All": "All" } accepted_filters = [] for label, val in accepted_filter_labels_and_vals.items(): - args = { self.key: val } - accepted_filters.append( grids.GridColumnFilter( label, args) ) + args = { self.key: val } + accepted_filters.append( grids.GridColumnFilter( label, args) ) return accepted_filters # Grid definition @@ -147,7 +145,7 @@ trans.response.set_content_type( 'text/plain' ) exit_code = "" try: - job = self._get_job_for_dataset( dataset_id ) + job = self._get_job_for_dataset( trans, dataset_id ) exit_code = job.exit_code except: exit_code = "Invalid dataset ID or you are not allowed to access this dataset" @@ -323,40 +321,38 @@ if params.annotation: annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' ) self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation ) - """ # This block on controller code is inactive until the 'extended_metadata' edit box is added back into the UI # Add or delete extended metadata - if params.extended_metadata: - em_string = params.extended_metadata - if len(em_string): - em_payload = None - try: - em_payload = loads(em_string) - except Exception, e: - message = 'Invalid JSON input' - error = True - if em_payload is not None: - if data is not None: - ex_obj = self.get_item_extended_metadata_obj(trans, data) - if ex_obj is not None: - self.unset_item_extended_metadata_obj(trans, data) - self.delete_extended_metadata(trans, ex_obj) - ex_obj = self.create_extended_metadata(trans, em_payload) - self.set_item_extended_metadata_obj(trans, data, ex_obj) - message = "Updated Extended metadata '%s'." % data.name - status = 'done' - else: - message = "data not found" - error = True - else: - if data is not None: - ex_obj = self.get_item_extended_metadata_obj(trans, data) - if ex_obj is not None: - self.unset_item_extended_metadata_obj(trans, data) - self.delete_extended_metadata(trans, ex_obj) - message = "Deleted Extended metadata '%s'." % data.name - status = 'done' - """ +# if params.extended_metadata: +# em_string = params.extended_metadata +# if len(em_string): +# em_payload = None +# try: +# em_payload = loads(em_string) +# except Exception, e: +# message = 'Invalid JSON input' +# error = True +# if em_payload is not None: +# if data is not None: +# ex_obj = self.get_item_extended_metadata_obj(trans, data) +# if ex_obj is not None: +# self.unset_item_extended_metadata_obj(trans, data) +# self.delete_extended_metadata(trans, ex_obj) +# ex_obj = self.create_extended_metadata(trans, em_payload) +# self.set_item_extended_metadata_obj(trans, data, ex_obj) +# message = "Updated Extended metadata '%s'." 
% data.name +# status = 'done' +# else: +# message = "data not found" +# error = True +# else: +# if data is not None: +# ex_obj = self.get_item_extended_metadata_obj(trans, data) +# if ex_obj is not None: +# self.unset_item_extended_metadata_obj(trans, data) +# self.delete_extended_metadata(trans, ex_obj) +# message = "Deleted Extended metadata '%s'." % data.name +# status = 'done' # If setting metadata previously failed and all required elements have now been set, clear the failed state. if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta(): @@ -954,13 +950,11 @@ has_parameter_errors = False inherit_chain = hda.source_dataset_chain if inherit_chain: - job_dataset_association, dataset_association_container_name = inherit_chain[-1] + job_dataset_association = inherit_chain[-1][0] else: job_dataset_association = hda if job_dataset_association.creating_job_associations: - for assoc in job_dataset_association.creating_job_associations: - job = assoc.job - break + job = job_dataset_association.creating_job_associations[0].job if job: # Get the tool object try: @@ -1024,7 +1018,7 @@ trans.sa_session.flush() target_history_ids.append( new_history.id ) if user: - target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if ( hist is not None and hist.user == user )] + target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if hist is not None and hist.user == user ] else: target_histories = [ history ] if len( target_histories ) != len( target_history_ids ): diff -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e -r a3ebaac5d31258a02bc7f037721f898c2c1e80e3 lib/galaxy/webapps/galaxy/controllers/library_common.py --- a/lib/galaxy/webapps/galaxy/controllers/library_common.py +++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py @@ -162,6 +162,7 @@ default_action=default_action, message=util.sanitize_text( message ), status=status ) ) + @web.expose def library_info( self, trans, cntrller, **kwd ): params = util.Params( kwd ) @@ -222,6 +223,7 @@ inherited=inherited, message=message, status=status ) + @web.expose def library_permissions( self, trans, cntrller, **kwd ): params = util.Params( kwd ) @@ -269,6 +271,7 @@ show_deleted=show_deleted, message=message, status=status ) + @web.expose def create_folder( self, trans, cntrller, parent_id, library_id, **kwd ): params = util.Params( kwd ) @@ -346,6 +349,7 @@ show_deleted=show_deleted, message=message, status=status ) + @web.expose def folder_info( self, trans, cntrller, id, library_id, **kwd ): params = util.Params( kwd ) @@ -403,6 +407,7 @@ inherited=inherited, message=message, status=status ) + @web.expose def folder_permissions( self, trans, cntrller, id, library_id, **kwd ): params = util.Params( kwd ) @@ -453,6 +458,7 @@ show_deleted=show_deleted, message=message, status=status ) + @web.expose def ldda_edit_info( self, trans, cntrller, library_id, folder_id, id, **kwd ): params = util.Params( kwd ) @@ -606,6 +612,7 @@ inherited=inherited, message=message, status=status ) + @web.expose def ldda_info( self, trans, cntrller, library_id, folder_id, id, **kwd ): params = util.Params( kwd ) @@ -655,6 +662,7 @@ inherited=inherited, message=message, status=status ) + @web.expose def ldda_permissions( self, trans, cntrller, library_id, folder_id, id, **kwd ): params = util.Params( kwd ) @@ -793,6 +801,7 @@ show_deleted=show_deleted, message=message, status=status ) + @web.expose def 
upload_library_dataset( self, trans, cntrller, library_id, folder_id, **kwd ): params = util.Params( kwd ) @@ -1042,6 +1051,7 @@ ldda_message=ldda_message, message=message, status=status ) + def upload_dataset( self, trans, cntrller, library_id, folder_id, replace_dataset=None, **kwd ): # Set up the traditional tool state/params tool_id = 'upload1' @@ -1132,6 +1142,7 @@ trans.sa_session.add( job ) trans.sa_session.flush() return output + def make_library_uploaded_dataset( self, trans, cntrller, params, name, path, type, library_bunch, in_folder=None ): link_data_only = params.get( 'link_data_only', 'copy_files' ) uuid_str = params.get( 'uuid', None ) @@ -1166,6 +1177,7 @@ trans.sa_session.add_all( ( uploaded_dataset.data, uploaded_dataset.data.dataset ) ) trans.sa_session.flush() return uploaded_dataset + def get_server_dir_uploaded_datasets( self, trans, cntrller, params, full_dir, import_dir_desc, library_bunch, response_code, message ): dir_response = self._get_server_dir_files(params, full_dir, import_dir_desc) files = dir_response[0] @@ -1176,6 +1188,7 @@ name = os.path.basename( file ) uploaded_datasets.append( self.make_library_uploaded_dataset( trans, cntrller, params, name, file, 'server_dir', library_bunch ) ) return uploaded_datasets, 200, None + def _get_server_dir_files( self, params, full_dir, import_dir_desc ): files = [] try: @@ -1212,6 +1225,7 @@ response_code = 400 return None, response_code, message return files, None, None + def get_path_paste_uploaded_datasets( self, trans, cntrller, params, library_bunch, response_code, message ): preserve_dirs = util.string_as_bool( params.get( 'preserve_dirs', False ) ) uploaded_datasets = [] @@ -1246,6 +1260,7 @@ in_folder = None files_and_folders.append((file_path, file, in_folder)) return files_and_folders + def _paths_list(self, params): return [ (l.strip(), os.path.abspath(l.strip())) for l in params.filesystem_paths.splitlines() if l.strip() ] @@ -1463,6 +1478,7 @@ ldda_message=ldda_message, message=message, status=status ) + def _build_roles_select_list( self, trans, cntrller, library, selected_role_ids=[] ): # Get the list of legitimate roles to display on the upload form. If the library is public, # all active roles are legitimate. If the library is restricted by the LIBRARY_ACCESS permission, only @@ -1478,10 +1494,11 @@ return roles_select_list else: return None + def _build_upload_option_select_list( self, trans, upload_option, is_admin, do_not_include_values=[] ): # Build the upload_option select list. The do_not_include_values param can contain options that # should not be included in the list. For example, the 'upload_directory' option should not be - # included if uploading a new version of a librar dataset. + # included if uploading a new version of a library dataset. upload_refresh_on_change_values = [] for option_value, option_label in trans.model.LibraryDataset.upload_options: if option_value not in do_not_include_values: @@ -1508,6 +1525,7 @@ continue upload_option_select_list.add_option( option_label, option_value, selected=option_value==upload_option ) return upload_option_select_list + def _get_populated_widgets( self, folder ): # See if we have any inherited templates. 
info_association, inherited = folder.get_info_association( inherited=True ) @@ -1517,6 +1535,7 @@ return self.populate_widgets_from_kwd( trans, widgets, **kwd ) else: return [] + @web.expose def download_dataset_from_folder( self, trans, cntrller, id, library_id=None, **kwd ): """Catches the dataset id and displays file contents as directed""" @@ -1557,6 +1576,7 @@ show_deleted=show_deleted, message=util.sanitize_text( message ), status='error' ) ) + @web.expose def library_dataset_info( self, trans, cntrller, id, library_id, **kwd ): params = util.Params( kwd ) @@ -1606,6 +1626,7 @@ show_deleted=show_deleted, message=message, status=status ) + @web.expose def library_dataset_permissions( self, trans, cntrller, id, library_id, **kwd ): params = util.Params( kwd ) @@ -1654,6 +1675,7 @@ show_deleted=show_deleted, message=message, status=status ) + @web.expose def make_library_item_public( self, trans, cntrller, library_id, item_type, id, **kwd ): params = util.Params( kwd ) @@ -1696,6 +1718,7 @@ show_deleted=show_deleted, message=util.sanitize_text( message ), status=status ) ) + @web.expose def act_on_multiple_datasets( self, trans, cntrller, library_id=None, ldda_ids='', **kwd ): # This method is called from 1 of 3 places: @@ -2113,6 +2136,7 @@ use_panels=use_panels, message=message, status=status ) + @web.expose def manage_template_inheritance( self, trans, cntrller, item_type, library_id, folder_id=None, ldda_id=None, **kwd ): params = util.Params( kwd ) @@ -2159,6 +2183,7 @@ show_deleted=show_deleted, message=util.sanitize_text( message ), status='done' ) ) + @web.expose def move_library_item( self, trans, cntrller, item_type, item_id, source_library_id='', make_target_current=True, **kwd ): # This method is called from one of the following places: @@ -2374,6 +2399,7 @@ use_panels=use_panels, message=message, status=status ) + @web.expose def delete_library_item( self, trans, cntrller, library_id, item_id, item_type, **kwd ): # This action will handle deleting all types of library items. State is saved for libraries and @@ -2441,6 +2467,7 @@ show_deleted=show_deleted, message=message, status=status ) ) + @web.expose def undelete_library_item( self, trans, cntrller, library_id, item_id, item_type, **kwd ): # This action will handle undeleting all types of library items @@ -2509,6 +2536,7 @@ show_deleted=show_deleted, message=message, status=status ) ) + def _check_access( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ): can_access = True if isinstance( item, trans.model.HistoryDatasetAssociation ): @@ -2551,6 +2579,7 @@ show_deleted=show_deleted, message=util.sanitize_text( message ), status='error' ) ) + def _check_add( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ): # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission. if not ( is_admin or trans.app.security_agent.can_add_library_item( current_user_roles, item ) ): @@ -2566,6 +2595,7 @@ show_deleted=show_deleted, message=util.sanitize_text( message ), status='error' ) ) + def _check_manage( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ): if isinstance( item, trans.model.LibraryDataset ): # Deny access if the user is not an admin and does not have the LIBRARY_MANAGE and DATASET_MANAGE_PERMISSIONS permissions. 
@@ -2594,6 +2624,7 @@ use_panels=use_panels, message=util.sanitize_text( message ), status='error' ) ) + def _check_modify( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ): # Deny modification if the user is not an admin and does not have the LIBRARY_MODIFY permission. if not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ): @@ -2619,6 +2650,7 @@ .options( eagerload_all( "actions" ) ) \ .order_by( trans.app.model.LibraryFolder.table.c.name ) \ .all() + def activatable_folders( trans, folder ): return trans.sa_session.query( trans.app.model.LibraryFolder ) \ .filter_by( parent=folder, purged=False ) \ @@ -2685,6 +2717,7 @@ if folder.parent: return branch_deleted( folder.parent ) return False + def get_containing_library_from_library_dataset( trans, library_dataset ): """Given a library_dataset, get the containing library""" folder = library_dataset.folder @@ -2698,6 +2731,7 @@ if library.root_folder == folder: return library return None + def get_comptypes( trans ): comptypes_t = comptypes if trans.app.config.nginx_x_archive_files_base: @@ -2710,6 +2744,7 @@ except: pass return comptypes_t + def get_sorted_accessible_library_items( trans, cntrller, items, sort_attr ): is_admin = trans.user_is_admin() and cntrller == 'library_admin' if is_admin: @@ -2723,6 +2758,7 @@ accessible_items.append( item ) # Sort by name return sort_by_attr( [ item for item in accessible_items ], sort_attr ) + def sort_by_attr( seq, attr ): """ Sort the sequence of objects by object's attribute @@ -2738,6 +2774,7 @@ intermed = map( None, map( getattr, seq, ( attr, ) * len( seq ) ), xrange( len( seq ) ), seq ) intermed.sort() return map( operator.getitem, intermed, ( -1, ) * len( intermed ) ) + def lucene_search( trans, cntrller, search_term, search_url, **kwd ): """Return display of results from a full-text lucene search of data libraries.""" params = util.Params( kwd ) @@ -2749,6 +2786,7 @@ response.close() lddas = [ trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id ) for ldda_id in ldda_ids ] return status, message, get_sorted_accessible_library_items( trans, cntrller, lddas, 'name' ) + def whoosh_search( trans, cntrller, search_term, **kwd ): """Return display of results from a full-text whoosh search of data libraries.""" params = util.Params( kwd ) Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
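The download endpoint reworked in the lda_datasets.py hunks above can be exercised with a short client script. The sketch below is an illustration under stated assumptions, not Galaxy code: the host URL is hypothetical, the encoded ids are the placeholders from the docstring's example URL, and authentication is reduced to a comment (a real deployment will normally also expect a 'key' parameter carrying a user API key).

    import requests  # third-party HTTP client: pip install requests

    GALAXY_URL = 'http://localhost:8080'  # assumed local instance
    # Encoded LibraryDataset ids, copied from the example URL in the docstring.
    ld_ids = [ 'a0d84b45643a2678', 'fe38c84dcd46c828' ]

    # The handler falls back from 'ld_ids%5B%5D' to a plain 'ld_ids' parameter
    # and runs it through util.listify(), which splits on commas, so a single
    # comma-joined value is sufficient. Add key=<your API key> on a real instance.
    response = requests.get(
        GALAXY_URL + '/api/libraries/datasets/download/tbz',  # zip, tgz, tbz or uncompressed
        params={ 'ld_ids': ','.join( ld_ids ) } )
    response.raise_for_status()

    # For 'tbz' the controller streams a bzip2-compressed tar and advertises
    # the name selected_dataset_files.tbz2 via Content-Disposition.
    with open( 'selected_dataset_files.tbz2', 'wb' ) as out:
        out.write( response.content )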
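In the same hunks, the tar archives are opened with tarfile stream modes: 'w|' writes an uncompressed tar stream (compression is left to the front-end proxy when upstream_gzip is set), while 'w|gz' and 'w|bz2' compress inline; StreamBall is a thin Galaxy wrapper around tarfile. Below is a self-contained approximation of that mode selection using the standard library directly, illustrative only and deliberately omitting the zipfile branch:

    import tarfile

    def open_stream_archive( fileobj, format, upstream_gzip=False ):
        # Mirror the format/extension pairing used above: 'tgz' degrades to a
        # plain tar stream when compression happens upstream, while 'tbz'
        # always compresses inline and is served with a .tbz2 extension.
        if format == 'tgz':
            mode, outext = ( 'w|', 'tar' ) if upstream_gzip else ( 'w|gz', 'tgz' )
        elif format == 'tbz':
            mode, outext = 'w|bz2', 'tbz2'
        else:
            raise ValueError( 'unsupported stream format: %s' % format )
        return tarfile.open( fileobj=fileobj, mode=mode ), outext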
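The __decode_library_content_id() change in library_contents.py above only drops redundant parentheses, but the rule it preserves deserves a gloss: encoded LibraryDataset ids are hex strings whose length is a multiple of 16, and LibraryFolder ids are the same strings with a leading 'F' prepended. A standalone restatement, for illustration only:

    def decode_library_content_id( content_id ):
        # Dataset ids: hex strings whose length is a multiple of 16.
        if len( content_id ) % 16 == 0:
            return 'LibraryDataset', content_id
        # Folder ids: an extra leading 'F' in front of the encoded id.
        elif content_id.startswith( 'F' ):
            return 'LibraryFolder', content_id[ 1: ]
        raise ValueError( 'Malformed library content id ( %s )' % content_id )

    # An 'F' plus a 16-character encoded id therefore decodes as a folder:
    assert decode_library_content_id( 'Fa0d84b45643a2678' ) == ( 'LibraryFolder', 'a0d84b45643a2678' )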
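Finally, sort_by_attr() in library_common.py above is a decorate-sort-undecorate: map( None, ... ) zips each attribute value with its list index and the object itself, and the index breaks ties so two objects never have to be compared directly. The Python-2-only map( None, ... ) idiom obscures that; the sketch below expresses the same idea without it (an illustration, not a drop-in replacement for the Galaxy helper):

    import operator

    def sort_by_attr( seq, attr ):
        # Decorate: ( attribute value, original position, object ). The position
        # settles ties, so objects with equal attribute values are never compared.
        intermed = [ ( getattr( obj, attr ), i, obj ) for i, obj in enumerate( seq ) ]
        intermed.sort()
        # Undecorate: return just the objects, now ordered by the attribute.
        return [ entry[ -1 ] for entry in intermed ]

    # With a stable sort, the one-liner below yields the same ordering:
    # sorted( seq, key=operator.attrgetter( attr ) )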