galaxy-commits

commit/galaxy-central: jmchilton: Merged in nsoranzo/galaxy-central (pull request #551)
by commits-noreply@bitbucket.org, 10 Nov '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a3ebaac5d312/
Changeset:   a3ebaac5d312
User:        jmchilton
Date:        2014-11-10 16:04:02+00:00
Summary:     Merged in nsoranzo/galaxy-central (pull request #551)
Small bugs, doc and pylint fixes.
Affected #:  9 files
diff -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e -r a3ebaac5d31258a02bc7f037721f898c2c1e80e3 lib/galaxy/util/bunch.py
--- a/lib/galaxy/util/bunch.py
+++ b/lib/galaxy/util/bunch.py
@@ -17,6 +17,12 @@
     def items(self):
         return self.__dict__.items()
 
+    def keys(self):
+        return self.__dict__.keys()
+
+    def values(self):
+        return self.__dict__.values()
+
     def __str__(self):
         return '%s' % self.__dict__
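
For context, the two added methods just delegate to the instance __dict__, rounding out Bunch's dict-like interface alongside the existing items(). A minimal standalone sketch (a stub, not the full Galaxy class):

    class Bunch(object):
        """Attribute-style access to a plain dict (stub of galaxy.util.bunch.Bunch)."""
        def __init__(self, **kwargs):
            self.__dict__.update(kwargs)

        def items(self):
            return self.__dict__.items()

        def keys(self):
            return self.__dict__.keys()

        def values(self):
            return self.__dict__.values()

    b = Bunch(state='ok', exit_code=0)
    print(sorted(b.keys()))   # ['exit_code', 'state']
    print(b.state)            # ok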
 
diff -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e -r a3ebaac5d31258a02bc7f037721f898c2c1e80e3 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -13,7 +13,6 @@
 
 from paste.httpexceptions import HTTPBadRequest, HTTPInternalServerError
 from paste.httpexceptions import HTTPNotImplemented, HTTPRequestRangeNotSatisfiable
-from galaxy import exceptions
 from galaxy.exceptions import ItemAccessibilityException, ItemDeletionException, ItemOwnershipException
 from galaxy.exceptions import MessageException
 
@@ -27,7 +26,7 @@
 from galaxy.web.form_builder import AddressField, CheckboxField, SelectField, TextArea, TextField
 from galaxy.web.form_builder import build_select_field, HistoryField, PasswordField, WorkflowField, WorkflowMappingField
 from galaxy.workflow.modules import module_factory, WorkflowModuleInjector, MissingToolException
-from galaxy.model.orm import eagerload, eagerload_all, desc, not_
+from galaxy.model.orm import eagerload, eagerload_all, desc
 from galaxy.security.validate_user_input import validate_publicname
 from galaxy.util.sanitize_html import sanitize_html
 from galaxy.model.item_attrs import Dictifiable, UsesAnnotations
@@ -85,7 +84,7 @@
         Convenience method to get a model object with the specified checks.
         """
         return managers_base.get_object( trans, id, class_name, check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted )
-  
+
     # this should be here - but catching errors from sharable item controllers that *should* have SharableItemMixin
     #   but *don't* then becomes difficult
     #def security_check( self, trans, item, check_ownership=False, check_accessible=False ):
@@ -322,7 +321,7 @@
 
         # Initialize count dict with all states.
         state_count_dict = {}
-        for k, state in trans.app.model.Dataset.states.items():
+        for state in trans.app.model.Dataset.states.values():
             state_count_dict[ state ] = 0
 
         # Process query results, adding to count dict.
@@ -370,7 +369,7 @@
         # init counts, ids for each state
         state_counts = {}
         state_ids = {}
-        for key, state in trans.app.model.Dataset.states.items():
+        for state in trans.app.model.Dataset.states.values():
             state_counts[ state ] = 0
             state_ids[ state ] = []
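
Both hunks apply the same pylint cleanup: when only the values of a mapping are needed, iterating values() avoids binding a key that is never used (W0612, unused-variable). A small illustration, with a hypothetical mapping standing in for trans.app.model.Dataset.states:

    states = {'NEW': 'new', 'RUNNING': 'running', 'OK': 'ok'}

    # Before: 'k' is bound but never used (pylint unused-variable)
    state_count_dict = {}
    for k, state in states.items():
        state_count_dict[state] = 0

    # After: iterate the values directly
    state_count_dict = {}
    for state in states.values():
        state_count_dict[state] = 0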
 
@@ -566,7 +565,7 @@
             # DEPRECATION: We still support unencoded ids for backward compatibility
             try:
                 dataset_id = int( dataset_id )
-            except ValueError, v_err:
+            except ValueError:
                 raise HTTPBadRequest( "Invalid dataset id: %s." % str( dataset_id ) )
 
         try:
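
The 'except ValueError, v_err:' form is Python-2-only syntax, and the bound name was never used, so dropping the binding is both cleaner and forward-compatible with the 'except ... as ...:' spelling. A self-contained sketch of the fixed pattern:

    def decode_unencoded_id(dataset_id):
        # Sketch of the deprecation shim above; ValueError from int() is
        # translated into a client-facing error without binding the exception.
        try:
            return int(dataset_id)
        except ValueError:
            raise ValueError("Invalid dataset id: %s." % str(dataset_id))

    print(decode_unencoded_id('42'))  # 42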
@@ -589,7 +588,7 @@
                 error( "You are not allowed to access this dataset" )
 
             if check_state and data.state == trans.model.Dataset.states.UPLOAD:
-                    return trans.show_error_message( "Please wait until this dataset finishes uploading "
+                return trans.show_error_message( "Please wait until this dataset finishes uploading "
                                                    + "before attempting to view it." )
         return data
 
@@ -651,7 +650,7 @@
                     check_ownership=check_ownership,
                     check_accessible=check_accessible,
                     check_state=check_state )
-            except Exception, exception:
+            except Exception:
                 pass
             hdas.append( hda )
         return hdas
@@ -711,7 +710,7 @@
 
         # ---- return here if deleted AND purged OR can't access
         purged = ( hda.purged or hda.dataset.purged )
-        if ( hda.deleted and purged ):
+        if hda.deleted and purged:
             #TODO: to_dict should really go AFTER this - only summary data
             return trans.security.encode_dict_ids( hda_dict )
 
@@ -747,10 +746,6 @@
         #TODO: it may also be wiser to remove from here and add as API call that loads the visualizations
         #           when the visualizations button is clicked (instead of preloading/pre-checking)
 
-        # ---- return here if deleted
-        if hda.deleted and not purged:
-            return trans.security.encode_dict_ids( hda_dict )
-
         return trans.security.encode_dict_ids( hda_dict )
 
     def get_inaccessible_hda_dict( self, trans, hda ):
@@ -892,7 +887,8 @@
     #           or ( trans.app.security_agent.can_add_library_item( user.all_roles(), item ) ) )
 
     def can_current_user_add_to_library_item( self, trans, item ):
-        if not trans.user: return False
+        if not trans.user:
+            return False
         return (  ( trans.user_is_admin() )
                or ( trans.app.security_agent.can_add_library_item( trans.get_current_user_roles(), item ) ) )
 
@@ -1411,11 +1407,6 @@
         # Get data provider.
         track_data_provider = trans.app.data_provider_registry.get_data_provider( trans, original_dataset=dataset )
 
-        if isinstance( dataset, trans.app.model.HistoryDatasetAssociation ):
-            hda_ldda = "hda"
-        elif isinstance( dataset, trans.app.model.LibraryDatasetDatasetAssociation ):
-            hda_ldda = "ldda"
-
         # Get track definition.
         return {
             "track_type": dataset.datatype.track_type,
@@ -1705,7 +1696,7 @@
         data['name'] = workflow.name
         data['annotation'] = annotation_str
         if workflow.uuid is not None:
-            data['uuid'] = str(workflow.uuid)  
+            data['uuid'] = str(workflow.uuid)
         data['steps'] = {}
         # For each step, rebuild the form and encode the state
         for step in workflow.steps:
@@ -1743,18 +1734,16 @@
             step_dict['inputs'] = module.get_runtime_input_dicts( annotation_str )
             # User outputs
             step_dict['user_outputs'] = []
-            """
-            module_outputs = module.get_data_outputs()
-            step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step )
-            for output in step_outputs:
-                name = output.output_name
-                annotation = ""
-                for module_output in module_outputs:
-                    if module_output.get( 'name', None ) == name:
-                        output_type = module_output.get( 'extension', '' )
-                        break
-                data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type }
-            """
+#            module_outputs = module.get_data_outputs()
+#            step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step )
+#            for output in step_outputs:
+#                name = output.output_name
+#                annotation = ""
+#                for module_output in module_outputs:
+#                    if module_output.get( 'name', None ) == name:
+#                        output_type = module_output.get( 'extension', '' )
+#                        break
+#                data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type }
 
             # All step outputs
             step_dict['outputs'] = []
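
Worth noting why the disabled block was switched from a triple-quoted string to '#' comments: inside a function, a bare string literal is a real expression that is built and discarded at runtime (pylint W0105, pointless-string-statement), whereas hash comments never reach the bytecode. A tiny demonstration:

    def build_step():
        """Real docstring: the first statement, kept by the interpreter."""
        outputs = []
        """
        This is NOT a comment -- it is a string expression, constructed
        and thrown away on every call.
        """
        # This hash comment costs nothing and is the right way to park code.
        return outputs

    print(build_step())  # []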
@@ -2139,7 +2128,7 @@
         # We need the type of each template field widget
         widgets = item.get_template_widgets( trans )
         # The list of widgets may include an AddressField which we need to save if it is new
-        for index, widget_dict in enumerate( widgets ):
+        for widget_dict in widgets:
             widget = widget_dict[ 'widget' ]
             if isinstance( widget, AddressField ):
                 value = util.restore_text( params.get( widget.name, '' ) )
@@ -2220,7 +2209,7 @@
                         trans.sa_session.flush()
                         info_association = sra.run
                     else:
-                       info_association = assoc.run
+                        info_association = assoc.run
                 else:
                     info_association = None
             if info_association:
@@ -2364,7 +2353,7 @@
     def widget_fields_have_contents( self, widgets ):
         # Return True if any of the fields in widgets contain contents, widgets is a list of dictionaries that looks something like:
         # [{'widget': <galaxy.web.form_builder.TextField object at 0x10867aa10>, 'helptext': 'Field 0 help (Optional)', 'label': 'Field 0'}]
-        for i, field in enumerate( widgets ):
+        for field in widgets:
             if ( isinstance( field[ 'widget' ], TextArea ) or isinstance( field[ 'widget' ], TextField ) ) and field[ 'widget' ].value:
                 return True
             if isinstance( field[ 'widget' ], SelectField ) and field[ 'widget' ].options:
@@ -2385,7 +2374,7 @@
 
     def clean_field_contents( self, widgets, **kwd ):
         field_contents = {}
-        for index, widget_dict in enumerate( widgets ):
+        for widget_dict in widgets:
             widget = widget_dict[ 'widget' ]
             value = kwd.get( widget.name, ''  )
             if isinstance( widget, CheckboxField ):
@@ -2434,7 +2423,7 @@
         '''
         params = util.Params( kwd )
         values = {}
-        for index, field in enumerate( form_definition.fields ):
+        for field in form_definition.fields:
             field_type = field[ 'type' ]
             field_name = field[ 'name' ]
             input_value = params.get( field_name, '' )
@@ -2586,7 +2575,7 @@
         if message:
             return trans.fill_template( '/sharing_base.mako', item=self.get_item( trans, id ), message=message, status='error' )
         user.username = username
-        trans.sa_session.flush
+        trans.sa_session.flush()
         return self.sharing( trans, id, **kwargs )
 
     @web.expose
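
The flush change just above is a genuine bug fix rather than style: 'trans.sa_session.flush' without parentheses merely evaluates the bound method and discards it, so the username change was never flushed at this point. A minimal reproduction with a stand-in session object:

    class FakeSession(object):
        # Stand-in for an SQLAlchemy session, just to show the trap.
        def __init__(self):
            self.flushed = False

        def flush(self):
            self.flushed = True

    s = FakeSession()
    s.flush            # bare attribute access: nothing happens
    print(s.flushed)   # False  <- the bug
    s.flush()
    print(s.flushed)   # True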
@@ -2648,34 +2637,34 @@
     @web.require_login( "share Galaxy items" )
     def sharing( self, trans, id, **kwargs ):
         """ Handle item sharing. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     @web.expose
     @web.require_login( "share Galaxy items" )
     def share( self, trans, id=None, email="", **kwd ):
         """ Handle sharing an item with a particular user. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     @web.expose
     def display_by_username_and_slug( self, trans, username, slug ):
         """ Display item by username and slug. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     @web.json
     @web.require_login( "get item name and link" )
     def get_name_and_link_async( self, trans, id=None ):
         """ Returns item's name and link. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     @web.expose
     @web.require_login("get item content asynchronously")
     def get_item_content_async( self, trans, id ):
         """ Returns item content in HTML format. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     def get_item( self, trans, id ):
         """ Return item based on id. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
 
 class UsesQuotaMixin( object ):
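
Raising a bare string ('raise "Unimplemented Method"') was deprecated in Python 2.5 and raises TypeError from Python 2.6 on, since exceptions must derive from BaseException. NotImplementedError is the conventional marker for methods a subclass must override. A sketch with a hypothetical stand-in for the mixin:

    class SharableItemBase(object):
        def sharing(self, trans, id, **kwargs):
            """Handle item sharing."""
            raise NotImplementedError()

    try:
        SharableItemBase().sharing(None, 1)
    except NotImplementedError:
        print('subclass must override sharing()')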
@@ -2692,7 +2681,7 @@
     def _get_user_tags( self, trans, item_class_name, id ):
         user = trans.user
         tagged_item = self._get_tagged_item( trans, item_class_name, id )
-        return [ tag for tag in tagged_item.tags if ( tag.user == user ) ]
+        return [ tag for tag in tagged_item.tags if tag.user == user ]
 
     def _get_tagged_item( self, trans, item_class_name, id, check_ownership=True ):
         tagged_item = self.get_object( trans, id, item_class_name, check_ownership=check_ownership, check_accessible=True )
@@ -2756,7 +2745,6 @@
         return sorted( tags )
 
 
-
 class UsesExtendedMetadataMixin( SharableItemSecurityMixin ):
     """ Mixin for getting and setting item extended metadata. """
 
@@ -2846,10 +2834,10 @@
             yield prefix, ("%s" % (meta)).encode("utf8", errors='replace')
 
 
-"""
-Deprecated: `BaseController` used to be available under the name `Root`
-"""
 class ControllerUnavailable( Exception ):
+    """
+    Deprecated: `BaseController` used to be available under the name `Root`
+    """
     pass
 
 ## ---- Utility methods -------------------------------------------------------
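
The docstring relocation above is more than cosmetic: a triple-quoted string placed before a class is not attached to the class, while the first statement inside the body becomes the class docstring, visible to help() and __doc__. Compare:

    """A string above the class body is not attached to the class."""
    class Before(Exception):
        pass

    class After(Exception):
        """Deprecated: `BaseController` used to be available under the name `Root`"""
        pass

    print(Before.__doc__)  # None
    print(After.__doc__)   # the deprecation note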
diff -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e -r a3ebaac5d31258a02bc7f037721f898c2c1e80e3 lib/galaxy/web/framework/webapp.py
--- a/lib/galaxy/web/framework/webapp.py
+++ b/lib/galaxy/web/framework/webapp.py
@@ -504,9 +504,9 @@
             for char in filter( lambda x: x not in string.ascii_lowercase + string.digits + '-', username ):
                 username = username.replace( char, '-' )
             # Find a unique username - user can change it later
-            if ( self.sa_session.query( self.app.model.User ).filter_by( username=username ).first() ):
+            if self.sa_session.query( self.app.model.User ).filter_by( username=username ).first():
                 i = 1
-                while ( self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first() ):
+                while self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first():
                     i += 1
                 username += '-' + str(i)
             user.username = username
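
The surrounding logic derives a unique username: sanitize to lowercase letters, digits and dashes, then probe with increasing numeric suffixes until the name is free. A self-contained sketch, with a set standing in for the database lookup:

    import string

    def unique_username(candidate, taken):
        allowed = string.ascii_lowercase + string.digits + '-'
        for char in [c for c in candidate if c not in allowed]:
            candidate = candidate.replace(char, '-')
        if candidate in taken:
            i = 1
            while '%s-%d' % (candidate, i) in taken:
                i += 1
            candidate = '%s-%d' % (candidate, i)
        return candidate

    print(unique_username('j.doe', {'j-doe', 'j-doe-1'}))  # j-doe-2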
@@ -701,11 +701,6 @@
     def template_context( self ):
         return dict()
 
-    def make_form_data( self, name, **kwargs ):
-        rval = self.template_context[name] = FormData()
-        rval.values.update( kwargs )
-        return rval
-
     def set_message( self, message, type=None ):
         """
         Convenience method for setting the 'message' and 'message_type'
diff -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e -r a3ebaac5d31258a02bc7f037721f898c2c1e80e3 lib/galaxy/webapps/demo_sequencer/framework/__init__.py
--- a/lib/galaxy/webapps/demo_sequencer/framework/__init__.py
+++ b/lib/galaxy/webapps/demo_sequencer/framework/__init__.py
@@ -116,10 +116,7 @@
     @galaxy.web.framework.base.lazy_property
     def template_context( self ):
         return dict()
-    def make_form_data( self, name, **kwargs ):
-        rval = self.template_context[name] = FormData()
-        rval.values.update( kwargs )
-        return rval
+
     def set_message( self, message, type=None ):
         """
         Convenience method for setting the 'message' and 'message_type'
diff -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e -r a3ebaac5d31258a02bc7f037721f898c2c1e80e3 lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -92,7 +92,7 @@
 
         :rtype:     dictionary
         :returns:   detailed history information from
-            :func:`galaxy.web.base.controller.UsesHistoryDatasetAssociationMixin.get_history_dict`
+            :func:`galaxy.web.base.controller.UsesHistoryMixin.get_history_dict`
         """
         history_id = id
         deleted = string_as_bool( deleted )
diff -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e -r a3ebaac5d31258a02bc7f037721f898c2c1e80e3 lib/galaxy/webapps/galaxy/api/lda_datasets.py
--- a/lib/galaxy/webapps/galaxy/api/lda_datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/lda_datasets.py
@@ -480,7 +480,7 @@
         tool.update_state( trans, tool.inputs_by_page[ 0 ], state.inputs, kwd )
         tool_params = state.inputs
         dataset_upload_inputs = []
-        for input_name, input in tool.inputs.iteritems():
+        for input in tool.inputs.itervalues():
             if input.type == "upload_dataset":
                 dataset_upload_inputs.append( input )
         library_bunch = upload_common.handle_library_params( trans, {}, trans.security.encode_id( folder.id ) )
@@ -536,7 +536,7 @@
         * POST /api/libraries/datasets/download/{format}
             Downloads requested datasets (identified by encoded IDs) in requested format.
 
-        example: ``GET localhost:8080/api/libraries/datasets/download/tbz?ldda_ids%255B%255D=a0d84b45643a2678&ldda_ids%255B%255D=fe38c84dcd46c828``
+        example: ``GET localhost:8080/api/libraries/datasets/download/tbz?ld_ids%255B%255D=a0d84b45643a2678&ld_ids%255B%255D=fe38c84dcd46c828``
 
         .. note:: supported format values are: 'zip', 'tgz', 'tbz', 'uncompressed'
 
@@ -554,7 +554,7 @@
         datasets_to_download = kwd.get( 'ld_ids%5B%5D', None )
         if datasets_to_download is None:
             datasets_to_download = kwd.get( 'ld_ids', None )
-        if ( datasets_to_download is not None ):
+        if datasets_to_download is not None:
             datasets_to_download = util.listify( datasets_to_download )
             for dataset_id in datasets_to_download:
                 try:
@@ -570,128 +570,128 @@
             raise exceptions.RequestParameterMissingException( 'Request has to contain a list of dataset ids to download.' )
 
         if format in [ 'zip', 'tgz', 'tbz' ]:
-                # error = False
-                killme = string.punctuation + string.whitespace
-                trantab = string.maketrans( killme, '_'*len( killme ) )
-                try:
-                    outext = 'zip'
-                    if format == 'zip':
-                        # Can't use mkstemp - the file must not exist first
-                        tmpd = tempfile.mkdtemp()
-                        util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, self.app.config.gid )
-                        tmpf = os.path.join( tmpd, 'library_download.' + format )
-                        if trans.app.config.upstream_gzip:
-                            archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True )
+            # error = False
+            killme = string.punctuation + string.whitespace
+            trantab = string.maketrans( killme, '_'*len( killme ) )
+            try:
+                outext = 'zip'
+                if format == 'zip':
+                    # Can't use mkstemp - the file must not exist first
+                    tmpd = tempfile.mkdtemp()
+                    util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, self.app.config.gid )
+                    tmpf = os.path.join( tmpd, 'library_download.' + format )
+                    if trans.app.config.upstream_gzip:
+                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True )
+                    else:
+                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
+                    archive.add = lambda x, y: archive.write( x, y.encode( 'CP437' ) )
+                elif format == 'tgz':
+                    if trans.app.config.upstream_gzip:
+                        archive = StreamBall( 'w|' )
+                        outext = 'tar'
+                    else:
+                        archive = StreamBall( 'w|gz' )
+                        outext = 'tgz'
+                elif format == 'tbz':
+                    archive = StreamBall( 'w|bz2' )
+                    outext = 'tbz2'
+            except ( OSError, zipfile.BadZipfile ):
+                log.exception( "Unable to create archive for download" )
+                raise exceptions.InternalServerError( "Unable to create archive for download." )
+            except Exception:
+                log.exception( "Unexpected error %s in create archive for download" % sys.exc_info()[ 0 ] )
+                raise exceptions.InternalServerError( "Unable to create archive for download." )
+            composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
+            seen = []
+            for ld in library_datasets:
+                ldda = ld.library_dataset_dataset_association
+                ext = ldda.extension
+                is_composite = ext in composite_extensions
+                path = ""
+                parent_folder = ldda.library_dataset.folder
+                while parent_folder is not None:
+                    # Exclude the now-hidden "root folder"
+                    if parent_folder.parent is None:
+                        path = os.path.join( parent_folder.library_root[ 0 ].name, path )
+                        break
+                    path = os.path.join( parent_folder.name, path )
+                    parent_folder = parent_folder.parent
+                path += ldda.name
+                while path in seen:
+                    path += '_'
+                seen.append( path )
+                zpath = os.path.split(path)[ -1 ]  # comes as base_name/fname
+                outfname, zpathext = os.path.splitext( zpath )
+
+                if is_composite:  # need to add all the components from the extra_files_path to the zip
+                    if zpathext == '':
+                        zpath = '%s.html' % zpath  # fake the real nature of the html file
+                    try:
+                        if format == 'zip':
+                            archive.add( ldda.dataset.file_name, zpath )  # add the primary of a composite set
                         else:
-                            archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
-                        archive.add = lambda x, y: archive.write( x, y.encode( 'CP437' ) )
-                    elif format == 'tgz':
-                        if trans.app.config.upstream_gzip:
-                            archive = StreamBall( 'w|' )
-                            outext = 'tar'
-                        else:
-                            archive = StreamBall( 'w|gz' )
-                            outext = 'tgz'
-                    elif format == 'tbz':
-                        archive = StreamBall( 'w|bz2' )
-                        outext = 'tbz2'
-                except ( OSError, zipfile.BadZipfile ):
-                    log.exception( "Unable to create archive for download" )
-                    raise exceptions.InternalServerError( "Unable to create archive for download." )
-                except Exception:
-                    log.exception( "Unexpected error %s in create archive for download" % sys.exc_info()[ 0 ] )
-                    raise exceptions.InternalServerError( "Unable to create archive for download." )
-                composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
-                seen = []
-                for ld in library_datasets:
-                    ldda = ld.library_dataset_dataset_association
-                    ext = ldda.extension
-                    is_composite = ext in composite_extensions
-                    path = ""
-                    parent_folder = ldda.library_dataset.folder
-                    while parent_folder is not None:
-                        # Exclude the now-hidden "root folder"
-                        if parent_folder.parent is None:
-                            path = os.path.join( parent_folder.library_root[ 0 ].name, path )
-                            break
-                        path = os.path.join( parent_folder.name, path )
-                        parent_folder = parent_folder.parent
-                    path += ldda.name
-                    while path in seen:
-                        path += '_'
-                    seen.append( path )
-                    zpath = os.path.split(path)[ -1 ]  # comes as base_name/fname
-                    outfname, zpathext = os.path.splitext( zpath )
+                            archive.add( ldda.dataset.file_name, zpath, check_file=True )  # add the primary of a composite set
+                    except IOError:
+                        log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
+                        raise exceptions.InternalServerError( "Unable to create archive for download." )
+                    except ObjectNotFound:
+                        log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+                        raise exceptions.ObjectNotFound( "Requested dataset not found. " )
+                    except Exception, e:
+                        log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
+                        raise exceptions.InternalServerError( "Unable to add composite parent to temporary library download archive. " + str( e ) )
 
-                    if is_composite:  # need to add all the components from the extra_files_path to the zip
-                        if zpathext == '':
-                            zpath = '%s.html' % zpath  # fake the real nature of the html file
+                    flist = glob.glob(os.path.join(ldda.dataset.extra_files_path, '*.*'))  # glob returns full paths
+                    for fpath in flist:
+                        efp, fname = os.path.split(fpath)
+                        if fname > '':
+                            fname = fname.translate(trantab)
                         try:
                             if format == 'zip':
-                                archive.add( ldda.dataset.file_name, zpath )  # add the primary of a composite set
+                                archive.add( fpath, fname )
                             else:
-                                archive.add( ldda.dataset.file_name, zpath, check_file=True )  # add the primary of a composite set
+                                archive.add( fpath, fname, check_file=True )
                         except IOError:
-                            log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
+                            log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname) )
                             raise exceptions.InternalServerError( "Unable to create archive for download." )
                         except ObjectNotFound:
-                            log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
-                            raise exceptions.ObjectNotFound( "Requested dataset not found. " )
-                        except Exception, e:
-                            log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
-                            raise exceptions.InternalServerError( "Unable to add composite parent to temporary library download archive. " + str( e ) )
-
-                        flist = glob.glob(os.path.join(ldda.dataset.extra_files_path, '*.*'))  # glob returns full paths
-                        for fpath in flist:
-                            efp, fname = os.path.split(fpath)
-                            if fname > '':
-                                fname = fname.translate(trantab)
-                            try:
-                                if format == 'zip':
-                                    archive.add( fpath, fname )
-                                else:
-                                    archive.add( fpath, fname, check_file=True )
-                            except IOError:
-                                log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname) )
-                                raise exceptions.InternalServerError( "Unable to create archive for download." )
-                            except ObjectNotFound:
-                                log.exception( "Requested dataset %s does not exist on the host." % fpath )
-                                raise exceptions.ObjectNotFound( "Requested dataset not found." )
-                            except Exception, e:
-                                log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) )
-                                raise exceptions.InternalServerError( "Unable to add dataset to temporary library download archive . " + str( e ) )
-
-                    else:  # simple case
-                        try:
-                            if format == 'zip':
-                                archive.add( ldda.dataset.file_name, path )
-                            else:
-                                archive.add( ldda.dataset.file_name, path, check_file=True )
-                        except IOError:
-                            log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name )
-                            raise exceptions.InternalServerError( "Unable to create archive for download" )
-                        except ObjectNotFound:
-                            log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+                            log.exception( "Requested dataset %s does not exist on the host." % fpath )
                             raise exceptions.ObjectNotFound( "Requested dataset not found." )
                         except Exception, e:
                             log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) )
-                            raise exceptions.InternalServerError( "Unknown error. " + str( e ) )
-                lname = 'selected_dataset'
-                fname = lname.replace( ' ', '_' ) + '_files'
-                if format == 'zip':
-                    archive.close()
-                    trans.response.set_content_type( "application/octet-stream" )
-                    trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext )
-                    archive = util.streamball.ZipBall( tmpf, tmpd )
-                    archive.wsgi_status = trans.response.wsgi_status()
-                    archive.wsgi_headeritems = trans.response.wsgi_headeritems()
-                    return archive.stream
-                else:
-                    trans.response.set_content_type( "application/x-tar" )
-                    trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext )
-                    archive.wsgi_status = trans.response.wsgi_status()
-                    archive.wsgi_headeritems = trans.response.wsgi_headeritems()
-                    return archive.stream
+                            raise exceptions.InternalServerError( "Unable to add dataset to temporary library download archive . " + str( e ) )
+
+                else:  # simple case
+                    try:
+                        if format == 'zip':
+                            archive.add( ldda.dataset.file_name, path )
+                        else:
+                            archive.add( ldda.dataset.file_name, path, check_file=True )
+                    except IOError:
+                        log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name )
+                        raise exceptions.InternalServerError( "Unable to create archive for download" )
+                    except ObjectNotFound:
+                        log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+                        raise exceptions.ObjectNotFound( "Requested dataset not found." )
+                    except Exception, e:
+                        log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) )
+                        raise exceptions.InternalServerError( "Unknown error. " + str( e ) )
+            lname = 'selected_dataset'
+            fname = lname.replace( ' ', '_' ) + '_files'
+            if format == 'zip':
+                archive.close()
+                trans.response.set_content_type( "application/octet-stream" )
+                trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext )
+                archive = util.streamball.ZipBall( tmpf, tmpd )
+                archive.wsgi_status = trans.response.wsgi_status()
+                archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+                return archive.stream
+            else:
+                trans.response.set_content_type( "application/x-tar" )
+                trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext )
+                archive.wsgi_status = trans.response.wsgi_status()
+                archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+                return archive.stream
         elif format == 'uncompressed':
             if len(library_datasets) != 1:
                 raise exceptions.RequestParameterInvalidException( "You can download only one uncompressed file at once." )
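
The large hunk above is a pure dedent -- the body was nested one level deeper than its 'if', which pylint flags as bad indentation -- so behaviour is unchanged. The kernel of it is selecting an archive writer by requested format, roughly as below using only the stdlib (Galaxy's StreamBall is a streaming wrapper around tarfile; upstream_gzip handling omitted):

    import tarfile
    import zipfile

    def open_download_archive(path, format):
        # Returns (writer, output extension) for the requested format.
        if format == 'zip':
            return zipfile.ZipFile(path, 'w', zipfile.ZIP_DEFLATED, True), 'zip'
        elif format == 'tgz':
            return tarfile.open(path, 'w|gz'), 'tgz'
        elif format == 'tbz':
            return tarfile.open(path, 'w|bz2'), 'tbz2'
        raise ValueError('unsupported archive format: %s' % format)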
diff -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e -r a3ebaac5d31258a02bc7f037721f898c2c1e80e3 lib/galaxy/webapps/galaxy/api/library_contents.py
--- a/lib/galaxy/webapps/galaxy/api/library_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/library_contents.py
@@ -146,17 +146,21 @@
         the HDA's encoded id in ``from_hda_id`` (and optionally ``ldda_message``).
 
         :type   library_id: str
-        :param  library_id: encoded id string of the library that contains this item
+        :param  library_id: encoded id string of the library where to create the new item
         :type   payload:    dict
         :param  payload:    dictionary structure containing:
 
             * folder_id:    the parent folder of the new item
-            * create_type:  the type of item to create ('file' or 'folder')
+            * create_type:  the type of item to create ('file', 'folder' or 'collection')
             * from_hda_id:  (optional) the id of an accessible HDA to copy into the
                 library
             * ldda_message: (optional) the new message attribute of the LDDA created
             * extended_metadata: (optional) dub-dictionary containing any extended
                 metadata to associate with the item
+            * link_data_only: (optional) either 'copy_files' (default) or 'link_to_files'
+            * upload_option: (optional) one of 'upload_file' (default), 'upload_directory' or 'upload_paths'
+            * server_dir: (optional) only if upload_option is 'upload_directory'
+            * filesystem_paths: (optional) only if upload_option is 'upload_paths' and the user is an admin
 
         :rtype:     dict
         :returns:   a dictionary containing the id, name,
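
For illustration, a create request exercising the newly documented options might carry a payload like the following (all values hypothetical; 'upload_paths' and 'filesystem_paths' are admin-only, per the docs above):

    payload = {
        'folder_id': 'F0123456789abcdef',        # hypothetical encoded folder id
        'create_type': 'file',
        'upload_option': 'upload_paths',         # admin-only
        'filesystem_paths': '/data/run42/reads.fastq',
        'link_data_only': 'link_to_files',       # link instead of copying
        'ldda_message': 'linked in place from the sequencer share',
    }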
@@ -217,11 +221,9 @@
             return output
         else:
             rval = []
-            for k, v in output.items():
+            for v in output.values():
                 if ex_meta_payload is not None:
-                    """
-                    If there is extended metadata, store it, attach it to the dataset, and index it
-                    """
+                    # If there is extended metadata, store it, attach it to the dataset, and index it
                     ex_meta = ExtendedMetadata(ex_meta_payload)
                     trans.sa_session.add( ex_meta )
                     v.extended_metadata = ex_meta
@@ -343,9 +345,9 @@
             trans.sa_session.flush()
 
     def __decode_library_content_id( self, trans, content_id ):
-        if ( len( content_id ) % 16 == 0 ):
+        if len( content_id ) % 16 == 0:
             return 'LibraryDataset', content_id
-        elif ( content_id.startswith( 'F' ) ):
+        elif content_id.startswith( 'F' ):
             return 'LibraryFolder', content_id[ 1: ]
         else:
             raise HTTPBadRequest( 'Malformed library content id ( %s ) specified, unable to decode.' % str( content_id ) )
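
The id decoder in this hunk dispatches on shape: Galaxy's encoded ids are hex strings whose length is a multiple of 16, and folder ids carry an 'F' prefix. A standalone sketch:

    def decode_library_content_id(content_id):
        if len(content_id) % 16 == 0:
            return 'LibraryDataset', content_id
        elif content_id.startswith('F'):
            return 'LibraryFolder', content_id[1:]
        raise ValueError('Malformed library content id ( %s )' % content_id)

    print(decode_library_content_id('a0d84b45643a2678'))   # ('LibraryDataset', ...)
    print(decode_library_content_id('Fa0d84b45643a2678'))  # ('LibraryFolder', ...)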
diff -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e -r a3ebaac5d31258a02bc7f037721f898c2c1e80e3 lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -1,8 +1,6 @@
 import logging
 import os
-import tempfile
 import urllib
-import zipfile
 
 from galaxy import datatypes, eggs, model, util, web
 from galaxy.datatypes.display_applications.util import decode_dataset_user, encode_dataset_user
@@ -43,8 +41,8 @@
             accepted_filter_labels_and_vals = { "Active" : "False", "Deleted" : "True", "All": "All" }
             accepted_filters = []
             for label, val in accepted_filter_labels_and_vals.items():
-               args = { self.key: val }
-               accepted_filters.append( grids.GridColumnFilter( label, args) )
+                args = { self.key: val }
+                accepted_filters.append( grids.GridColumnFilter( label, args) )
             return accepted_filters
 
     # Grid definition
@@ -147,7 +145,7 @@
         trans.response.set_content_type( 'text/plain' )
         exit_code = ""
         try:
-            job = self._get_job_for_dataset( dataset_id )
+            job = self._get_job_for_dataset( trans, dataset_id )
             exit_code = job.exit_code
         except:
             exit_code = "Invalid dataset ID or you are not allowed to access this dataset"
@@ -323,40 +321,38 @@
                     if params.annotation:
                         annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' )
                         self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation )
-                    """
                     # This block on controller code is inactive until the 'extended_metadata' edit box is added back into the UI
                     # Add or delete extended metadata
-                    if params.extended_metadata:
-                        em_string = params.extended_metadata
-                        if len(em_string):
-                            em_payload = None
-                            try:
-                                em_payload = loads(em_string)
-                            except Exception, e:
-                                message = 'Invalid JSON input'
-                                error = True
-                            if em_payload is not None:
-                                if data is not None:
-                                    ex_obj = self.get_item_extended_metadata_obj(trans, data)
-                                    if ex_obj is not None:
-                                        self.unset_item_extended_metadata_obj(trans, data)
-                                        self.delete_extended_metadata(trans, ex_obj)
-                                    ex_obj = self.create_extended_metadata(trans, em_payload)
-                                    self.set_item_extended_metadata_obj(trans, data, ex_obj)
-                                    message = "Updated Extended metadata '%s'." % data.name
-                                    status = 'done'
-                                else:
-                                    message = "data not found"
-                                    error = True
-                    else:
-                        if data is not None:
-                            ex_obj = self.get_item_extended_metadata_obj(trans, data)
-                            if ex_obj is not None:
-                                self.unset_item_extended_metadata_obj(trans, data)
-                                self.delete_extended_metadata(trans, ex_obj)
-                        message = "Deleted Extended metadata '%s'." % data.name
-                        status = 'done'
-                    """
+#                    if params.extended_metadata:
+#                        em_string = params.extended_metadata
+#                        if len(em_string):
+#                            em_payload = None
+#                            try:
+#                                em_payload = loads(em_string)
+#                            except Exception, e:
+#                                message = 'Invalid JSON input'
+#                                error = True
+#                            if em_payload is not None:
+#                                if data is not None:
+#                                    ex_obj = self.get_item_extended_metadata_obj(trans, data)
+#                                    if ex_obj is not None:
+#                                        self.unset_item_extended_metadata_obj(trans, data)
+#                                        self.delete_extended_metadata(trans, ex_obj)
+#                                    ex_obj = self.create_extended_metadata(trans, em_payload)
+#                                    self.set_item_extended_metadata_obj(trans, data, ex_obj)
+#                                    message = "Updated Extended metadata '%s'." % data.name
+#                                    status = 'done'
+#                                else:
+#                                    message = "data not found"
+#                                    error = True
+#                    else:
+#                        if data is not None:
+#                            ex_obj = self.get_item_extended_metadata_obj(trans, data)
+#                            if ex_obj is not None:
+#                                self.unset_item_extended_metadata_obj(trans, data)
+#                                self.delete_extended_metadata(trans, ex_obj)
+#                        message = "Deleted Extended metadata '%s'." % data.name
+#                        status = 'done'
 
                     # If setting metadata previously failed and all required elements have now been set, clear the failed state.
                     if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta():
@@ -954,13 +950,11 @@
         has_parameter_errors = False
         inherit_chain = hda.source_dataset_chain
         if inherit_chain:
-            job_dataset_association, dataset_association_container_name = inherit_chain[-1]
+            job_dataset_association = inherit_chain[-1][0]
         else:
             job_dataset_association = hda
         if job_dataset_association.creating_job_associations:
-            for assoc in job_dataset_association.creating_job_associations:
-                job = assoc.job
-                break
+            job = job_dataset_association.creating_job_associations[0].job
             if job:
                 # Get the tool object
                 try:
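
The loop removed above only ever executed once -- it assigned and broke on the first association -- so indexing the first element says the same thing directly:

    class Assoc(object):
        # Hypothetical stand-in for a creating-job association.
        def __init__(self, job):
            self.job = job

    associations = [Assoc('job-1'), Assoc('job-2')]

    # Before: a for/break that always takes the first element
    for assoc in associations:
        job = assoc.job
        break

    # After: direct indexing, same result
    job = associations[0].job
    print(job)  # job-1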
@@ -1024,7 +1018,7 @@
                     trans.sa_session.flush()
                     target_history_ids.append( new_history.id )
                 if user:
-                    target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if ( hist is not None and hist.user == user )]
+                    target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if hist is not None and hist.user == user ]
                 else:
                     target_histories = [ history ]
                 if len( target_histories ) != len( target_history_ids ):
diff -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e -r a3ebaac5d31258a02bc7f037721f898c2c1e80e3 lib/galaxy/webapps/galaxy/controllers/library_common.py
--- a/lib/galaxy/webapps/galaxy/controllers/library_common.py
+++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py
@@ -162,6 +162,7 @@
                                                           default_action=default_action,
                                                           message=util.sanitize_text( message ),
                                                           status=status ) )
+
     @web.expose
     def library_info( self, trans, cntrller, **kwd ):
         params = util.Params( kwd )
@@ -222,6 +223,7 @@
                                     inherited=inherited,
                                     message=message,
                                     status=status )
+
     @web.expose
     def library_permissions( self, trans, cntrller, **kwd ):
         params = util.Params( kwd )
@@ -269,6 +271,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def create_folder( self, trans, cntrller, parent_id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -346,6 +349,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def folder_info( self, trans, cntrller, id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -403,6 +407,7 @@
                                     inherited=inherited,
                                     message=message,
                                     status=status )
+
     @web.expose
     def folder_permissions( self, trans, cntrller, id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -453,6 +458,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def ldda_edit_info( self, trans, cntrller, library_id, folder_id, id, **kwd ):
         params = util.Params( kwd )
@@ -606,6 +612,7 @@
                                     inherited=inherited,
                                     message=message,
                                     status=status )
+
     @web.expose
     def ldda_info( self, trans, cntrller, library_id, folder_id, id, **kwd ):
         params = util.Params( kwd )
@@ -655,6 +662,7 @@
                                     inherited=inherited,
                                     message=message,
                                     status=status )
+
     @web.expose
     def ldda_permissions( self, trans, cntrller, library_id, folder_id, id, **kwd ):
         params = util.Params( kwd )
@@ -793,6 +801,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def upload_library_dataset( self, trans, cntrller, library_id, folder_id, **kwd ):
         params = util.Params( kwd )
@@ -1042,6 +1051,7 @@
                                     ldda_message=ldda_message,
                                     message=message,
                                     status=status )
+
     def upload_dataset( self, trans, cntrller, library_id, folder_id, replace_dataset=None, **kwd ):
         # Set up the traditional tool state/params
         tool_id = 'upload1'
@@ -1132,6 +1142,7 @@
         trans.sa_session.add( job )
         trans.sa_session.flush()
         return output
+
     def make_library_uploaded_dataset( self, trans, cntrller, params, name, path, type, library_bunch, in_folder=None ):
         link_data_only = params.get( 'link_data_only', 'copy_files' )
         uuid_str =  params.get( 'uuid', None )
@@ -1166,6 +1177,7 @@
             trans.sa_session.add_all( ( uploaded_dataset.data, uploaded_dataset.data.dataset ) )
             trans.sa_session.flush()
         return uploaded_dataset
+
     def get_server_dir_uploaded_datasets( self, trans, cntrller, params, full_dir, import_dir_desc, library_bunch, response_code, message ):
         dir_response = self._get_server_dir_files(params, full_dir, import_dir_desc)
         files = dir_response[0]
@@ -1176,6 +1188,7 @@
             name = os.path.basename( file )
             uploaded_datasets.append( self.make_library_uploaded_dataset( trans, cntrller, params, name, file, 'server_dir', library_bunch ) )
         return uploaded_datasets, 200, None
+
     def _get_server_dir_files( self, params, full_dir, import_dir_desc ):
         files = []
         try:
@@ -1212,6 +1225,7 @@
             response_code = 400
             return None, response_code, message
         return files, None, None
+
     def get_path_paste_uploaded_datasets( self, trans, cntrller, params, library_bunch, response_code, message ):
         preserve_dirs = util.string_as_bool( params.get( 'preserve_dirs', False ) )
         uploaded_datasets = []
@@ -1246,6 +1260,7 @@
                     in_folder = None
                 files_and_folders.append((file_path, file, in_folder))
         return files_and_folders
+
     def _paths_list(self, params):
         return [ (l.strip(), os.path.abspath(l.strip())) for l in params.filesystem_paths.splitlines() if l.strip() ]
 
@@ -1463,6 +1478,7 @@
                                             ldda_message=ldda_message,
                                             message=message,
                                             status=status )
+
     def _build_roles_select_list( self, trans, cntrller, library, selected_role_ids=[] ):
         # Get the list of legitimate roles to display on the upload form.  If the library is public,
         # all active roles are legitimate.  If the library is restricted by the LIBRARY_ACCESS permission, only
@@ -1478,10 +1494,11 @@
             return roles_select_list
         else:
             return None
+
     def _build_upload_option_select_list( self, trans, upload_option, is_admin, do_not_include_values=[] ):
         # Build the upload_option select list.  The do_not_include_values param can contain options that
         # should not be included in the list.  For example, the 'upload_directory' option should not be
-        # included if uploading a new version of a librar dataset.
+        # included if uploading a new version of a library dataset.
         upload_refresh_on_change_values = []
         for option_value, option_label in trans.model.LibraryDataset.upload_options:
             if option_value not in do_not_include_values:
@@ -1508,6 +1525,7 @@
                         continue
                 upload_option_select_list.add_option( option_label, option_value, selected=option_value==upload_option )
         return upload_option_select_list
+
     def _get_populated_widgets( self, folder ):
         # See if we have any inherited templates.
         info_association, inherited = folder.get_info_association( inherited=True )
@@ -1517,6 +1535,7 @@
             return self.populate_widgets_from_kwd( trans, widgets, **kwd )
         else:
             return []
+
     @web.expose
     def download_dataset_from_folder( self, trans, cntrller, id, library_id=None, **kwd ):
         """Catches the dataset id and displays file contents as directed"""
@@ -1557,6 +1576,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status='error' ) )
+
     @web.expose
     def library_dataset_info( self, trans, cntrller, id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -1606,6 +1626,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def library_dataset_permissions( self, trans, cntrller, id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -1654,6 +1675,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def make_library_item_public( self, trans, cntrller, library_id, item_type, id, **kwd ):
         params = util.Params( kwd )
@@ -1696,6 +1718,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status=status ) )
+
     @web.expose
     def act_on_multiple_datasets( self, trans, cntrller, library_id=None, ldda_ids='', **kwd ):
         # This method is called from 1 of 3 places:
@@ -2113,6 +2136,7 @@
                                     use_panels=use_panels,
                                     message=message,
                                     status=status )
+
     @web.expose
     def manage_template_inheritance( self, trans, cntrller, item_type, library_id, folder_id=None, ldda_id=None, **kwd ):
         params = util.Params( kwd )
@@ -2159,6 +2183,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status='done' ) )
+
     @web.expose
     def move_library_item( self, trans, cntrller, item_type, item_id, source_library_id='', make_target_current=True, **kwd ):
         # This method is called from one of the following places:
@@ -2374,6 +2399,7 @@
                                     use_panels=use_panels,
                                     message=message,
                                     status=status )
+
     @web.expose
     def delete_library_item( self, trans, cntrller, library_id, item_id, item_type, **kwd ):
         # This action will handle deleting all types of library items.  State is saved for libraries and
@@ -2441,6 +2467,7 @@
                                                               show_deleted=show_deleted,
                                                               message=message,
                                                               status=status ) )
+
     @web.expose
     def undelete_library_item( self, trans, cntrller, library_id, item_id, item_type, **kwd ):
         # This action will handle undeleting all types of library items
@@ -2509,6 +2536,7 @@
                                                               show_deleted=show_deleted,
                                                               message=message,
                                                               status=status ) )
+
     def _check_access( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         can_access = True
         if isinstance( item, trans.model.HistoryDatasetAssociation ):
@@ -2551,6 +2579,7 @@
                                                               show_deleted=show_deleted,
                                                               message=util.sanitize_text( message ),
                                                               status='error' ) )
+
     def _check_add( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission.
         if not ( is_admin or trans.app.security_agent.can_add_library_item( current_user_roles, item ) ):
@@ -2566,6 +2595,7 @@
                                                               show_deleted=show_deleted,
                                                               message=util.sanitize_text( message ),
                                                               status='error' ) )
+
     def _check_manage( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         if isinstance( item, trans.model.LibraryDataset ):
             # Deny access if the user is not an admin and does not have the LIBRARY_MANAGE and DATASET_MANAGE_PERMISSIONS permissions.
@@ -2594,6 +2624,7 @@
                                                               use_panels=use_panels,
                                                               message=util.sanitize_text( message ),
                                                               status='error' ) )
+
     def _check_modify( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         # Deny modification if the user is not an admin and does not have the LIBRARY_MODIFY permission.
         if not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ):
@@ -2619,6 +2650,7 @@
                            .options( eagerload_all( "actions" ) ) \
                            .order_by( trans.app.model.LibraryFolder.table.c.name ) \
                            .all()
+
 def activatable_folders( trans, folder ):
     return trans.sa_session.query( trans.app.model.LibraryFolder ) \
                            .filter_by( parent=folder, purged=False ) \
@@ -2685,6 +2717,7 @@
     if folder.parent:
         return branch_deleted( folder.parent )
     return False
+
 def get_containing_library_from_library_dataset( trans, library_dataset ):
     """Given a library_dataset, get the containing library"""
     folder = library_dataset.folder
@@ -2698,6 +2731,7 @@
         if library.root_folder == folder:
             return library
     return None
+
 def get_comptypes( trans ):
     comptypes_t = comptypes
     if trans.app.config.nginx_x_archive_files_base:
@@ -2710,6 +2744,7 @@
         except:
             pass
     return comptypes_t
+
 def get_sorted_accessible_library_items( trans, cntrller, items, sort_attr ):
     is_admin = trans.user_is_admin() and cntrller == 'library_admin'
     if is_admin:
@@ -2723,6 +2758,7 @@
                 accessible_items.append( item )
     # Sort by name
     return sort_by_attr( [ item for item in accessible_items ], sort_attr )
+
 def sort_by_attr( seq, attr ):
     """
     Sort the sequence of objects by object's attribute
@@ -2738,6 +2774,7 @@
     intermed = map( None, map( getattr, seq, ( attr, ) * len( seq ) ), xrange( len( seq ) ), seq )
     intermed.sort()
     return map( operator.getitem, intermed, ( -1, ) * len( intermed ) )
+
 def lucene_search( trans, cntrller, search_term, search_url, **kwd ):
     """Return display of results from a full-text lucene search of data libraries."""
     params = util.Params( kwd )
@@ -2749,6 +2786,7 @@
     response.close()
     lddas = [ trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id ) for ldda_id in ldda_ids ]
     return status, message, get_sorted_accessible_library_items( trans, cntrller, lddas, 'name' )
+
 def whoosh_search( trans, cntrller, search_term, **kwd ):
     """Return display of results from a full-text whoosh search of data libraries."""
     params = util.Params( kwd )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
                        4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f2b125ad393e/
Changeset:   f2b125ad393e
User:        nsoranzo
Date:        2014-11-04 15:46:42+00:00
Summary:     Remove unused and buggy function.
Affected #:  2 files
diff -r f9e8de1c84b2d60cc727ec5b64da6fe67616b7ed -r f2b125ad393e0a1a0a72ddfd5497855576b5d1dd lib/galaxy/web/framework/webapp.py
--- a/lib/galaxy/web/framework/webapp.py
+++ b/lib/galaxy/web/framework/webapp.py
@@ -504,9 +504,9 @@
             for char in filter( lambda x: x not in string.ascii_lowercase + string.digits + '-', username ):
                 username = username.replace( char, '-' )
             # Find a unique username - user can change it later
-            if ( self.sa_session.query( self.app.model.User ).filter_by( username=username ).first() ):
+            if self.sa_session.query( self.app.model.User ).filter_by( username=username ).first():
                 i = 1
-                while ( self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first() ):
+                while self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first():
                     i += 1
                 username += '-' + str(i)
             user.username = username
@@ -701,11 +701,6 @@
     def template_context( self ):
         return dict()
 
-    def make_form_data( self, name, **kwargs ):
-        rval = self.template_context[name] = FormData()
-        rval.values.update( kwargs )
-        return rval
-
     def set_message( self, message, type=None ):
         """
         Convenience method for setting the 'message' and 'message_type'
diff -r f9e8de1c84b2d60cc727ec5b64da6fe67616b7ed -r f2b125ad393e0a1a0a72ddfd5497855576b5d1dd lib/galaxy/webapps/demo_sequencer/framework/__init__.py
--- a/lib/galaxy/webapps/demo_sequencer/framework/__init__.py
+++ b/lib/galaxy/webapps/demo_sequencer/framework/__init__.py
@@ -116,10 +116,7 @@
     @galaxy.web.framework.base.lazy_property
     def template_context( self ):
         return dict()
-    def make_form_data( self, name, **kwargs ):
-        rval = self.template_context[name] = FormData()
-        rval.values.update( kwargs )
-        return rval
+
     def set_message( self, message, type=None ):
         """
         Convenience method for setting the 'message' and 'message_type'
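The username hunk above finds a free username by appending -1, -2, ... until the database query comes back empty. A standalone sketch of that suffix-probing pattern, with an exists() callable standing in for the sa_session query (both names here are illustrative):

    def uniquify_username(username, exists):
        # exists(name) -> bool stands in for the
        # User.filter_by(username=...).first() query in the diff.
        if not exists(username):
            return username
        i = 1
        while exists(username + '-' + str(i)):
            i += 1
        return username + '-' + str(i)

    taken = {'jdoe', 'jdoe-1'}
    assert uniquify_username('jdoe', lambda n: n in taken) == 'jdoe-2'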
https://bitbucket.org/galaxy/galaxy-central/commits/636f0fff0061/
Changeset:   636f0fff0061
User:        nsoranzo
Date:        2014-11-05 13:28:14+00:00
Summary:     Add keys() and values() methods to Bunch.
Affected #:  1 file
diff -r f2b125ad393e0a1a0a72ddfd5497855576b5d1dd -r 636f0fff006148e875f55bab684821cc60b0630f lib/galaxy/util/bunch.py
--- a/lib/galaxy/util/bunch.py
+++ b/lib/galaxy/util/bunch.py
@@ -17,6 +17,12 @@
     def items(self):
         return self.__dict__.items()
 
+    def keys(self):
+        return self.__dict__.keys()
+
+    def values(self):
+        return self.__dict__.values()
+
     def __str__(self):
         return '%s' % self.__dict__
 
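With keys() and values() added alongside the existing items(), a Bunch now supports the common dict-style iteration patterns. A self-contained stand-in mirroring the patched class (the **kwds constructor is assumed from the usual Bunch pattern, not shown in this hunk):

    class Bunch(object):
        # Minimal stand-in for lib/galaxy/util/bunch.py after this commit.
        def __init__(self, **kwds):
            self.__dict__.update(kwds)
        def items(self):
            return self.__dict__.items()
        def keys(self):
            return self.__dict__.keys()
        def values(self):
            return self.__dict__.values()

    b = Bunch(name='test', size=3)
    assert set(b.keys()) == set(['name', 'size'])
    assert set(b.values()) == set(['test', 3])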
https://bitbucket.org/galaxy/galaxy-central/commits/e14c29154b8a/
Changeset:   e14c29154b8a
User:        nsoranzo
Date:        2014-11-04 15:48:33+00:00
Summary:     Bugs, doc and pylint fixes.
Affected #:  6 files
diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -13,7 +13,6 @@
 
 from paste.httpexceptions import HTTPBadRequest, HTTPInternalServerError
 from paste.httpexceptions import HTTPNotImplemented, HTTPRequestRangeNotSatisfiable
-from galaxy import exceptions
 from galaxy.exceptions import ItemAccessibilityException, ItemDeletionException, ItemOwnershipException
 from galaxy.exceptions import MessageException
 
@@ -27,7 +26,7 @@
 from galaxy.web.form_builder import AddressField, CheckboxField, SelectField, TextArea, TextField
 from galaxy.web.form_builder import build_select_field, HistoryField, PasswordField, WorkflowField, WorkflowMappingField
 from galaxy.workflow.modules import module_factory, WorkflowModuleInjector, MissingToolException
-from galaxy.model.orm import eagerload, eagerload_all, desc, not_
+from galaxy.model.orm import eagerload, eagerload_all, desc
 from galaxy.security.validate_user_input import validate_publicname
 from galaxy.util.sanitize_html import sanitize_html
 from galaxy.model.item_attrs import Dictifiable, UsesAnnotations
@@ -85,7 +84,7 @@
         Convenience method to get a model object with the specified checks.
         """
         return managers_base.get_object( trans, id, class_name, check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted )
-  
+
     # this should be here - but catching errors from sharable item controllers that *should* have SharableItemMixin
     #   but *don't* then becomes difficult
     #def security_check( self, trans, item, check_ownership=False, check_accessible=False ):
@@ -322,7 +321,7 @@
 
         # Initialize count dict with all states.
         state_count_dict = {}
-        for k, state in trans.app.model.Dataset.states.items():
+        for state in trans.app.model.Dataset.states.values():
             state_count_dict[ state ] = 0
 
         # Process query results, adding to count dict.
@@ -370,7 +369,7 @@
         # init counts, ids for each state
         state_counts = {}
         state_ids = {}
-        for key, state in trans.app.model.Dataset.states.items():
+        for state in trans.app.model.Dataset.states.values():
             state_counts[ state ] = 0
             state_ids[ state ] = []
 
@@ -566,7 +565,7 @@
             # DEPRECATION: We still support unencoded ids for backward compatibility
             try:
                 dataset_id = int( dataset_id )
-            except ValueError, v_err:
+            except ValueError:
                 raise HTTPBadRequest( "Invalid dataset id: %s." % str( dataset_id ) )
 
         try:
@@ -589,7 +588,7 @@
                 error( "You are not allowed to access this dataset" )
 
             if check_state and data.state == trans.model.Dataset.states.UPLOAD:
-                    return trans.show_error_message( "Please wait until this dataset finishes uploading "
+                return trans.show_error_message( "Please wait until this dataset finishes uploading "
                                                    + "before attempting to view it." )
         return data
 
@@ -651,7 +650,7 @@
                     check_ownership=check_ownership,
                     check_accessible=check_accessible,
                     check_state=check_state )
-            except Exception, exception:
+            except Exception:
                 pass
             hdas.append( hda )
         return hdas
@@ -711,7 +710,7 @@
 
         # ---- return here if deleted AND purged OR can't access
         purged = ( hda.purged or hda.dataset.purged )
-        if ( hda.deleted and purged ):
+        if hda.deleted and purged:
             #TODO: to_dict should really go AFTER this - only summary data
             return trans.security.encode_dict_ids( hda_dict )
 
@@ -747,10 +746,6 @@
         #TODO: it may also be wiser to remove from here and add as API call that loads the visualizations
         #           when the visualizations button is clicked (instead of preloading/pre-checking)
 
-        # ---- return here if deleted
-        if hda.deleted and not purged:
-            return trans.security.encode_dict_ids( hda_dict )
-
         return trans.security.encode_dict_ids( hda_dict )
 
     def get_inaccessible_hda_dict( self, trans, hda ):
@@ -892,7 +887,8 @@
     #           or ( trans.app.security_agent.can_add_library_item( user.all_roles(), item ) ) )
 
     def can_current_user_add_to_library_item( self, trans, item ):
-        if not trans.user: return False
+        if not trans.user:
+            return False
         return (  ( trans.user_is_admin() )
                or ( trans.app.security_agent.can_add_library_item( trans.get_current_user_roles(), item ) ) )
 
@@ -1411,11 +1407,6 @@
         # Get data provider.
         track_data_provider = trans.app.data_provider_registry.get_data_provider( trans, original_dataset=dataset )
 
-        if isinstance( dataset, trans.app.model.HistoryDatasetAssociation ):
-            hda_ldda = "hda"
-        elif isinstance( dataset, trans.app.model.LibraryDatasetDatasetAssociation ):
-            hda_ldda = "ldda"
-
         # Get track definition.
         return {
             "track_type": dataset.datatype.track_type,
@@ -1703,7 +1694,7 @@
         data['name'] = workflow.name
         data['annotation'] = annotation_str
         if workflow.uuid is not None:
-            data['uuid'] = str(workflow.uuid)  
+            data['uuid'] = str(workflow.uuid)
         data['steps'] = {}
         # For each step, rebuild the form and encode the state
         for step in workflow.steps:
@@ -1741,18 +1732,16 @@
             step_dict['inputs'] = module.get_runtime_input_dicts( annotation_str )
             # User outputs
             step_dict['user_outputs'] = []
-            """
-            module_outputs = module.get_data_outputs()
-            step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step )
-            for output in step_outputs:
-                name = output.output_name
-                annotation = ""
-                for module_output in module_outputs:
-                    if module_output.get( 'name', None ) == name:
-                        output_type = module_output.get( 'extension', '' )
-                        break
-                data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type }
-            """
+#            module_outputs = module.get_data_outputs()
+#            step_outputs = trans.sa_session.query( WorkflowOutput ).filter( step=step )
+#            for output in step_outputs:
+#                name = output.output_name
+#                annotation = ""
+#                for module_output in module_outputs:
+#                    if module_output.get( 'name', None ) == name:
+#                        output_type = module_output.get( 'extension', '' )
+#                        break
+#                data['outputs'][name] = { 'name' : name, 'annotation' : annotation, 'type' : output_type }
 
             # All step outputs
             step_dict['outputs'] = []
@@ -2137,7 +2126,7 @@
         # We need the type of each template field widget
         widgets = item.get_template_widgets( trans )
         # The list of widgets may include an AddressField which we need to save if it is new
-        for index, widget_dict in enumerate( widgets ):
+        for widget_dict in widgets:
             widget = widget_dict[ 'widget' ]
             if isinstance( widget, AddressField ):
                 value = util.restore_text( params.get( widget.name, '' ) )
@@ -2218,7 +2207,7 @@
                         trans.sa_session.flush()
                         info_association = sra.run
                     else:
-                       info_association = assoc.run
+                        info_association = assoc.run
                 else:
                     info_association = None
             if info_association:
@@ -2362,7 +2351,7 @@
     def widget_fields_have_contents( self, widgets ):
         # Return True if any of the fields in widgets contain contents, widgets is a list of dictionaries that looks something like:
         # [{'widget': <galaxy.web.form_builder.TextField object at 0x10867aa10>, 'helptext': 'Field 0 help (Optional)', 'label': 'Field 0'}]
-        for i, field in enumerate( widgets ):
+        for field in widgets:
             if ( isinstance( field[ 'widget' ], TextArea ) or isinstance( field[ 'widget' ], TextField ) ) and field[ 'widget' ].value:
                 return True
             if isinstance( field[ 'widget' ], SelectField ) and field[ 'widget' ].options:
@@ -2383,7 +2372,7 @@
 
     def clean_field_contents( self, widgets, **kwd ):
         field_contents = {}
-        for index, widget_dict in enumerate( widgets ):
+        for widget_dict in widgets:
             widget = widget_dict[ 'widget' ]
             value = kwd.get( widget.name, ''  )
             if isinstance( widget, CheckboxField ):
@@ -2432,7 +2421,7 @@
         '''
         params = util.Params( kwd )
         values = {}
-        for index, field in enumerate( form_definition.fields ):
+        for field in form_definition.fields:
             field_type = field[ 'type' ]
             field_name = field[ 'name' ]
             input_value = params.get( field_name, '' )
@@ -2584,7 +2573,7 @@
         if message:
             return trans.fill_template( '/sharing_base.mako', item=self.get_item( trans, id ), message=message, status='error' )
         user.username = username
-        trans.sa_session.flush
+        trans.sa_session.flush()
         return self.sharing( trans, id, **kwargs )
 
     @web.expose
@@ -2646,34 +2635,34 @@
     @web.require_login( "share Galaxy items" )
     def sharing( self, trans, id, **kwargs ):
         """ Handle item sharing. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     @web.expose
     @web.require_login( "share Galaxy items" )
     def share( self, trans, id=None, email="", **kwd ):
         """ Handle sharing an item with a particular user. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     @web.expose
     def display_by_username_and_slug( self, trans, username, slug ):
         """ Display item by username and slug. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     @web.json
     @web.require_login( "get item name and link" )
     def get_name_and_link_async( self, trans, id=None ):
         """ Returns item's name and link. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     @web.expose
     @web.require_login("get item content asynchronously")
     def get_item_content_async( self, trans, id ):
         """ Returns item content in HTML format. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
     def get_item( self, trans, id ):
         """ Return item based on id. """
-        raise "Unimplemented Method"
+        raise NotImplementedError()
 
 
 class UsesQuotaMixin( object ):
@@ -2690,7 +2679,7 @@
     def _get_user_tags( self, trans, item_class_name, id ):
         user = trans.user
         tagged_item = self._get_tagged_item( trans, item_class_name, id )
-        return [ tag for tag in tagged_item.tags if ( tag.user == user ) ]
+        return [ tag for tag in tagged_item.tags if tag.user == user ]
 
     def _get_tagged_item( self, trans, item_class_name, id, check_ownership=True ):
         tagged_item = self.get_object( trans, id, item_class_name, check_ownership=check_ownership, check_accessible=True )
@@ -2754,7 +2743,6 @@
         return sorted( tags )
 
 
-
 class UsesExtendedMetadataMixin( SharableItemSecurityMixin ):
     """ Mixin for getting and setting item extended metadata. """
 
@@ -2844,10 +2832,10 @@
             yield prefix, ("%s" % (meta)).encode("utf8", errors='replace')
 
 
-"""
-Deprecated: `BaseController` used to be available under the name `Root`
-"""
 class ControllerUnavailable( Exception ):
+    """
+    Deprecated: `BaseController` used to be available under the name `Root`
+    """
     pass
 
 ## ---- Utility methods -------------------------------------------------------
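Among the controller.py changes above, replacing raise "Unimplemented Method" with raise NotImplementedError() fixes real breakage: string exceptions were removed in Python 2.6, where raising a plain string is itself a TypeError, and a string can never be caught as an exception class. A minimal sketch of the abstract-method pattern the mixin relies on (class names here are illustrative):

    class SharableMixinSketch(object):
        def sharing(self, trans, id, **kwargs):
            # A real exception class can be caught and subclassed;
            # raising a string is a TypeError on Python >= 2.6.
            raise NotImplementedError()

    class PageSharingSketch(SharableMixinSketch):
        def sharing(self, trans, id, **kwargs):
            return 'sharing item %s' % id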
diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -92,7 +92,7 @@
 
         :rtype:     dictionary
         :returns:   detailed history information from
-            :func:`galaxy.web.base.controller.UsesHistoryDatasetAssociationMixin.get_history_dict`
+            :func:`galaxy.web.base.controller.UsesHistoryMixin.get_history_dict`
         """
         history_id = id
         deleted = string_as_bool( deleted )
diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/api/lda_datasets.py
--- a/lib/galaxy/webapps/galaxy/api/lda_datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/lda_datasets.py
@@ -480,7 +480,7 @@
         tool.update_state( trans, tool.inputs_by_page[ 0 ], state.inputs, kwd )
         tool_params = state.inputs
         dataset_upload_inputs = []
-        for input_name, input in tool.inputs.iteritems():
+        for input in tool.inputs.itervalues():
             if input.type == "upload_dataset":
                 dataset_upload_inputs.append( input )
         library_bunch = upload_common.handle_library_params( trans, {}, trans.security.encode_id( folder.id ) )
@@ -536,7 +536,7 @@
         * POST /api/libraries/datasets/download/{format}
             Downloads requested datasets (identified by encoded IDs) in requested format.
 
-        example: ``GET localhost:8080/api/libraries/datasets/download/tbz?ldda_ids%255B%255D=a0d84b45643a2678&ldda_ids%255B%255D=fe38c84dcd46c828``
+        example: ``GET localhost:8080/api/libraries/datasets/download/tbz?ld_ids%255B%255D=a0d84b45643a2678&ld_ids%255B%255D=fe38c84dcd46c828``
 
         .. note:: supported format values are: 'zip', 'tgz', 'tbz', 'uncompressed'
 
@@ -554,7 +554,7 @@
         datasets_to_download = kwd.get( 'ld_ids%5B%5D', None )
         if datasets_to_download is None:
             datasets_to_download = kwd.get( 'ld_ids', None )
-        if ( datasets_to_download is not None ):
+        if datasets_to_download is not None:
             datasets_to_download = util.listify( datasets_to_download )
             for dataset_id in datasets_to_download:
                 try:
@@ -570,128 +570,128 @@
             raise exceptions.RequestParameterMissingException( 'Request has to contain a list of dataset ids to download.' )
 
         if format in [ 'zip', 'tgz', 'tbz' ]:
-                # error = False
-                killme = string.punctuation + string.whitespace
-                trantab = string.maketrans( killme, '_'*len( killme ) )
-                try:
-                    outext = 'zip'
-                    if format == 'zip':
-                        # Can't use mkstemp - the file must not exist first
-                        tmpd = tempfile.mkdtemp()
-                        util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, self.app.config.gid )
-                        tmpf = os.path.join( tmpd, 'library_download.' + format )
-                        if trans.app.config.upstream_gzip:
-                            archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True )
+            # error = False
+            killme = string.punctuation + string.whitespace
+            trantab = string.maketrans( killme, '_'*len( killme ) )
+            try:
+                outext = 'zip'
+                if format == 'zip':
+                    # Can't use mkstemp - the file must not exist first
+                    tmpd = tempfile.mkdtemp()
+                    util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, self.app.config.gid )
+                    tmpf = os.path.join( tmpd, 'library_download.' + format )
+                    if trans.app.config.upstream_gzip:
+                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True )
+                    else:
+                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
+                    archive.add = lambda x, y: archive.write( x, y.encode( 'CP437' ) )
+                elif format == 'tgz':
+                    if trans.app.config.upstream_gzip:
+                        archive = StreamBall( 'w|' )
+                        outext = 'tar'
+                    else:
+                        archive = StreamBall( 'w|gz' )
+                        outext = 'tgz'
+                elif format == 'tbz':
+                    archive = StreamBall( 'w|bz2' )
+                    outext = 'tbz2'
+            except ( OSError, zipfile.BadZipfile ):
+                log.exception( "Unable to create archive for download" )
+                raise exceptions.InternalServerError( "Unable to create archive for download." )
+            except Exception:
+                log.exception( "Unexpected error %s in create archive for download" % sys.exc_info()[ 0 ] )
+                raise exceptions.InternalServerError( "Unable to create archive for download." )
+            composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
+            seen = []
+            for ld in library_datasets:
+                ldda = ld.library_dataset_dataset_association
+                ext = ldda.extension
+                is_composite = ext in composite_extensions
+                path = ""
+                parent_folder = ldda.library_dataset.folder
+                while parent_folder is not None:
+                    # Exclude the now-hidden "root folder"
+                    if parent_folder.parent is None:
+                        path = os.path.join( parent_folder.library_root[ 0 ].name, path )
+                        break
+                    path = os.path.join( parent_folder.name, path )
+                    parent_folder = parent_folder.parent
+                path += ldda.name
+                while path in seen:
+                    path += '_'
+                seen.append( path )
+                zpath = os.path.split(path)[ -1 ]  # comes as base_name/fname
+                outfname, zpathext = os.path.splitext( zpath )
+
+                if is_composite:  # need to add all the components from the extra_files_path to the zip
+                    if zpathext == '':
+                        zpath = '%s.html' % zpath  # fake the real nature of the html file
+                    try:
+                        if format == 'zip':
+                            archive.add( ldda.dataset.file_name, zpath )  # add the primary of a composite set
                         else:
-                            archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
-                        archive.add = lambda x, y: archive.write( x, y.encode( 'CP437' ) )
-                    elif format == 'tgz':
-                        if trans.app.config.upstream_gzip:
-                            archive = StreamBall( 'w|' )
-                            outext = 'tar'
-                        else:
-                            archive = StreamBall( 'w|gz' )
-                            outext = 'tgz'
-                    elif format == 'tbz':
-                        archive = StreamBall( 'w|bz2' )
-                        outext = 'tbz2'
-                except ( OSError, zipfile.BadZipfile ):
-                    log.exception( "Unable to create archive for download" )
-                    raise exceptions.InternalServerError( "Unable to create archive for download." )
-                except Exception:
-                    log.exception( "Unexpected error %s in create archive for download" % sys.exc_info()[ 0 ] )
-                    raise exceptions.InternalServerError( "Unable to create archive for download." )
-                composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
-                seen = []
-                for ld in library_datasets:
-                    ldda = ld.library_dataset_dataset_association
-                    ext = ldda.extension
-                    is_composite = ext in composite_extensions
-                    path = ""
-                    parent_folder = ldda.library_dataset.folder
-                    while parent_folder is not None:
-                        # Exclude the now-hidden "root folder"
-                        if parent_folder.parent is None:
-                            path = os.path.join( parent_folder.library_root[ 0 ].name, path )
-                            break
-                        path = os.path.join( parent_folder.name, path )
-                        parent_folder = parent_folder.parent
-                    path += ldda.name
-                    while path in seen:
-                        path += '_'
-                    seen.append( path )
-                    zpath = os.path.split(path)[ -1 ]  # comes as base_name/fname
-                    outfname, zpathext = os.path.splitext( zpath )
+                            archive.add( ldda.dataset.file_name, zpath, check_file=True )  # add the primary of a composite set
+                    except IOError:
+                        log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
+                        raise exceptions.InternalServerError( "Unable to create archive for download." )
+                    except ObjectNotFound:
+                        log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+                        raise exceptions.ObjectNotFound( "Requested dataset not found. " )
+                    except Exception, e:
+                        log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
+                        raise exceptions.InternalServerError( "Unable to add composite parent to temporary library download archive. " + str( e ) )
 
-                    if is_composite:  # need to add all the components from the extra_files_path to the zip
-                        if zpathext == '':
-                            zpath = '%s.html' % zpath  # fake the real nature of the html file
+                    flist = glob.glob(os.path.join(ldda.dataset.extra_files_path, '*.*'))  # glob returns full paths
+                    for fpath in flist:
+                        efp, fname = os.path.split(fpath)
+                        if fname > '':
+                            fname = fname.translate(trantab)
                         try:
                             if format == 'zip':
-                                archive.add( ldda.dataset.file_name, zpath )  # add the primary of a composite set
+                                archive.add( fpath, fname )
                             else:
-                                archive.add( ldda.dataset.file_name, zpath, check_file=True )  # add the primary of a composite set
+                                archive.add( fpath, fname, check_file=True )
                         except IOError:
-                            log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
+                            log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname) )
                             raise exceptions.InternalServerError( "Unable to create archive for download." )
                         except ObjectNotFound:
-                            log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
-                            raise exceptions.ObjectNotFound( "Requested dataset not found. " )
-                        except Exception, e:
-                            log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
-                            raise exceptions.InternalServerError( "Unable to add composite parent to temporary library download archive. " + str( e ) )
-
-                        flist = glob.glob(os.path.join(ldda.dataset.extra_files_path, '*.*'))  # glob returns full paths
-                        for fpath in flist:
-                            efp, fname = os.path.split(fpath)
-                            if fname > '':
-                                fname = fname.translate(trantab)
-                            try:
-                                if format == 'zip':
-                                    archive.add( fpath, fname )
-                                else:
-                                    archive.add( fpath, fname, check_file=True )
-                            except IOError:
-                                log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname) )
-                                raise exceptions.InternalServerError( "Unable to create archive for download." )
-                            except ObjectNotFound:
-                                log.exception( "Requested dataset %s does not exist on the host." % fpath )
-                                raise exceptions.ObjectNotFound( "Requested dataset not found." )
-                            except Exception, e:
-                                log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) )
-                                raise exceptions.InternalServerError( "Unable to add dataset to temporary library download archive . " + str( e ) )
-
-                    else:  # simple case
-                        try:
-                            if format == 'zip':
-                                archive.add( ldda.dataset.file_name, path )
-                            else:
-                                archive.add( ldda.dataset.file_name, path, check_file=True )
-                        except IOError:
-                            log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name )
-                            raise exceptions.InternalServerError( "Unable to create archive for download" )
-                        except ObjectNotFound:
-                            log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+                            log.exception( "Requested dataset %s does not exist on the host." % fpath )
                             raise exceptions.ObjectNotFound( "Requested dataset not found." )
                         except Exception, e:
                             log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) )
-                            raise exceptions.InternalServerError( "Unknown error. " + str( e ) )
-                lname = 'selected_dataset'
-                fname = lname.replace( ' ', '_' ) + '_files'
-                if format == 'zip':
-                    archive.close()
-                    trans.response.set_content_type( "application/octet-stream" )
-                    trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext )
-                    archive = util.streamball.ZipBall( tmpf, tmpd )
-                    archive.wsgi_status = trans.response.wsgi_status()
-                    archive.wsgi_headeritems = trans.response.wsgi_headeritems()
-                    return archive.stream
-                else:
-                    trans.response.set_content_type( "application/x-tar" )
-                    trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext )
-                    archive.wsgi_status = trans.response.wsgi_status()
-                    archive.wsgi_headeritems = trans.response.wsgi_headeritems()
-                    return archive.stream
+                            raise exceptions.InternalServerError( "Unable to add dataset to temporary library download archive . " + str( e ) )
+
+                else:  # simple case
+                    try:
+                        if format == 'zip':
+                            archive.add( ldda.dataset.file_name, path )
+                        else:
+                            archive.add( ldda.dataset.file_name, path, check_file=True )
+                    except IOError:
+                        log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name )
+                        raise exceptions.InternalServerError( "Unable to create archive for download" )
+                    except ObjectNotFound:
+                        log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+                        raise exceptions.ObjectNotFound( "Requested dataset not found." )
+                    except Exception, e:
+                        log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) )
+                        raise exceptions.InternalServerError( "Unknown error. " + str( e ) )
+            lname = 'selected_dataset'
+            fname = lname.replace( ' ', '_' ) + '_files'
+            if format == 'zip':
+                archive.close()
+                trans.response.set_content_type( "application/octet-stream" )
+                trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext )
+                archive = util.streamball.ZipBall( tmpf, tmpd )
+                archive.wsgi_status = trans.response.wsgi_status()
+                archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+                return archive.stream
+            else:
+                trans.response.set_content_type( "application/x-tar" )
+                trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext )
+                archive.wsgi_status = trans.response.wsgi_status()
+                archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+                return archive.stream
         elif format == 'uncompressed':
             if len(library_datasets) != 1:
                 raise exceptions.RequestParameterInvalidException( "You can download only one uncompressed file at once." )
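The dedented download code above selects a streaming archive mode per requested format: 'w|' (plain tar, when an upstream proxy handles gzip), 'w|gz' for tgz and 'w|bz2' for tbz. A minimal sketch of the same mode selection using the standard tarfile module directly (StreamBall is Galaxy's wrapper; plain tarfile is used here only for illustration):

    import io
    import tarfile

    def open_stream_archive(fileobj, format, upstream_gzip=False):
        # The pipe in 'w|gz' requests write-only streaming output,
        # matching the StreamBall modes in the diff above.
        if format == 'tgz':
            mode = 'w|' if upstream_gzip else 'w|gz'
        elif format == 'tbz':
            mode = 'w|bz2'
        else:
            raise ValueError('unsupported format: %s' % format)
        return tarfile.open(mode=mode, fileobj=fileobj)

    archive = open_stream_archive(io.BytesIO(), 'tbz')
    archive.close()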
diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/api/library_contents.py
--- a/lib/galaxy/webapps/galaxy/api/library_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/library_contents.py
@@ -146,17 +146,21 @@
         the HDA's encoded id in ``from_hda_id`` (and optionally ``ldda_message``).
 
         :type   library_id: str
-        :param  library_id: encoded id string of the library that contains this item
+        :param  library_id: encoded id string of the library where to create the new item
         :type   payload:    dict
         :param  payload:    dictionary structure containing:
 
             * folder_id:    the parent folder of the new item
-            * create_type:  the type of item to create ('file' or 'folder')
+            * create_type:  the type of item to create ('file', 'folder' or 'collection')
             * from_hda_id:  (optional) the id of an accessible HDA to copy into the
                 library
             * ldda_message: (optional) the new message attribute of the LDDA created
             * extended_metadata: (optional) dub-dictionary containing any extended
                 metadata to associate with the item
+            * link_data_only: (optional) either 'copy_files' (default) or 'link_to_files'
+            * upload_option: (optional) one of 'upload_file' (default), 'upload_directory' or 'upload_paths'
+            * server_dir: (optional) only if upload_option is 'upload_directory'
+            * filesystem_paths: (optional) only if upload_option is 'upload_paths' and the user is an admin
 
         :rtype:     dict
         :returns:   a dictionary containing the id, name,
@@ -217,11 +221,9 @@
             return output
         else:
             rval = []
-            for k, v in output.items():
+            for v in output.values():
                 if ex_meta_payload is not None:
-                    """
-                    If there is extended metadata, store it, attach it to the dataset, and index it
-                    """
+                    # If there is extended metadata, store it, attach it to the dataset, and index it
                     ex_meta = ExtendedMetadata(ex_meta_payload)
                     trans.sa_session.add( ex_meta )
                     v.extended_metadata = ex_meta
@@ -343,9 +345,9 @@
             trans.sa_session.flush()
 
     def __decode_library_content_id( self, trans, content_id ):
-        if ( len( content_id ) % 16 == 0 ):
+        if len( content_id ) % 16 == 0:
             return 'LibraryDataset', content_id
-        elif ( content_id.startswith( 'F' ) ):
+        elif content_id.startswith( 'F' ):
             return 'LibraryFolder', content_id[ 1: ]
         else:
             raise HTTPBadRequest( 'Malformed library content id ( %s ) specified, unable to decode.' % str( content_id ) )
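The __decode_library_content_id() hunk above distinguishes content types purely by the shape of the encoded id: dataset ids have a length divisible by 16, while folder ids carry an extra 'F' prefix. A standalone sketch of that dispatch:

    def decode_library_content_id(content_id):
        # Encoded dataset ids come in 16-character blocks; folder ids
        # are the dataset-style id prefixed with 'F'.
        if len(content_id) % 16 == 0:
            return 'LibraryDataset', content_id
        elif content_id.startswith('F'):
            return 'LibraryFolder', content_id[1:]
        raise ValueError('Malformed library content id: %s' % content_id)

    assert decode_library_content_id('a' * 16) == ('LibraryDataset', 'a' * 16)
    assert decode_library_content_id('F' + 'a' * 16) == ('LibraryFolder', 'a' * 16)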
diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -1,8 +1,6 @@
 import logging
 import os
-import tempfile
 import urllib
-import zipfile
 
 from galaxy import datatypes, eggs, model, util, web
 from galaxy.datatypes.display_applications.util import decode_dataset_user, encode_dataset_user
@@ -43,8 +41,8 @@
             accepted_filter_labels_and_vals = { "Active" : "False", "Deleted" : "True", "All": "All" }
             accepted_filters = []
             for label, val in accepted_filter_labels_and_vals.items():
-               args = { self.key: val }
-               accepted_filters.append( grids.GridColumnFilter( label, args) )
+                args = { self.key: val }
+                accepted_filters.append( grids.GridColumnFilter( label, args) )
             return accepted_filters
 
     # Grid definition
@@ -147,7 +145,7 @@
         trans.response.set_content_type( 'text/plain' )
         exit_code = ""
         try:
-            job = self._get_job_for_dataset( dataset_id )
+            job = self._get_job_for_dataset( trans, dataset_id )
             exit_code = job.exit_code
         except:
             exit_code = "Invalid dataset ID or you are not allowed to access this dataset"
@@ -323,40 +321,38 @@
                     if params.annotation:
                         annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' )
                         self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation )
-                    """
                     # This block on controller code is inactive until the 'extended_metadata' edit box is added back into the UI
                     # Add or delete extended metadata
-                    if params.extended_metadata:
-                        em_string = params.extended_metadata
-                        if len(em_string):
-                            em_payload = None
-                            try:
-                                em_payload = loads(em_string)
-                            except Exception, e:
-                                message = 'Invalid JSON input'
-                                error = True
-                            if em_payload is not None:
-                                if data is not None:
-                                    ex_obj = self.get_item_extended_metadata_obj(trans, data)
-                                    if ex_obj is not None:
-                                        self.unset_item_extended_metadata_obj(trans, data)
-                                        self.delete_extended_metadata(trans, ex_obj)
-                                    ex_obj = self.create_extended_metadata(trans, em_payload)
-                                    self.set_item_extended_metadata_obj(trans, data, ex_obj)
-                                    message = "Updated Extended metadata '%s'." % data.name
-                                    status = 'done'
-                                else:
-                                    message = "data not found"
-                                    error = True
-                    else:
-                        if data is not None:
-                            ex_obj = self.get_item_extended_metadata_obj(trans, data)
-                            if ex_obj is not None:
-                                self.unset_item_extended_metadata_obj(trans, data)
-                                self.delete_extended_metadata(trans, ex_obj)
-                        message = "Deleted Extended metadata '%s'." % data.name
-                        status = 'done'
-                    """
+#                    if params.extended_metadata:
+#                        em_string = params.extended_metadata
+#                        if len(em_string):
+#                            em_payload = None
+#                            try:
+#                                em_payload = loads(em_string)
+#                            except Exception, e:
+#                                message = 'Invalid JSON input'
+#                                error = True
+#                            if em_payload is not None:
+#                                if data is not None:
+#                                    ex_obj = self.get_item_extended_metadata_obj(trans, data)
+#                                    if ex_obj is not None:
+#                                        self.unset_item_extended_metadata_obj(trans, data)
+#                                        self.delete_extended_metadata(trans, ex_obj)
+#                                    ex_obj = self.create_extended_metadata(trans, em_payload)
+#                                    self.set_item_extended_metadata_obj(trans, data, ex_obj)
+#                                    message = "Updated Extended metadata '%s'." % data.name
+#                                    status = 'done'
+#                                else:
+#                                    message = "data not found"
+#                                    error = True
+#                    else:
+#                        if data is not None:
+#                            ex_obj = self.get_item_extended_metadata_obj(trans, data)
+#                            if ex_obj is not None:
+#                                self.unset_item_extended_metadata_obj(trans, data)
+#                                self.delete_extended_metadata(trans, ex_obj)
+#                        message = "Deleted Extended metadata '%s'." % data.name
+#                        status = 'done'
 
                     # If setting metadata previously failed and all required elements have now been set, clear the failed state.
                     if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta():
@@ -954,13 +950,11 @@
         has_parameter_errors = False
         inherit_chain = hda.source_dataset_chain
         if inherit_chain:
-            job_dataset_association, dataset_association_container_name = inherit_chain[-1]
+            job_dataset_association = inherit_chain[-1][0]
         else:
             job_dataset_association = hda
         if job_dataset_association.creating_job_associations:
-            for assoc in job_dataset_association.creating_job_associations:
-                job = assoc.job
-                break
+            job = job_dataset_association.creating_job_associations[0].job
             if job:
                 # Get the tool object
                 try:
@@ -1024,7 +1018,7 @@
                     trans.sa_session.flush()
                     target_history_ids.append( new_history.id )
                 if user:
-                    target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if ( hist is not None and hist.user == user )]
+                    target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if hist is not None and hist.user == user ]
                 else:
                     target_histories = [ history ]
                 if len( target_histories ) != len( target_history_ids ):
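One fix in the dataset.py hunks above is easy to miss: the exit-code view called self._get_job_for_dataset( dataset_id ) without the trans argument, so every call raised a TypeError that the bare except then masked as an invalid-dataset message. A toy reproduction of that failure mode (Controller here is illustrative, not Galaxy's class):

    class Controller(object):
        def _get_job_for_dataset(self, trans, dataset_id):
            return 'job-for-%s' % dataset_id

    c = Controller()
    try:
        c._get_job_for_dataset('42')  # missing trans: TypeError
    except TypeError:
        pass  # the controller's bare except hid exactly this error
    assert c._get_job_for_dataset(object(), '42') == 'job-for-42'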
diff -r 636f0fff006148e875f55bab684821cc60b0630f -r e14c29154b8ac2ce85aacbe8435b8b73108c8464 lib/galaxy/webapps/galaxy/controllers/library_common.py
--- a/lib/galaxy/webapps/galaxy/controllers/library_common.py
+++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py
@@ -162,6 +162,7 @@
                                                           default_action=default_action,
                                                           message=util.sanitize_text( message ),
                                                           status=status ) )
+
     @web.expose
     def library_info( self, trans, cntrller, **kwd ):
         params = util.Params( kwd )
@@ -222,6 +223,7 @@
                                     inherited=inherited,
                                     message=message,
                                     status=status )
+
     @web.expose
     def library_permissions( self, trans, cntrller, **kwd ):
         params = util.Params( kwd )
@@ -269,6 +271,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def create_folder( self, trans, cntrller, parent_id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -346,6 +349,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def folder_info( self, trans, cntrller, id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -403,6 +407,7 @@
                                     inherited=inherited,
                                     message=message,
                                     status=status )
+
     @web.expose
     def folder_permissions( self, trans, cntrller, id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -453,6 +458,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def ldda_edit_info( self, trans, cntrller, library_id, folder_id, id, **kwd ):
         params = util.Params( kwd )
@@ -606,6 +612,7 @@
                                     inherited=inherited,
                                     message=message,
                                     status=status )
+
     @web.expose
     def ldda_info( self, trans, cntrller, library_id, folder_id, id, **kwd ):
         params = util.Params( kwd )
@@ -655,6 +662,7 @@
                                     inherited=inherited,
                                     message=message,
                                     status=status )
+
     @web.expose
     def ldda_permissions( self, trans, cntrller, library_id, folder_id, id, **kwd ):
         params = util.Params( kwd )
@@ -793,6 +801,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def upload_library_dataset( self, trans, cntrller, library_id, folder_id, **kwd ):
         params = util.Params( kwd )
@@ -1042,6 +1051,7 @@
                                     ldda_message=ldda_message,
                                     message=message,
                                     status=status )
+
     def upload_dataset( self, trans, cntrller, library_id, folder_id, replace_dataset=None, **kwd ):
         # Set up the traditional tool state/params
         tool_id = 'upload1'
@@ -1132,6 +1142,7 @@
         trans.sa_session.add( job )
         trans.sa_session.flush()
         return output
+
     def make_library_uploaded_dataset( self, trans, cntrller, params, name, path, type, library_bunch, in_folder=None ):
         link_data_only = params.get( 'link_data_only', 'copy_files' )
         uuid_str =  params.get( 'uuid', None )
@@ -1166,6 +1177,7 @@
             trans.sa_session.add_all( ( uploaded_dataset.data, uploaded_dataset.data.dataset ) )
             trans.sa_session.flush()
         return uploaded_dataset
+
     def get_server_dir_uploaded_datasets( self, trans, cntrller, params, full_dir, import_dir_desc, library_bunch, response_code, message ):
         dir_response = self._get_server_dir_files(params, full_dir, import_dir_desc)
         files = dir_response[0]
@@ -1176,6 +1188,7 @@
             name = os.path.basename( file )
             uploaded_datasets.append( self.make_library_uploaded_dataset( trans, cntrller, params, name, file, 'server_dir', library_bunch ) )
         return uploaded_datasets, 200, None
+
     def _get_server_dir_files( self, params, full_dir, import_dir_desc ):
         files = []
         try:
@@ -1212,6 +1225,7 @@
             response_code = 400
             return None, response_code, message
         return files, None, None
+
     def get_path_paste_uploaded_datasets( self, trans, cntrller, params, library_bunch, response_code, message ):
         preserve_dirs = util.string_as_bool( params.get( 'preserve_dirs', False ) )
         uploaded_datasets = []
@@ -1246,6 +1260,7 @@
                     in_folder = None
                 files_and_folders.append((file_path, file, in_folder))
         return files_and_folders
+
     def _paths_list(self, params):
         return [ (l.strip(), os.path.abspath(l.strip())) for l in params.filesystem_paths.splitlines() if l.strip() ]
 
@@ -1463,6 +1478,7 @@
                                             ldda_message=ldda_message,
                                             message=message,
                                             status=status )
+
     def _build_roles_select_list( self, trans, cntrller, library, selected_role_ids=[] ):
         # Get the list of legitimate roles to display on the upload form.  If the library is public,
         # all active roles are legitimate.  If the library is restricted by the LIBRARY_ACCESS permission, only
@@ -1478,10 +1494,11 @@
             return roles_select_list
         else:
             return None
+
     def _build_upload_option_select_list( self, trans, upload_option, is_admin, do_not_include_values=[] ):
         # Build the upload_option select list.  The do_not_include_values param can contain options that
         # should not be included in the list.  For example, the 'upload_directory' option should not be
-        # included if uploading a new version of a librar dataset.
+        # included if uploading a new version of a library dataset.
         upload_refresh_on_change_values = []
         for option_value, option_label in trans.model.LibraryDataset.upload_options:
             if option_value not in do_not_include_values:
@@ -1508,6 +1525,7 @@
                         continue
                 upload_option_select_list.add_option( option_label, option_value, selected=option_value==upload_option )
         return upload_option_select_list
+
     def _get_populated_widgets( self, folder ):
         # See if we have any inherited templates.
         info_association, inherited = folder.get_info_association( inherited=True )
@@ -1517,6 +1535,7 @@
             return self.populate_widgets_from_kwd( trans, widgets, **kwd )
         else:
             return []
+
     @web.expose
     def download_dataset_from_folder( self, trans, cntrller, id, library_id=None, **kwd ):
         """Catches the dataset id and displays file contents as directed"""
@@ -1557,6 +1576,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status='error' ) )
+
     @web.expose
     def library_dataset_info( self, trans, cntrller, id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -1606,6 +1626,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def library_dataset_permissions( self, trans, cntrller, id, library_id, **kwd ):
         params = util.Params( kwd )
@@ -1654,6 +1675,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def make_library_item_public( self, trans, cntrller, library_id, item_type, id, **kwd ):
         params = util.Params( kwd )
@@ -1696,6 +1718,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status=status ) )
+
     @web.expose
     def act_on_multiple_datasets( self, trans, cntrller, library_id=None, ldda_ids='', **kwd ):
         # This method is called from 1 of 3 places:
@@ -2113,6 +2136,7 @@
                                     use_panels=use_panels,
                                     message=message,
                                     status=status )
+
     @web.expose
     def manage_template_inheritance( self, trans, cntrller, item_type, library_id, folder_id=None, ldda_id=None, **kwd ):
         params = util.Params( kwd )
@@ -2159,6 +2183,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status='done' ) )
+
     @web.expose
     def move_library_item( self, trans, cntrller, item_type, item_id, source_library_id='', make_target_current=True, **kwd ):
         # This method is called from one of the following places:
@@ -2374,6 +2399,7 @@
                                     use_panels=use_panels,
                                     message=message,
                                     status=status )
+
     @web.expose
     def delete_library_item( self, trans, cntrller, library_id, item_id, item_type, **kwd ):
         # This action will handle deleting all types of library items.  State is saved for libraries and
@@ -2441,6 +2467,7 @@
                                                               show_deleted=show_deleted,
                                                               message=message,
                                                               status=status ) )
+
     @web.expose
     def undelete_library_item( self, trans, cntrller, library_id, item_id, item_type, **kwd ):
         # This action will handle undeleting all types of library items
@@ -2509,6 +2536,7 @@
                                                               show_deleted=show_deleted,
                                                               message=message,
                                                               status=status ) )
+
     def _check_access( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         can_access = True
         if isinstance( item, trans.model.HistoryDatasetAssociation ):
@@ -2551,6 +2579,7 @@
                                                               show_deleted=show_deleted,
                                                               message=util.sanitize_text( message ),
                                                               status='error' ) )
+
     def _check_add( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission.
         if not ( is_admin or trans.app.security_agent.can_add_library_item( current_user_roles, item ) ):
@@ -2566,6 +2595,7 @@
                                                               show_deleted=show_deleted,
                                                               message=util.sanitize_text( message ),
                                                               status='error' ) )
+
     def _check_manage( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         if isinstance( item, trans.model.LibraryDataset ):
             # Deny access if the user is not an admin and does not have the LIBRARY_MANAGE and DATASET_MANAGE_PERMISSIONS permissions.
@@ -2594,6 +2624,7 @@
                                                               use_panels=use_panels,
                                                               message=util.sanitize_text( message ),
                                                               status='error' ) )
+
     def _check_modify( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         # Deny modification if the user is not an admin and does not have the LIBRARY_MODIFY permission.
         if not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ):
@@ -2619,6 +2650,7 @@
                            .options( eagerload_all( "actions" ) ) \
                            .order_by( trans.app.model.LibraryFolder.table.c.name ) \
                            .all()
+
 def activatable_folders( trans, folder ):
     return trans.sa_session.query( trans.app.model.LibraryFolder ) \
                            .filter_by( parent=folder, purged=False ) \
@@ -2685,6 +2717,7 @@
     if folder.parent:
         return branch_deleted( folder.parent )
     return False
+
 def get_containing_library_from_library_dataset( trans, library_dataset ):
     """Given a library_dataset, get the containing library"""
     folder = library_dataset.folder
@@ -2698,6 +2731,7 @@
         if library.root_folder == folder:
             return library
     return None
+
 def get_comptypes( trans ):
     comptypes_t = comptypes
     if trans.app.config.nginx_x_archive_files_base:
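get_containing_library_from_library_dataset above works by walking the dataset's folder up its parent chain to the root folder, then matching that root against each library's root_folder. The walk itself reduces to a few lines (illustrative helper name, not Galaxy code):

def walk_to_root_folder(folder):
    # Follow parent links until the root folder (parent is None) is reached;
    # the containing library is the one whose root_folder equals this root.
    while folder.parent:
        folder = folder.parent
    return folder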
@@ -2710,6 +2744,7 @@
         except:
             pass
     return comptypes_t
+
 def get_sorted_accessible_library_items( trans, cntrller, items, sort_attr ):
     is_admin = trans.user_is_admin() and cntrller == 'library_admin'
     if is_admin:
@@ -2723,6 +2758,7 @@
                 accessible_items.append( item )
     # Sort by name
     return sort_by_attr( [ item for item in accessible_items ], sort_attr )
+
 def sort_by_attr( seq, attr ):
     """
     Sort the sequence of objects by object's attribute
@@ -2738,6 +2774,7 @@
     intermed = map( None, map( getattr, seq, ( attr, ) * len( seq ) ), xrange( len( seq ) ), seq )
     intermed.sort()
     return map( operator.getitem, intermed, ( -1, ) * len( intermed ) )
+
 def lucene_search( trans, cntrller, search_term, search_url, **kwd ):
     """Return display of results from a full-text lucene search of data libraries."""
     params = util.Params( kwd )
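The map(None, ...) and xrange construction in sort_by_attr above is the Python 2 decorate-sort-undecorate idiom: each object is wrapped as an (attribute value, position, object) tuple, so the sort is stable and tuple comparison never falls through to the objects themselves. Since Python 2.4 the same result is available as a key-based sort; an equivalent sketch, not part of this changeset:

import operator

def sort_by_attr_keyed(seq, attr):
    # sorted() is stable, so ties keep their original order, exactly as the
    # (value, index, object) decoration above guarantees.
    return sorted(seq, key=operator.attrgetter(attr))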
@@ -2749,6 +2786,7 @@
     response.close()
     lddas = [ trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id ) for ldda_id in ldda_ids ]
     return status, message, get_sorted_accessible_library_items( trans, cntrller, lddas, 'name' )
+
 def whoosh_search( trans, cntrller, search_term, **kwd ):
     """Return display of results from a full-text whoosh search of data libraries."""
     params = util.Params( kwd )
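whoosh_search wraps a full-text query against the data library index. The underlying Whoosh calls have roughly the following shape; the index path, the 'content' field and the stored 'id' field are assumptions for illustration, not Galaxy's actual index layout:

from whoosh.index import open_dir
from whoosh.qparser import QueryParser

ix = open_dir('/path/to/whoosh_index')  # hypothetical index location
with ix.searcher() as searcher:
    query = QueryParser('content', ix.schema).parse('rna-seq')
    for hit in searcher.search(query, limit=10):
        print hit['id']  # 'id' assumed to be a stored field in the schema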
         params = util.Params( kwd )
@@ -1654,6 +1675,7 @@
                                     show_deleted=show_deleted,
                                     message=message,
                                     status=status )
+
     @web.expose
     def make_library_item_public( self, trans, cntrller, library_id, item_type, id, **kwd ):
         params = util.Params( kwd )
@@ -1696,6 +1718,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status=status ) )
+
     @web.expose
     def act_on_multiple_datasets( self, trans, cntrller, library_id=None, ldda_ids='', **kwd ):
         # This method is called from 1 of 3 places:
@@ -2113,6 +2136,7 @@
                                     use_panels=use_panels,
                                     message=message,
                                     status=status )
+
     @web.expose
     def manage_template_inheritance( self, trans, cntrller, item_type, library_id, folder_id=None, ldda_id=None, **kwd ):
         params = util.Params( kwd )
@@ -2159,6 +2183,7 @@
                                                           show_deleted=show_deleted,
                                                           message=util.sanitize_text( message ),
                                                           status='done' ) )
+
     @web.expose
     def move_library_item( self, trans, cntrller, item_type, item_id, source_library_id='', make_target_current=True, **kwd ):
         # This method is called from one of the following places:
@@ -2374,6 +2399,7 @@
                                     use_panels=use_panels,
                                     message=message,
                                     status=status )
+
     @web.expose
     def delete_library_item( self, trans, cntrller, library_id, item_id, item_type, **kwd ):
         # This action will handle deleting all types of library items.  State is saved for libraries and
@@ -2441,6 +2467,7 @@
                                                               show_deleted=show_deleted,
                                                               message=message,
                                                               status=status ) )
+
     @web.expose
     def undelete_library_item( self, trans, cntrller, library_id, item_id, item_type, **kwd ):
         # This action will handle undeleting all types of library items
@@ -2509,6 +2536,7 @@
                                                               show_deleted=show_deleted,
                                                               message=message,
                                                               status=status ) )
+
     def _check_access( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         can_access = True
         if isinstance( item, trans.model.HistoryDatasetAssociation ):
@@ -2551,6 +2579,7 @@
                                                               show_deleted=show_deleted,
                                                               message=util.sanitize_text( message ),
                                                               status='error' ) )
+
     def _check_add( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission.
         if not ( is_admin or trans.app.security_agent.can_add_library_item( current_user_roles, item ) ):
@@ -2566,6 +2595,7 @@
                                                               show_deleted=show_deleted,
                                                               message=util.sanitize_text( message ),
                                                               status='error' ) )
+
     def _check_manage( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         if isinstance( item, trans.model.LibraryDataset ):
             # Deny access if the user is not an admin and does not have the LIBRARY_MANAGE and DATASET_MANAGE_PERMISSIONS permissions.
@@ -2594,6 +2624,7 @@
                                                               use_panels=use_panels,
                                                               message=util.sanitize_text( message ),
                                                               status='error' ) )
+
     def _check_modify( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
         # Deny modification if the user is not an admin and does not have the LIBRARY_MODIFY permission.
         if not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ):
@@ -2619,6 +2650,7 @@
                            .options( eagerload_all( "actions" ) ) \
                            .order_by( trans.app.model.LibraryFolder.table.c.name ) \
                            .all()
+
 def activatable_folders( trans, folder ):
     return trans.sa_session.query( trans.app.model.LibraryFolder ) \
                            .filter_by( parent=folder, purged=False ) \
@@ -2685,6 +2717,7 @@
     if folder.parent:
         return branch_deleted( folder.parent )
     return False
+
 def get_containing_library_from_library_dataset( trans, library_dataset ):
     """Given a library_dataset, get the containing library"""
     folder = library_dataset.folder
@@ -2698,6 +2731,7 @@
         if library.root_folder == folder:
             return library
     return None
+
 def get_comptypes( trans ):
     comptypes_t = comptypes
     if trans.app.config.nginx_x_archive_files_base:
@@ -2710,6 +2744,7 @@
         except:
             pass
     return comptypes_t
+
 def get_sorted_accessible_library_items( trans, cntrller, items, sort_attr ):
     is_admin = trans.user_is_admin() and cntrller == 'library_admin'
     if is_admin:
@@ -2723,6 +2758,7 @@
                 accessible_items.append( item )
     # Sort by name
     return sort_by_attr( [ item for item in accessible_items ], sort_attr )
+
 def sort_by_attr( seq, attr ):
     """
     Sort the sequence of objects by object's attribute
@@ -2738,6 +2774,7 @@
     intermed = map( None, map( getattr, seq, ( attr, ) * len( seq ) ), xrange( len( seq ) ), seq )
     intermed.sort()
     return map( operator.getitem, intermed, ( -1, ) * len( intermed ) )
+
 def lucene_search( trans, cntrller, search_term, search_url, **kwd ):
     """Return display of results from a full-text lucene search of data libraries."""
     params = util.Params( kwd )
@@ -2749,6 +2786,7 @@
     response.close()
     lddas = [ trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id ) for ldda_id in ldda_ids ]
     return status, message, get_sorted_accessible_library_items( trans, cntrller, lddas, 'name' )
+
 def whoosh_search( trans, cntrller, search_term, **kwd ):
     """Return display of results from a full-text whoosh search of data libraries."""
     params = util.Params( kwd )
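As context for the sort_by_attr lines above: map(None, ...) is Python 2's
decorate-sort-undecorate idiom, pairing each object with its sort key and
index so the objects themselves are never compared directly. A minimal sketch
of the same technique, with illustrative names rather than Galaxy's code:

    def sort_by_attr_sketch(seq, attr):
        # pair each object with (attribute value, original index); the index
        # breaks ties so the objects themselves are never compared
        decorated = [(getattr(obj, attr), i, obj) for i, obj in enumerate(seq)]
        decorated.sort()
        return [obj for _value, _i, obj in decorated]

On modern Python the same result comes from
sorted(seq, key=operator.attrgetter(attr)).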
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: Active history content: change style as suggested
by commits-noreply@bitbucket.org 10 Nov '14
                        1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/84b3e72b8dcf/
Changeset:   84b3e72b8dcf
User:        carlfeberhard
Date:        2014-11-10 15:50:03+00:00
Summary:     Active history content: change style as suggested
Affected #:  2 files
diff -r 5f9b606ca882e8db038bd5ed67f124d8537cbe01 -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e static/style/blue/base.css
--- a/static/style/blue/base.css
+++ b/static/style/blue/base.css
@@ -1980,9 +1980,7 @@
 .annotated-history-panel table.list-items>tbody>tr>td:nth-child(2){padding:8px 16px 8px 16px;white-space:pre-wrap}
 .annotated-history-panel table.list-items>tbody>tr>td>.list-item{border:0px}
 .annotated-history-panel .empty-message{margin-top:8px}
-.current-history-panel .list-item.history-content.current-content{border:2px solid #333;border-bottom-width:2px}.current-history-panel .list-item.history-content.current-content .primary-actions{margin:5px 8px 0px 0px}
-.current-history-panel .list-item.history-content.current-content .title-bar{padding:5px 8px 5px 6px}
-.current-history-panel .list-item.history-content.current-content .details{padding:0px 8px 5px 6px}
+.current-history-panel .list-item.history-content.current-content{border-left:5px solid #4E5777}
 body.historyPage{background:#dfe5f9;color:#000;margin:5px;border:0;padding:0}
 div.historyLinks{margin:5px 5px}
 div.historyItem{margin:0 -5px;padding:8px 10px;border-top:solid #bfbfbf 1px;border-right:none;word-wrap:break-word;background:#eee}div.historyItem .state-icon{display:inline-block;vertical-align:middle;width:16px;height:16px;background-position:0 1px;background-repeat:no-repeat}
diff -r 5f9b606ca882e8db038bd5ed67f124d8537cbe01 -r 84b3e72b8dcf5ce9f10040024459872fe7e6f98e static/style/src/less/history.less
--- a/static/style/src/less/history.less
+++ b/static/style/src/less/history.less
@@ -595,19 +595,11 @@
 
 // ---------------------------------------------------------------------------- current-history-panel
 .current-history-panel {
+    // .current-content is added to dataset/collection when displayed/edited/visualized, etc.
+    //  (roughly: when it's being used in the center panel)
     .list-item.history-content.current-content {
-
-        border: 2px solid #333;
-        border-bottom-width: 2px;
-        .primary-actions {
-            margin: 5px 8px 0px 0px;
-        }
-        .title-bar {
-            padding: 5px 8px 5px 6px;
-        }
-        .details {
-            padding: 0px 8px 5px 6px;
-        }
+        //TODO: Galaxy's 'bs-primary' color - not sure where this is defined
+        border-left: 5px solid #4E5777;
     }
 }
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Update tag latest_2014.10.06 for changeset a1dca14d5b1a
by commits-noreply@bitbucket.org 10 Nov '14
                        1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a16007e4df83/
Changeset:   a16007e4df83
Branch:      stable
User:        natefoo
Date:        2014-11-10 14:00:34+00:00
Summary:     Update tag latest_2014.10.06 for changeset a1dca14d5b1a
Affected #:  1 file
diff -r a1dca14d5b1afbf2b5bde192e3e6b6763836eff8 -r a16007e4df83c21ff1d9218b3892fae783742fb8 .hgtags
--- a/.hgtags
+++ b/.hgtags
@@ -20,4 +20,4 @@
 ca45b78adb4152fc6e7395514d46eba6b7d0b838 release_2014.08.11
 548ab24667d6206780237bd807f7d857a484c461 latest_2014.08.11
 2092948937ac30ef82f71463a235c66d34987088 release_2014.10.06
-9b8cb8d48a798e8905d09e380138c1102012ce6f latest_2014.10.06
+a1dca14d5b1afbf2b5bde192e3e6b6763836eff8 latest_2014.10.06
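For readers unfamiliar with .hgtags: each line maps a changeset hash to a tag
name, and later entries for the same tag override earlier ones, so moving
latest_2014.10.06 only rewrites that one line. A hedged sketch of reading such
a file (illustrative helper, not Mercurial's own code):

    def latest_tags(hgtags_text):
        # later lines override earlier ones, so the last entry for a tag wins
        tags = {}
        for line in hgtags_text.splitlines():
            if line.strip():
                node, tag = line.split(None, 1)
                tags[tag.strip()] = node
        return tags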
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Memory limit detection. This code is from default and is a partial graft. It
by commits-noreply@bitbucket.org 08 Nov '14
                        1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a1dca14d5b1a/
Changeset:   a1dca14d5b1a
Branch:      stable
User:        natefoo
Date:        2014-11-07 20:41:27+00:00
Summary:     Memory limit detection. This code is from the default branch and
is a partial graft: it includes the code for detecting whether a Slurm job was
killed for exceeding its memory limit, but does not include the resubmit
functionality.
Affected #:  2 files
diff -r 83f821c5ecc1e0ac4bbdc6710c6e7abb7faf6afa -r a1dca14d5b1afbf2b5bde192e3e6b6763836eff8 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -337,6 +337,7 @@
     """
     runner_states = Bunch(
         WALLTIME_REACHED = 'walltime_reached',
+        MEMORY_LIMIT_REACHED = 'memory_limit_reached',
         GLOBAL_WALLTIME_REACHED = 'global_walltime_reached',
         OUTPUT_SIZE_LIMIT = 'output_size_limit'
     )
diff -r 83f821c5ecc1e0ac4bbdc6710c6e7abb7faf6afa -r a1dca14d5b1afbf2b5bde192e3e6b6763836eff8 lib/galaxy/jobs/runners/slurm.py
--- a/lib/galaxy/jobs/runners/slurm.py
+++ b/lib/galaxy/jobs/runners/slurm.py
@@ -13,6 +13,8 @@
 
 __all__ = [ 'SlurmJobRunner' ]
 
+SLURM_MEMORY_LIMIT_EXCEEDED_MSG = 'slurmstepd: error: Exceeded job memory limit'
+
 
 class SlurmJobRunner( DRMAAJobRunner ):
     runner_name = "SlurmRunner"
@@ -62,8 +64,14 @@
                     except:
                         ajs.fail_message = "This job failed due to a cluster node failure, and an attempt to resubmit the job failed."
                 elif job_info['JobState'] == 'CANCELLED':
-                    log.info( '(%s/%s) Job was cancelled via slurm (e.g. with scancel(1))', ajs.job_wrapper.get_id_tag(), ajs.job_id )
-                    ajs.fail_message = "This job failed because it was cancelled by an administrator."
+                    # Check to see if the job was killed for exceeding memory consumption
+                    if self.__check_memory_limit( ajs.error_file ):
+                        log.info( '(%s/%s) Job hit memory limit', ajs.job_wrapper.get_id_tag(), ajs.job_id )
+                        ajs.fail_message = "This job was terminated because it used more memory than it was allocated."
+                        ajs.runner_state = ajs.runner_states.MEMORY_LIMIT_REACHED
+                    else:
+                        log.info( '(%s/%s) Job was cancelled via slurm (e.g. with scancel(1))', ajs.job_wrapper.get_id_tag(), ajs.job_id )
+                        ajs.fail_message = "This job failed because it was cancelled by an administrator."
                 else:
                     log.warning( '(%s/%s) Job failed due to unknown reasons, JobState was: %s', ajs.job_wrapper.get_id_tag(), ajs.job_id, job_info['JobState'] )
                     ajs.fail_message = "This job failed for reasons that could not be determined."
@@ -77,3 +85,31 @@
                 super( SlurmJobRunner, self )._complete_terminal_job( ajs, drmaa_state = drmaa_state )
         # by default, finish as if the job was successful.
         super( SlurmJobRunner, self )._complete_terminal_job( ajs, drmaa_state = drmaa_state )
+
+    def __check_memory_limit( self, efile_path ):
+        """
+        A very poor implementation of tail, but it doesn't need to be fancy
+        since we are only searching the last 2K
+        """
+        try:
+            log.debug( 'Checking %s for exceeded memory message from slurm', efile_path )
+            with open( efile_path ) as f:
+                pos = 2
+                bof = False
+                while pos < 2048:
+                    try:
+                        f.seek(-pos, 2)
+                        pos += 1
+                    except:
+                        f.seek(-pos + 1, 2)
+                        bof = True
+
+                    if (bof or f.read(1) == '\n') and f.readline().strip() == SLURM_MEMORY_LIMIT_EXCEEDED_MSG:
+                        return True
+
+                    if bof:
+                        break
+        except:
+            log.exception('Error reading end of %s:', efile_path)
+
+        return False
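The __check_memory_limit helper above boils down to: scan at most the last 2K
of the job's error file for the slurmstepd sentinel line. A minimal standalone
sketch of that approach, using a simpler seek-once strategy and an
illustrative function name:

    SENTINEL = 'slurmstepd: error: Exceeded job memory limit'

    def tail_contains_sentinel(path, sentinel=SENTINEL, window=2048):
        try:
            with open(path, 'rb') as f:
                f.seek(0, 2)                       # jump to end of file
                f.seek(max(0, f.tell() - window))  # back up at most `window` bytes
                tail = f.read().decode('utf-8', 'replace')
        except IOError:
            return False
        # compare stripped lines so trailing whitespace cannot hide a match
        return any(line.strip() == sentinel for line in tail.splitlines())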
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
                        1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ec3d0628e011/
Changeset:   ec3d0628e011
Branch:      fix-tooltip
User:        dannon
Date:        2014-11-08 01:01:56+00:00
Summary:     Branch prune.
Affected #:  0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dannon: Merged in kellrott/galaxy-farm/fix-tooltip (pull request #544)
by commits-noreply@bitbucket.org 08 Nov '14
                        1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5f9b606ca882/
Changeset:   5f9b606ca882
User:        dannon
Date:        2014-11-08 00:53:38+00:00
Summary:     Merged in kellrott/galaxy-farm/fix-tooltip (pull request #544)
Adding fake host_url to tooltip rendering
Affected #:  1 file
diff -r 93723d1cf699eab771ccde086d34bec1fd1c22e6 -r 5f9b606ca882e8db038bd5ed67f124d8537cbe01 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -13,6 +13,7 @@
 import galaxy.tools
 from galaxy import exceptions
 from galaxy import model
+from galaxy import web
 from galaxy.dataset_collections import matching
 from galaxy.web.framework import formbuilder
 from galaxy.jobs.actions.post import ActionBox
@@ -337,7 +338,7 @@
         type_hints[ "list" ] = "List of Datasets"
         type_hints[ "paired" ] = "Dataset Pair"
         type_hints[ "list:paired" ] = "List of Dataset Pairs"
-        
+
         type_input = formbuilder.DatalistInput(
             name="collection_type",
             label="Collection Type",
@@ -486,7 +487,7 @@
 
     def get_tooltip( self, static_path='' ):
         if self.tool.help:
-            return self.tool.help.render( static_path=static_path )
+            return self.tool.help.render( host_url=web.url_for('/'), static_path=static_path )
         else:
             return None
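The failure mode this merge fixes: tool help is rendered from a template, and
a template that references ${host_url} raises a NameError when the render call
does not supply it. A hedged illustration with Mako (assuming the .render(...)
object in the diff behaves like a Mako template; the template text here is
made up):

    from mako.template import Template

    help_template = Template("Docs at <a href='${host_url}'>this server</a>.")
    try:
        help_template.render(static_path='/static')  # host_url missing
    except NameError as err:
        print('render failed: %s' % err)
    # supplying host_url, as the fix does via web.url_for('/'), succeeds
    print(help_template.render(host_url='/', static_path='/static'))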
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
                        3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ef5d3a6775d2/
Changeset:   ef5d3a6775d2
Branch:      fix-tooltip
User:        kell...(a)gmail.com
Date:        2014-10-31 04:14:05+00:00
Summary:     Adding fake host_url to tooltip rendering to stop certain tools from crashing the tooltip renderer
Affected #:  1 file
diff -r 06970f2cfee91d60b981308483d56a9ebb3afb55 -r ef5d3a6775d2b8616e814bbfe326f89e0192b910 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -337,7 +337,7 @@
         type_hints[ "list" ] = "List of Datasets"
         type_hints[ "paired" ] = "Dataset Pair"
         type_hints[ "list:paired" ] = "List of Dataset Pairs"
-        
+
         type_input = formbuilder.DatalistInput(
             name="collection_type",
             label="Collection Type",
@@ -486,7 +486,7 @@
 
     def get_tooltip( self, static_path='' ):
         if self.tool.help:
-            return self.tool.help.render( static_path=static_path )
+            return self.tool.help.render( host_url='', static_path=static_path )
         else:
             return None
 
https://bitbucket.org/galaxy/galaxy-central/commits/94446d6469b1/
Changeset:   94446d6469b1
Branch:      fix-tooltip
User:        kellrott
Date:        2014-11-07 22:18:32+00:00
Summary:     Putting in URL to current host for tooltip host_url variable
Affected #:  1 file
diff -r ef5d3a6775d2b8616e814bbfe326f89e0192b910 -r 94446d6469b10a8c08479fa33ccac2b2a950484d lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -13,6 +13,7 @@
 import galaxy.tools
 from galaxy import exceptions
 from galaxy import model
+from galaxy import web
 from galaxy.dataset_collections import matching
 from galaxy.web.framework import formbuilder
 from galaxy.jobs.actions.post import ActionBox
@@ -486,7 +487,7 @@
 
     def get_tooltip( self, static_path='' ):
         if self.tool.help:
-            return self.tool.help.render( host_url='', static_path=static_path )
+            return self.tool.help.render( host_url=web.url_for('/'), static_path=static_path )
         else:
             return None
 
https://bitbucket.org/galaxy/galaxy-central/commits/5f9b606ca882/
Changeset:   5f9b606ca882
User:        dannon
Date:        2014-11-08 00:53:38+00:00
Summary:     Merged in kellrott/galaxy-farm/fix-tooltip (pull request #544)
Adding fake host_url to tooltip rendering
Affected #:  1 file
diff -r 93723d1cf699eab771ccde086d34bec1fd1c22e6 -r 5f9b606ca882e8db038bd5ed67f124d8537cbe01 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -13,6 +13,7 @@
 import galaxy.tools
 from galaxy import exceptions
 from galaxy import model
+from galaxy import web
 from galaxy.dataset_collections import matching
 from galaxy.web.framework import formbuilder
 from galaxy.jobs.actions.post import ActionBox
@@ -337,7 +338,7 @@
         type_hints[ "list" ] = "List of Datasets"
         type_hints[ "paired" ] = "Dataset Pair"
         type_hints[ "list:paired" ] = "List of Dataset Pairs"
-        
+
         type_input = formbuilder.DatalistInput(
             name="collection_type",
             label="Collection Type",
@@ -486,7 +487,7 @@
 
     def get_tooltip( self, static_path='' ):
         if self.tool.help:
-            return self.tool.help.render( static_path=static_path )
+            return self.tool.help.render( host_url=web.url_for('/'), static_path=static_path )
         else:
             return None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
                        1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ad21172bc346/
Changeset:   ad21172bc346
Branch:      workflow-input-fix
User:        dannon
Date:        2014-11-08 00:50:52+00:00
Summary:     Branch prune.
Affected #:  0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dannon: Merged in kellrott/galaxy-farm/workflow-input-fix (pull request #552)
by commits-noreply@bitbucket.org 08 Nov '14
                        1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/93723d1cf699/
Changeset:   93723d1cf699
User:        dannon
Date:        2014-11-08 00:48:15+00:00
Summary:     Merged in kellrott/galaxy-farm/workflow-input-fix (pull request #552)
Fixing workflow import to correctly set the uuid
Affected #:  1 file
diff -r 04644e64c498ca4eacbfa1c6df26cb546705b96c -r 93723d1cf699eab771ccde086d34bec1fd1c22e6 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -1596,6 +1596,8 @@
         else:
             name = data['name']
         workflow.name = name
+        if 'uuid' in data:
+            workflow.uuid = data['uuid']
         # Assume no errors until we find a step that has some
         workflow.has_errors = False
         # Create each step
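The effect of the two added lines: a workflow imported from a dict keeps the
UUID it was exported with instead of receiving a fresh one. A minimal sketch
of that behavior with a stand-in class (illustrative, not Galaxy's model):

    import uuid

    class WorkflowStub(object):
        def __init__(self):
            self.name = None
            self.uuid = None

    def import_workflow(data):
        wf = WorkflowStub()
        wf.name = data.get('name', 'Unnamed workflow')
        if 'uuid' in data:
            wf.uuid = data['uuid']       # preserve the source UUID
        else:
            wf.uuid = str(uuid.uuid4())  # otherwise mint a new one
        return wf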
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.