galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
November 2013
- 1 participant
- 208 discussions
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9bde6f2b9c59/
Changeset: 9bde6f2b9c59
Branch: stable
User: Dave Bouvier
Date: 2013-11-13 20:16:29
Summary: Fix for downloading jar files in tool dependencies.
Affected #: 1 file
diff -r 8b60ec04bea623eeb7ba3b370302cf4e123119d7 -r 9bde6f2b9c59a29a35b85b7fe3c2e3bef3ae6a2e lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
@@ -430,7 +430,7 @@
if dst:
dst.close()
if extract:
- if istar( file_path ) or iszip( file_path ):
+ if istar( file_path ) or ( iszip( file_path ) and not isjar( file_path ) ):
archive = CompressedFile( file_path )
extraction_path = archive.extract( install_dir )
else:
https://bitbucket.org/galaxy/galaxy-central/commits/e6b3faf87f02/
Changeset: e6b3faf87f02
User: Dave Bouvier
Date: 2013-11-13 20:18:36
Summary: Merge in fix from stable.
Affected #: 1 file
diff -r 846962cad9648e2a76ba4bd2d6f85079ec78056c -r e6b3faf87f026f03412ff7b1b133fa0872c8be45 lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
@@ -430,7 +430,7 @@
if dst:
dst.close()
if extract:
- if istar( file_path ) or iszip( file_path ):
+ if istar( file_path ) or ( iszip( file_path ) and not isjar( file_path ) ):
archive = CompressedFile( file_path )
extraction_path = archive.extract( install_dir )
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: More fixes for recent changes to the datatypes registry.
by commits-noreply@bitbucket.org 13 Nov '13
by commits-noreply@bitbucket.org 13 Nov '13
13 Nov '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/846962cad964/
Changeset: 846962cad964
User: Dave Bouvier
Date: 2013-11-13 20:11:54
Summary: More fixes for recent changes to the datatypes registry.
Affected #: 1 file
diff -r 25a48c98802a8552fc6fac4a5c719a6c1839173a -r 846962cad9648e2a76ba4bd2d6f85079ec78056c lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -85,6 +85,13 @@
return imported_module
if root_dir and config:
+ # If handling_proprietary_datatypes is determined as True below, we'll have an elem that looks something like this:
+ # <datatype display_in_upload="true"
+ # extension="blastxml"
+ # mimetype="application/xml"
+ # proprietary_datatype_module="blast"
+ # proprietary_path="[cloned repository path]"
+ # type="galaxy.datatypes.blast:BlastXml" />
handling_proprietary_datatypes = False
# Parse datatypes_conf.xml
tree = galaxy.util.parse_xml( config )
@@ -117,9 +124,13 @@
type_extension = elem.get( 'type_extension', None )
mimetype = elem.get( 'mimetype', None )
display_in_upload = galaxy.util.string_as_bool( elem.get( 'display_in_upload', False ) )
+ # If make_subclass is True, it does not necessarily imply that we are subclassing a datatype that is contained
+ # in the distribution.
make_subclass = galaxy.util.string_as_bool( elem.get( 'subclass', False ) )
# Proprietary datatypes included in installed tool shed repositories will include two special attributes
# (proprietary_path and proprietary_datatype_module) if they depend on proprietary datatypes classes.
+ # The value of proprietary_path is the path to the cloned location of the tool shed repository's contained
+ # datatypes_conf.xml file.
proprietary_path = elem.get( 'proprietary_path', None )
proprietary_datatype_module = elem.get( 'proprietary_datatype_module', None )
if proprietary_path is not None or proprietary_datatype_module is not None and not handling_proprietary_datatypes:
@@ -182,21 +193,22 @@
except Exception, e:
full_path = os.path.join( proprietary_path, proprietary_datatype_module )
self.log.debug( "Exception importing proprietary code file %s: %s" % ( str( full_path ), str( e ) ) )
- ok = False
finally:
lock.release()
- if ok:
- if datatype_class is None:
- try:
- # The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
- fields = datatype_module.split( '.' )
- module = __import__( fields.pop( 0 ) )
- for mod in fields:
- module = getattr( module, mod )
- datatype_class = getattr( module, datatype_class_name )
- except Exception, e:
- self.log.exception( 'Error importing datatype module %s: %s' % ( str( datatype_module ), str( e ) ) )
- ok = False
+ # Either the above exception was thrown because the proprietary_datatype_module is not derived from a class
+ # in the repository, or we are loading Galaxy's datatypes. In either case we'll look in the registry.
+ if datatype_class is None:
+ try:
+ # The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
+ fields = datatype_module.split( '.' )
+ module = __import__( fields.pop( 0 ) )
+ for mod in fields:
+ module = getattr( module, mod )
+ datatype_class = getattr( module, datatype_class_name )
+ self.log.debug( 'Retrieved datatype module %s from the datatype registry.' % str( datatype_module ) )
+ except Exception, e:
+ self.log.exception( 'Error importing datatype module %s: %s' % ( str( datatype_module ), str( e ) ) )
+ ok = False
elif type_extension is not None:
try:
datatype_class = self.datatypes_by_extension[ type_extension ].__class__
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fixes for recent changes to the datatypes registry.
by commits-noreply@bitbucket.org 13 Nov '13
by commits-noreply@bitbucket.org 13 Nov '13
13 Nov '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/25a48c98802a/
Changeset: 25a48c98802a
User: greg
Date: 2013-11-13 17:50:27
Summary: Fixes for recent changes to the datatypes registry.
Affected #: 1 file
diff -r 17ee3fa154b35f856a79be1691c5ed60832618c6 -r 25a48c98802a8552fc6fac4a5c719a6c1839173a lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -72,16 +72,18 @@
def load_datatypes( self, root_dir=None, config=None, deactivate=False, override=True ):
"""
- Parse a datatypes XML file located at root_dir/config. If deactivate is True, an installed tool shed
- repository that includes proprietary datatypes is being deactivated, so appropriate loaded datatypes
- will be removed from the registry. The value of override will be False when a tool shed repository is
- being installed. Since installation is occurring after the datatypes registry has been initialized, its
- contents cannot be overridden by new introduced conflicting data types.
+ Parse a datatypes XML file located at root_dir/config (if processing the Galaxy distributed config) or contained within
+ an installed Tool Shed repository. If deactivate is True, an installed Tool Shed repository that includes custom datatypes
+ is being deactivated or uninstalled, so appropriate loaded datatypes will be removed from the registry. The value of
+ override will be False when a Tool Shed repository is being installed. Since installation is occurring after the datatypes
+ registry has been initialized at server startup, it's contents cannot be overridden by newly introduced conflicting data types.
"""
+
def __import_module( full_path, datatype_module, datatype_class_name ):
open_file_obj, file_name, description = imp.find_module( datatype_module, [ full_path ] )
imported_module = imp.load_module( datatype_class_name, open_file_obj, file_name, description )
return imported_module
+
if root_dir and config:
handling_proprietary_datatypes = False
# Parse datatypes_conf.xml
@@ -105,9 +107,11 @@
# Proprietary datatype's <registration> tag may have special attributes, proprietary_converter_path and proprietary_display_path.
proprietary_converter_path = registration.get( 'proprietary_converter_path', None )
proprietary_display_path = registration.get( 'proprietary_display_path', None )
- if proprietary_converter_path or proprietary_display_path and not handling_proprietary_datatypes:
+ if proprietary_converter_path is not None or proprietary_display_path is not None and not handling_proprietary_datatypes:
handling_proprietary_datatypes = True
for elem in registration.findall( 'datatype' ):
+ # Keep a status of the process steps to enable stopping the process of handling the datatype if necessary.
+ ok = True
extension = elem.get( 'extension', None )
dtype = elem.get( 'type', None )
type_extension = elem.get( 'type_extension', None )
@@ -118,11 +122,11 @@
# (proprietary_path and proprietary_datatype_module) if they depend on proprietary datatypes classes.
proprietary_path = elem.get( 'proprietary_path', None )
proprietary_datatype_module = elem.get( 'proprietary_datatype_module', None )
- if proprietary_path or proprietary_datatype_module and not handling_proprietary_datatypes:
+ if proprietary_path is not None or proprietary_datatype_module is not None and not handling_proprietary_datatypes:
handling_proprietary_datatypes = True
if deactivate:
- # We are deactivating an installed tool shed repository, so eliminate the
- # datatype elem from the in-memory list of datatype elems.
+ # We are deactivating or uninstalling an installed tool shed repository, so eliminate the datatype
+ # elem from the in-memory list of datatype elems.
for in_memory_elem in self.datatype_elems:
in_memory_extension = in_memory_elem.get( 'extension', None )
if in_memory_extension == extension:
@@ -131,30 +135,33 @@
in_memory_mimetype = elem.get( 'mimetype', None )
in_memory_display_in_upload = galaxy.util.string_as_bool( elem.get( 'display_in_upload', False ) )
in_memory_make_subclass = galaxy.util.string_as_bool( elem.get( 'subclass', False ) )
- if in_memory_dtype == dtype and in_memory_type_extension == type_extension and in_memory_mimetype == mimetype \
- and in_memory_display_in_upload == display_in_upload and in_memory_make_subclass == make_subclass:
+ if in_memory_dtype == dtype and \
+ in_memory_type_extension == type_extension and \
+ in_memory_mimetype == mimetype and \
+ in_memory_display_in_upload == display_in_upload and \
+ in_memory_make_subclass == make_subclass:
self.datatype_elems.remove( in_memory_elem )
- if extension and extension in self.datatypes_by_extension:
- # We are deactivating an installed tool shed repository, so eliminate the datatype from the registry.
- # TODO: Handle deactivating datatype converters, etc before removing from self.datatypes_by_extension.
- self.log.debug( "Removing datatype with extension '%s' from the registry." % extension )
+ if extension is not None and extension in self.datatypes_by_extension:
+ # We are deactivating or uninstalling an installed tool shed repository, so eliminate the datatype
+ # from the registry. TODO: Handle deactivating datatype converters, etc before removing from
+ # self.datatypes_by_extension.
del self.datatypes_by_extension[ extension ]
if extension in self.upload_file_formats:
self.upload_file_formats.remove( extension )
- can_process_datatype = False
+ self.log.debug( "Removed datatype with extension '%s' from the registry." % extension )
else:
- # We are loading new datatypes. Keep an in-memory list of datatype elems to enable persistence.
- if extension not in self.datatypes_by_extension:
- self.datatype_elems.append( elem )
- can_process_datatype = ( extension and ( dtype or type_extension ) ) and \
- ( extension not in self.datatypes_by_extension or override )
+ # We are loading new datatype, so we'll make sure it is correctly defined before proceeding.
+ can_process_datatype = False
+ if extension is not None:
+ if dtype is not None or type_extension is not None:
+ if override or extension not in self.datatypes_by_extension:
+ can_process_datatype = True
if can_process_datatype:
- ok = True
- if dtype:
+ if dtype is not None:
try:
fields = dtype.split( ':' )
- datatype_module = fields[0]
- datatype_class_name = fields[1]
+ datatype_module = fields[ 0 ]
+ datatype_class_name = fields[ 1 ]
except Exception, e:
self.log.exception( 'Error parsing datatype definition for dtype %s: %s' % ( str( dtype ), str( e ) ) )
ok = False
@@ -175,115 +182,141 @@
except Exception, e:
full_path = os.path.join( proprietary_path, proprietary_datatype_module )
self.log.debug( "Exception importing proprietary code file %s: %s" % ( str( full_path ), str( e ) ) )
+ ok = False
finally:
lock.release()
- if datatype_class is None:
- try:
- # The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
- fields = datatype_module.split( '.' )
- module = __import__( fields.pop(0) )
- for mod in fields:
- module = getattr( module, mod )
- datatype_class = getattr( module, datatype_class_name )
- except Exception, e:
- self.log.exception( 'Error importing datatype module %s: %s' % ( str( datatype_module ), str( e ) ) )
- elif type_extension:
- datatype_class = self.datatypes_by_extension[ type_extension ].__class__
- if not deactivate:
- # A new tool shed repository that contains proprietary datatypes is being installed, and since installation
- # is occurring after the datatypes registry has been initialized, its contents cannot be overridden by new
- # introduced conflicting data types.
- if make_subclass:
- datatype_class = type( datatype_class_name, ( datatype_class, ), {} )
- if extension in self.datatypes_by_extension:
- self.log.warning( "Overriding conflicting datatype with extension '%s', using datatype from %s." % ( extension, config ) )
- self.datatypes_by_extension[ extension ] = datatype_class()
- if mimetype is None:
- # Use default mime type as per datatype spec
- mimetype = self.datatypes_by_extension[ extension ].get_mime()
- self.mimetypes_by_extension[ extension ] = mimetype
- if datatype_class.track_type:
- self.available_tracks.append( extension )
- if display_in_upload and extension not in self.upload_file_formats:
- self.upload_file_formats.append( extension )
- # Max file size cut off for setting optional metadata
- self.datatypes_by_extension[ extension ].max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
- for converter in elem.findall( 'converter' ):
- # Build the list of datatype converters which will later be loaded into the calling app's toolbox.
- converter_config = converter.get( 'file', None )
- target_datatype = converter.get( 'target_datatype', None )
- depends_on = converter.get( 'depends_on', None )
- if depends_on and target_datatype:
- if extension not in self.converter_deps:
- self.converter_deps[extension] = {}
- self.converter_deps[extension][target_datatype] = depends_on.split(',')
- if converter_config and target_datatype:
- #if imported_modules:
- if proprietary_converter_path:
- self.proprietary_converters.append( ( converter_config, extension, target_datatype ) )
+ if ok:
+ if datatype_class is None:
+ try:
+ # The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
+ fields = datatype_module.split( '.' )
+ module = __import__( fields.pop( 0 ) )
+ for mod in fields:
+ module = getattr( module, mod )
+ datatype_class = getattr( module, datatype_class_name )
+ except Exception, e:
+ self.log.exception( 'Error importing datatype module %s: %s' % ( str( datatype_module ), str( e ) ) )
+ ok = False
+ elif type_extension is not None:
+ try:
+ datatype_class = self.datatypes_by_extension[ type_extension ].__class__
+ except Exception, e:
+ self.log.exception( 'Error determining datatype_class for type_extension %s: %s' % ( str( type_extension ), str( e ) ) )
+ ok = False
+ if ok:
+ if not deactivate:
+ # A new tool shed repository that contains custom datatypes is being installed, and since installation is
+ # occurring after the datatypes registry has been initialized at server startup, its contents cannot be
+ # overridden by new introduced conflicting data types unless the value of override is True.
+ if extension in self.datatypes_by_extension:
+ # Because of the way that the value of can_process_datatype was set above, we know that the value of
+ # override is True.
+ self.log.warning( "Overriding conflicting datatype with extension '%s', using datatype from %s." % \
+ ( str( extension ), str( config ) ) )
+ if make_subclass:
+ datatype_class = type( datatype_class_name, ( datatype_class, ), {} )
+ self.datatypes_by_extension[ extension ] = datatype_class()
+ if mimetype is None:
+ # Use default mimetype per datatype specification.
+ mimetype = self.datatypes_by_extension[ extension ].get_mime()
+ self.mimetypes_by_extension[ extension ] = mimetype
+ if datatype_class.track_type:
+ self.available_tracks.append( extension )
+ if display_in_upload and extension not in self.upload_file_formats:
+ self.upload_file_formats.append( extension )
+ # Max file size cut off for setting optional metadata.
+ self.datatypes_by_extension[ extension ].max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
+ for converter in elem.findall( 'converter' ):
+ # Build the list of datatype converters which will later be loaded into the calling app's toolbox.
+ converter_config = converter.get( 'file', None )
+ target_datatype = converter.get( 'target_datatype', None )
+ depends_on = converter.get( 'depends_on', None )
+ if depends_on is not None and target_datatype is not None:
+ if extension not in self.converter_deps:
+ self.converter_deps[ extension ] = {}
+ self.converter_deps[ extension ][ target_datatype ] = depends_on.split( ',' )
+ if converter_config and target_datatype:
+ if proprietary_converter_path:
+ self.proprietary_converters.append( ( converter_config, extension, target_datatype ) )
+ else:
+ self.converters.append( ( converter_config, extension, target_datatype ) )
+ # Add composite files.
+ for composite_file in elem.findall( 'composite_file' ):
+ name = composite_file.get( 'name', None )
+ if name is None:
+ self.log.warning( "You must provide a name for your composite_file (%s)." % composite_file )
+ optional = composite_file.get( 'optional', False )
+ mimetype = composite_file.get( 'mimetype', None )
+ self.datatypes_by_extension[ extension ].add_composite_file( name, optional=optional, mimetype=mimetype )
+ for display_app in elem.findall( 'display' ):
+ if proprietary_display_path:
+ if elem not in self.proprietary_display_app_containers:
+ self.proprietary_display_app_containers.append( elem )
else:
- self.converters.append( ( converter_config, extension, target_datatype ) )
- for composite_file in elem.findall( 'composite_file' ):
- # add composite files
- name = composite_file.get( 'name', None )
- if name is None:
- self.log.warning( "You must provide a name for your composite_file (%s)." % composite_file )
- optional = composite_file.get( 'optional', False )
- mimetype = composite_file.get( 'mimetype', None )
- self.datatypes_by_extension[extension].add_composite_file( name, optional=optional, mimetype=mimetype )
- for display_app in elem.findall( 'display' ):
- #if imported_modules:
- if proprietary_display_path:
- if elem not in self.proprietary_display_app_containers:
- self.proprietary_display_app_containers.append( elem )
- else:
- if elem not in self.display_app_containers:
- self.display_app_containers.append( elem )
- elif ( extension and ( dtype or type_extension ) ) and ( extension in self.datatypes_by_extension and not override ):
- self.log.warning( "Ignoring conflicting datatype with extension '%s' from %s." % ( extension, config ) )
- # Load datatype sniffers from the config
+ if elem not in self.display_app_containers:
+ self.display_app_containers.append( elem )
+ # Processing the new datatype elem is now complete, so make sure the element defining it is retained by appending
+ # the new datatype to the in-memory list of datatype elems to enable persistence.
+ self.datatype_elems.append( elem )
+ else:
+ if extension is not None:
+ if dtype is not None or type_extension is not None:
+ if extension in self.datatypes_by_extension:
+ if not override:
+ # Do not load the datatype since it conflicts with an existing datatype which we are not supposed
+ # to override.
+ self.log.warning( "Ignoring conflicting datatype with extension '%s' from %s." % ( extension, config ) )
+ # Load datatype sniffers from the config - we'll do this even if one or more datatypes were not properly processed in the config
+ # since sniffers are not tightly coupled with datatypes.
self.load_datatype_sniffers( root,
deactivate=deactivate,
handling_proprietary_datatypes=handling_proprietary_datatypes,
override=override )
self.upload_file_formats.sort()
- # Persist the xml form of the registry into a temporary file so that it
- # can be loaded from the command line by tools and set_metadata processing.
+ # Persist the xml form of the registry into a temporary file so that it can be loaded from the command line by tools and
+ # set_metadata processing.
self.to_xml_file()
self.set_default_values()
def append_to_sniff_order():
# Just in case any supported data types are not included in the config's sniff_order section.
for ext in self.datatypes_by_extension:
- datatype = self.datatypes_by_extension[ext]
+ datatype = self.datatypes_by_extension[ ext ]
included = False
for atype in self.sniff_order:
- if isinstance(atype, datatype.__class__):
+ if isinstance( atype, datatype.__class__ ):
included = True
break
if not included:
- self.sniff_order.append(datatype)
+ self.sniff_order.append( datatype )
append_to_sniff_order()
def load_datatype_sniffers( self, root, deactivate=False, handling_proprietary_datatypes=False, override=False ):
- # Load datatype sniffers from the received XML config
+ """
+ Process the sniffers element from a parsed a datatypes XML file located at root_dir/config (if processing the Galaxy
+ distributed config) or contained within an installed Tool Shed repository. If deactivate is True, an installed Tool
+ Shed repository that includes custom sniffers is being deactivated or uninstalled, so appropriate loaded sniffers will
+ be removed from the registry. The value of override will be False when a Tool Shed repository is being installed.
+ Since installation is occurring after the datatypes registry has been initialized at server startup, it's contents
+ cannot be overridden by newly introduced conflicting sniffers.
+ """
sniffer_elem_classes = [ e.attrib[ 'type' ] for e in self.sniffer_elems ]
sniffers = root.find( 'sniffers' )
if sniffers:
for elem in sniffers.findall( 'sniffer' ):
+ # Keep a status of the process steps to enable stopping the process of handling the sniffer if necessary.
+ ok = True
dtype = elem.get( 'type', None )
- ok = True
- if dtype:
+ if dtype is not None:
try:
fields = dtype.split( ":" )
- datatype_module = fields[0]
- datatype_class_name = fields[1]
+ datatype_module = fields[ 0 ]
+ datatype_class_name = fields[ 1 ]
module = None
except Exception, e:
self.log.exception( 'Error determining datatype class or module for dtype %s: %s' % ( str( dtype ), str( e ) ) )
ok = False
if ok:
- #if imported_modules:
if handling_proprietary_datatypes:
# See if one of the imported modules contains the datatype class name.
for imported_module in self.imported_modules:
@@ -307,23 +340,21 @@
ok = False
if ok:
if deactivate:
+ # We are deactivating or uninstalling an installed Tool Shed repository, so eliminate the appropriate sniffers.
sniffer_class = elem.get( 'type', None )
if sniffer_class is not None:
for index, s_e_c in enumerate( sniffer_elem_classes ):
if sniffer_class == s_e_c:
del self.sniffer_elems[ index ]
- self.log.debug( "Deactivated sniffer for datatype '%s'" % dtype )
+ self.log.debug( "Removed sniffer element for datatype '%s'" % str( dtype ) )
break
- for sniffer_class in self.sniff_order:
- if sniffer_class.__class__ == aclass.__class__:
- self.sniff_order.remove( sniffer_class )
- self.log.debug( "Removed sniffer class for datatype '%s' from sniff order" % dtype )
- break
+ for sniffer_class in self.sniff_order:
+ if sniffer_class.__class__ == aclass.__class__:
+ self.sniff_order.remove( sniffer_class )
+ self.log.debug( "Removed sniffer class for datatype '%s' from sniff order" % str( dtype ) )
+ break
else:
- # Keep an in-memory list of sniffer elems to enable persistence.
- if elem not in self.sniffer_elems:
- self.sniffer_elems.append( elem )
- # See if we have a conflicting sniffer already loaded.
+ # We are loading new sniffer, so see if we have a conflicting sniffer already loaded.
conflict = False
for conflict_loc, sniffer_class in enumerate( self.sniff_order ):
if sniffer_class.__class__ == aclass.__class__:
@@ -331,7 +362,7 @@
conflict = True
if override:
del self.sniff_order[ conflict_loc ]
- self.log.debug( "Replaced conflicting sniffer for datatype '%s'" % dtype )
+ self.log.debug( "Removed conflicting sniffer for datatype '%s'" % dtype )
break
if conflict:
if override:
@@ -340,6 +371,11 @@
else:
self.sniff_order.append( aclass )
self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
+ # Processing the new sniffer elem is now complete, so make sure the element defining it is loaded if necessary.
+ sniffer_class = elem.get( 'type', None )
+ if sniffer_class is not None:
+ if sniffer_class not in sniffer_elem_classes:
+ self.sniffer_elems.append( elem )
def get_datatype_class_by_name( self, name ):
"""
@@ -370,28 +406,28 @@
#
#return datatype
- def get_available_tracks(self):
+ def get_available_tracks( self ):
return self.available_tracks
- def get_mimetype_by_extension(self, ext, default = 'application/octet-stream' ):
+ def get_mimetype_by_extension( self, ext, default='application/octet-stream' ):
"""Returns a mimetype based on an extension"""
try:
- mimetype = self.mimetypes_by_extension[ext]
+ mimetype = self.mimetypes_by_extension[ ext ]
except KeyError:
#datatype was never declared
mimetype = default
- self.log.warning('unknown mimetype in data factory %s' % ext)
+ self.log.warning( 'unknown mimetype in data factory %s' % str( ext ) )
return mimetype
- def get_datatype_by_extension(self, ext ):
+ def get_datatype_by_extension( self, ext ):
"""Returns a datatype based on an extension"""
try:
- builder = self.datatypes_by_extension[ext]
+ builder = self.datatypes_by_extension[ ext ]
except KeyError:
builder = data.Text()
return builder
- def change_datatype(self, data, ext):
+ def change_datatype( self, data, ext ):
data.extension = ext
# call init_meta and copy metadata from itself. The datatype
# being converted *to* will handle any metadata copying and
@@ -401,11 +437,11 @@
data.init_meta( copy_from=data )
return data
- def old_change_datatype(self, data, ext):
+ def old_change_datatype( self, data, ext ):
"""Creates and returns a new datatype based on an existing data and an extension"""
- newdata = factory(ext)(id=data.id)
+ newdata = factory( ext )( id=data.id )
for key, value in data.__dict__.items():
- setattr(newdata, key, value)
+ setattr( newdata, key, value )
newdata.ext = ext
return newdata
@@ -422,9 +458,9 @@
# Load converters defined by local datatypes_conf.xml.
converters = self.converters
for elem in converters:
- tool_config = elem[0]
- source_datatype = elem[1]
- target_datatype = elem[2]
+ tool_config = elem[ 0 ]
+ source_datatype = elem[ 1 ]
+ target_datatype = elem[ 2 ]
if installed_repository_dict:
converter_path = installed_repository_dict[ 'converter_path' ]
else:
@@ -636,11 +672,11 @@
}
# super supertype fix for input steps in workflows.
if 'data' not in self.datatypes_by_extension:
- self.datatypes_by_extension['data'] = data.Data()
- self.mimetypes_by_extension['data'] = 'application/octet-stream'
+ self.datatypes_by_extension[ 'data' ] = data.Data()
+ self.mimetypes_by_extension[ 'data' ] = 'application/octet-stream'
# Default values - the order in which we attempt to determine data types is critical
# because some formats are much more flexibly defined than others.
- if len(self.sniff_order) < 1:
+ if len( self.sniff_order ) < 1:
self.sniff_order = [
binary.Bam(),
binary.Sff(),
@@ -666,27 +702,27 @@
tabular.Eland()
]
- def get_converters_by_datatype(self, ext):
+ def get_converters_by_datatype( self, ext ):
"""Returns available converters by source type"""
converters = odict()
- source_datatype = type(self.get_datatype_by_extension(ext))
+ source_datatype = type( self.get_datatype_by_extension( ext ) )
for ext2, dict in self.datatype_converters.items():
- converter_datatype = type(self.get_datatype_by_extension(ext2))
- if issubclass(source_datatype, converter_datatype):
- converters.update(dict)
+ converter_datatype = type( self.get_datatype_by_extension( ext2 ) )
+ if issubclass( source_datatype, converter_datatype ):
+ converters.update( dict )
#Ensure ext-level converters are present
if ext in self.datatype_converters.keys():
- converters.update(self.datatype_converters[ext])
+ converters.update( self.datatype_converters[ ext ] )
return converters
- def get_converter_by_target_type(self, source_ext, target_ext):
+ def get_converter_by_target_type( self, source_ext, target_ext ):
"""Returns a converter based on source and target datatypes"""
- converters = self.get_converters_by_datatype(source_ext)
+ converters = self.get_converters_by_datatype( source_ext )
if target_ext in converters.keys():
- return converters[target_ext]
+ return converters[ target_ext ]
return None
- def find_conversion_destination_for_dataset_by_extensions( self, dataset, accepted_formats, converter_safe = True ):
+ def find_conversion_destination_for_dataset_by_extensions( self, dataset, accepted_formats, converter_safe=True ):
"""Returns ( target_ext, existing converted dataset )"""
for convert_ext in self.get_converters_by_datatype( dataset.ext ):
if self.get_datatype_by_extension( convert_ext ).matches_any( accepted_formats ):
@@ -728,8 +764,7 @@
def to_xml_file( self ):
if self.xml_filename is not None:
- # If persisted previously, attempt to remove
- # the temporary file in which we were written.
+ # If persisted previously, attempt to remove the temporary file in which we were written.
try:
os.unlink( self.xml_filename )
except:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/82d876e06000/
Changeset: 82d876e06000
User: Dave Bouvier
Date: 2013-11-13 16:46:38
Summary: Restore files that were deleted due to a tool migration, but are still used in test/functional/test_get_data.py.
Affected #: 2 files
diff -r 95b5323cef58cab0cc76f9b392fe2c9c83254030 -r 82d876e06000b1d52ec65661da2b2245dd4e76ca test-data/1.bam
Binary file test-data/1.bam has changed
diff -r 95b5323cef58cab0cc76f9b392fe2c9c83254030 -r 82d876e06000b1d52ec65661da2b2245dd4e76ca test-data/3unsorted.bam
Binary file test-data/3unsorted.bam has changed
https://bitbucket.org/galaxy/galaxy-central/commits/17ee3fa154b3/
Changeset: 17ee3fa154b3
User: Dave Bouvier
Date: 2013-11-13 17:23:54
Summary: Fix error in test_get_data.py's url paste upload test.
Affected #: 1 file
diff -r 82d876e06000b1d52ec65661da2b2245dd4e76ca -r 17ee3fa154b35f856a79be1691c5ed60832618c6 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -224,9 +224,9 @@
"""Pasted data in the upload utility"""
self.visit_page( "tool_runner/index?tool_id=upload1" )
try:
- tc.fv( "1", "file_type", ftype )
- tc.fv( "1", "dbkey", dbkey )
- tc.fv( "1", "url_paste", url_paste )
+ self.refresh_form( "file_type", ftype ) #Refresh, to support composite files
+ tc.fv( "tool_form", "dbkey", dbkey )
+ tc.fv( "tool_form", "url_paste", url_paste )
tc.submit( "runtool_btn" )
self.home()
except Exception, e:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: carlfeberhard: History panel: allow break/wrap of title on non-whitespace
by commits-noreply@bitbucket.org 13 Nov '13
by commits-noreply@bitbucket.org 13 Nov '13
13 Nov '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/95b5323cef58/
Changeset: 95b5323cef58
User: carlfeberhard
Date: 2013-11-13 16:38:47
Summary: History panel: allow break/wrap of title on non-whitespace
Affected #: 2 files
diff -r d61de8f362929483c2a6a6cb8e7b7be77fa5cfa4 -r 95b5323cef58cab0cc76f9b392fe2c9c83254030 static/style/blue/base.css
--- a/static/style/blue/base.css
+++ b/static/style/blue/base.css
@@ -1540,7 +1540,7 @@
.dataset [class$=messagesmall]{margin:6px 10px 2px 8px;font-size:90%}
.dataset .help-text{font-weight:normal;font-style:italic;font-size:90%;color:#555}
.dataset .dataset-title-bar{cursor:pointer;padding:6px 10px 6px 8px}
-.dataset .dataset-title-bar .dataset-title{display:inline;font-weight:bold;text-decoration:underline;word-break:normal;line-height:16px}
+.dataset .dataset-title-bar .dataset-title{display:inline;font-weight:bold;text-decoration:underline;word-wrap:break-word;word-break:break-all;line-height:16px}
.dataset .dataset-primary-actions{float:right;margin:6px 10px 0}.dataset .dataset-primary-actions .icon-btn{margin-left:2px}
.dataset .dataset-body{display:none;background-color:rgba(255,255,255,0.30000000000000004);padding:6px 10px 6px 8px}.dataset .dataset-body [class$=messagesmall]{margin:0px 0px 8px 0px}
.dataset .dataset-body label{margin:0px;padding:0px;font-weight:normal}
diff -r d61de8f362929483c2a6a6cb8e7b7be77fa5cfa4 -r 95b5323cef58cab0cc76f9b392fe2c9c83254030 static/style/src/less/history.less
--- a/static/style/src/less/history.less
+++ b/static/style/src/less/history.less
@@ -257,7 +257,8 @@
display: inline;
font-weight: bold;
text-decoration: underline;
- word-break: normal;
+ word-wrap: break-word;
+ word-break: break-all;
line-height: 16px;
}
}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Clean up commit 11300:04663654f82d.
by commits-noreply@bitbucket.org 12 Nov '13
by commits-noreply@bitbucket.org 12 Nov '13
12 Nov '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d61de8f36292/
Changeset: d61de8f36292
User: Dave Bouvier
Date: 2013-11-12 22:08:56
Summary: Clean up commit 11300:04663654f82d.
Affected #: 2 files
diff -r 9f04c4d80165d1f5261caf736aa11dbd86e36de3 -r d61de8f362929483c2a6a6cb8e7b7be77fa5cfa4 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -108,7 +108,6 @@
if proprietary_converter_path or proprietary_display_path and not handling_proprietary_datatypes:
handling_proprietary_datatypes = True
for elem in registration.findall( 'datatype' ):
- self.log.debug( elem.attrib )
extension = elem.get( 'extension', None )
dtype = elem.get( 'type', None )
type_extension = elem.get( 'type_extension', None )
@@ -161,7 +160,6 @@
ok = False
if ok:
datatype_class = None
- self.log.debug( [ proprietary_path, proprietary_datatype_module, datatype_class_name ] )
if proprietary_path and proprietary_datatype_module and datatype_class_name:
# We need to change the value of sys.path, so do it in a way that is thread-safe.
lock = threading.Lock()
@@ -183,7 +181,6 @@
try:
# The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
fields = datatype_module.split( '.' )
- self.log.debug( fields )
module = __import__( fields.pop(0) )
for mod in fields:
module = getattr( module, mod )
@@ -196,7 +193,6 @@
# A new tool shed repository that contains proprietary datatypes is being installed, and since installation
# is occurring after the datatypes registry has been initialized, its contents cannot be overridden by new
# introduced conflicting data types.
- self.log.warning( "Ignoring conflicting datatype with extension '%s' from %s." % ( extension, config ) )
if make_subclass:
datatype_class = type( datatype_class_name, ( datatype_class, ), {} )
if extension in self.datatypes_by_extension:
@@ -243,6 +239,8 @@
else:
if elem not in self.display_app_containers:
self.display_app_containers.append( elem )
+ elif ( extension and ( dtype or type_extension ) ) and ( extension in self.datatypes_by_extension and not override ):
+ self.log.warning( "Ignoring conflicting datatype with extension '%s' from %s." % ( extension, config ) )
# Load datatype sniffers from the config
self.load_datatype_sniffers( root,
deactivate=deactivate,
diff -r 9f04c4d80165d1f5261caf736aa11dbd86e36de3 -r d61de8f362929483c2a6a6cb8e7b7be77fa5cfa4 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -54,7 +54,7 @@
common_install_util.activate_repository( trans, repository )
except Exception, e:
error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) )
- log.debug( error_message )
+ log.exception( error_message )
message = '%s.<br/>You may be able to resolve this by uninstalling and then reinstalling the repository. Click <a href="%s">here</a> to uninstall the repository.' \
% ( error_message, web.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) ) )
status = 'error'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Fix display of status images for installed tool shed repositories when using a proxy prefix.
by commits-noreply@bitbucket.org 12 Nov '13
by commits-noreply@bitbucket.org 12 Nov '13
12 Nov '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9f04c4d80165/
Changeset: 9f04c4d80165
User: Dave Bouvier
Date: 2013-11-12 21:36:13
Summary: Fix display of status images for installed tool shed repositories when using a proxy prefix.
Affected #: 1 file
diff -r 43231941e9705c69b2b6942c27b551ea6c12734c -r 9f04c4d80165d1f5261caf736aa11dbd86e36de3 lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
--- a/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
+++ b/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
@@ -2,6 +2,7 @@
from galaxy import model, util
from galaxy.web.framework.helpers import iff, grids
+from galaxy.web import url_for
from galaxy.model.orm import or_
import tool_shed.util.shed_util_common as suc
from tool_shed.util import tool_dependency_util
@@ -13,42 +14,42 @@
deprecated_tip_str = 'class="icon-button" title="This repository is deprecated in the Tool Shed"'
else:
deprecated_tip_str = ''
- return '<img src="/static/images/icon_error_sml.gif" %s/>' % deprecated_tip_str
+ return '<img src="%s/images/icon_error_sml.gif" %s/>' % ( url_for( '/static' ), deprecated_tip_str )
def generate_includes_workflows_img_str( include_mouse_over=False ):
if include_mouse_over:
deprecated_tip_str = 'class="icon-button" title="This repository contains exported workflows"'
else:
deprecated_tip_str = ''
- return '<img src="/static/images/fugue/gear.png" %s/>' % deprecated_tip_str
+ return '<img src="%s/images/fugue/gear.png" %s/>' % ( url_for( '/static' ), deprecated_tip_str )
def generate_latest_revision_img_str( include_mouse_over=False ):
if include_mouse_over:
latest_revision_tip_str = 'class="icon-button" title="This is the latest installable revision of this repository"'
else:
latest_revision_tip_str = ''
- return '<img src="/static/june_2007_style/blue/ok_small.png" %s/>' % latest_revision_tip_str
+ return '<img src="%s/june_2007_style/blue/ok_small.png" %s/>' % ( url_for( '/static' ), latest_revision_tip_str )
def generate_revision_updates_img_str( include_mouse_over=False ):
if include_mouse_over:
revision_updates_tip_str = 'class="icon-button" title="Updates are available in the Tool Shed for this revision"'
else:
revision_updates_tip_str = ''
- return '<img src="/static/images/icon_warning_sml.gif" %s/>' % revision_updates_tip_str
+ return '<img src="%s/images/icon_warning_sml.gif" %s/>' % ( url_for( '/static' ), revision_updates_tip_str )
def generate_revision_upgrades_img_str( include_mouse_over=False ):
if include_mouse_over:
revision_upgrades_tip_str = 'class="icon-button" title="A newer installable revision is available for this repository"'
else:
revision_upgrades_tip_str = ''
- return '<img src="/static/images/up.gif" %s/>' % revision_upgrades_tip_str
+ return '<img src="%s/images/up.gif" %s/>' % ( url_for( '/static' ), revision_upgrades_tip_str )
def generate_unknown_img_str( include_mouse_over=False ):
if include_mouse_over:
unknown_tip_str = 'class="icon-button" title="Unable to get information from the Tool Shed"'
else:
unknown_tip_str = ''
- return '<img src="/static/june_2007_style/blue/question-octagon-frame.png" %s/>' % unknown_tip_str
+ return '<img src="%s/june_2007_style/blue/question-octagon-frame.png" %s/>' % ( url_for( '/static' ), unknown_tip_str )
class InstalledRepositoryGrid( grids.Grid ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/04663654f82d/
Changeset: 04663654f82d
User: Dave Bouvier
Date: 2013-11-12 20:57:26
Summary: Clean up the load_datatypes method in the datatypes registry. Make the code pep8 compliant. Only load sniffers if they have not already been loaded. Do not load sniffers for datatypes that are being deactivated. Refactor loading of datatype sniffers into its own method. Improve exception handling when loading datatypes.
Affected #: 1 file
diff -r 41037d908be82d9790170cbd632924586f45735d -r 04663654f82d1f9401749503e321ecda5ce16275 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -1,16 +1,38 @@
"""
Provides mapping between extensions and datatypes, mime-types, etc.
"""
-import os, sys, tempfile, threading, logging, imp
-import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo, binary, assembly, ngsindex, graph
+import os
+import sys
+import tempfile
+import threading
+import logging
+import imp
+import data
+import tabular
+import interval
+import images
+import sequence
+import qualityscore
+import genetics
+import xml
+import coverage
+import tracks
+import chrominfo
+import binary
+import assembly
+import ngsindex
+import graph
import galaxy.util
from galaxy.util.odict import odict
from display_applications.application import DisplayApplication
+
class ConfigurationError( Exception ):
pass
+
class Registry( object ):
+
def __init__( self ):
self.log = logging.getLogger(__name__)
self.log.addHandler( logging.NullHandler() )
@@ -47,6 +69,7 @@
self.datatype_elems = []
self.sniffer_elems = []
self.xml_filename = None
+
def load_datatypes( self, root_dir=None, config=None, deactivate=False, override=True ):
"""
Parse a datatypes XML file located at root_dir/config. If deactivate is True, an installed tool shed
@@ -85,38 +108,34 @@
if proprietary_converter_path or proprietary_display_path and not handling_proprietary_datatypes:
handling_proprietary_datatypes = True
for elem in registration.findall( 'datatype' ):
- try:
- extension = elem.get( 'extension', None )
- dtype = elem.get( 'type', None )
- type_extension = elem.get( 'type_extension', None )
- mimetype = elem.get( 'mimetype', None )
- display_in_upload = galaxy.util.string_as_bool( elem.get( 'display_in_upload', False ) )
- make_subclass = galaxy.util.string_as_bool( elem.get( 'subclass', False ) )
- # Proprietary datatypes included in installed tool shed repositories will include two special attributes
- # (proprietary_path and proprietary_datatype_module) if they depend on proprietary datatypes classes.
- proprietary_path = elem.get( 'proprietary_path', None )
- proprietary_datatype_module = elem.get( 'proprietary_datatype_module', None )
- if proprietary_path or proprietary_datatype_module and not handling_proprietary_datatypes:
- handling_proprietary_datatypes = True
- if deactivate:
- # We are deactivating an installed tool shed repository, so eliminate the
- # datatype elem from the in-memory list of datatype elems.
- for in_memory_elem in self.datatype_elems:
- in_memory_extension = in_memory_elem.get( 'extension', None )
- if in_memory_extension == extension:
- in_memory_dtype = elem.get( 'type', None )
- in_memory_type_extension = elem.get( 'type_extension', None )
- in_memory_mimetype = elem.get( 'mimetype', None )
- in_memory_display_in_upload = galaxy.util.string_as_bool( elem.get( 'display_in_upload', False ) )
- in_memory_make_subclass = galaxy.util.string_as_bool( elem.get( 'subclass', False ) )
- if in_memory_dtype == dtype and in_memory_type_extension == type_extension and in_memory_mimetype == mimetype \
- and in_memory_display_in_upload == display_in_upload and in_memory_make_subclass == make_subclass:
- self.datatype_elems.remove( in_memory_elem )
- else:
- # Keep an in-memory list of datatype elems to enable persistence.
- if extension not in self.datatypes_by_extension:
- self.datatype_elems.append( elem )
- if extension and extension in self.datatypes_by_extension and deactivate:
+ self.log.debug( elem.attrib )
+ extension = elem.get( 'extension', None )
+ dtype = elem.get( 'type', None )
+ type_extension = elem.get( 'type_extension', None )
+ mimetype = elem.get( 'mimetype', None )
+ display_in_upload = galaxy.util.string_as_bool( elem.get( 'display_in_upload', False ) )
+ make_subclass = galaxy.util.string_as_bool( elem.get( 'subclass', False ) )
+ # Proprietary datatypes included in installed tool shed repositories will include two special attributes
+ # (proprietary_path and proprietary_datatype_module) if they depend on proprietary datatypes classes.
+ proprietary_path = elem.get( 'proprietary_path', None )
+ proprietary_datatype_module = elem.get( 'proprietary_datatype_module', None )
+ if proprietary_path or proprietary_datatype_module and not handling_proprietary_datatypes:
+ handling_proprietary_datatypes = True
+ if deactivate:
+ # We are deactivating an installed tool shed repository, so eliminate the
+ # datatype elem from the in-memory list of datatype elems.
+ for in_memory_elem in self.datatype_elems:
+ in_memory_extension = in_memory_elem.get( 'extension', None )
+ if in_memory_extension == extension:
+ in_memory_dtype = elem.get( 'type', None )
+ in_memory_type_extension = elem.get( 'type_extension', None )
+ in_memory_mimetype = elem.get( 'mimetype', None )
+ in_memory_display_in_upload = galaxy.util.string_as_bool( elem.get( 'display_in_upload', False ) )
+ in_memory_make_subclass = galaxy.util.string_as_bool( elem.get( 'subclass', False ) )
+ if in_memory_dtype == dtype and in_memory_type_extension == type_extension and in_memory_mimetype == mimetype \
+ and in_memory_display_in_upload == display_in_upload and in_memory_make_subclass == make_subclass:
+ self.datatype_elems.remove( in_memory_elem )
+ if extension and extension in self.datatypes_by_extension:
# We are deactivating an installed tool shed repository, so eliminate the datatype from the registry.
# TODO: Handle deactivating datatype converters, etc before removing from self.datatypes_by_extension.
self.log.debug( "Removing datatype with extension '%s' from the registry." % extension )
@@ -124,156 +143,432 @@
if extension in self.upload_file_formats:
self.upload_file_formats.remove( extension )
can_process_datatype = False
- else:
- can_process_datatype = ( extension and ( dtype or type_extension ) ) and ( extension not in self.datatypes_by_extension or override )
+ else:
+ # We are loading new datatypes. Keep an in-memory list of datatype elems to enable persistence.
+ if extension not in self.datatypes_by_extension:
+ self.datatype_elems.append( elem )
+ can_process_datatype = ( extension and ( dtype or type_extension ) ) and \
+ ( extension not in self.datatypes_by_extension or override )
if can_process_datatype:
+ ok = True
if dtype:
- fields = dtype.split( ':' )
- datatype_module = fields[0]
- datatype_class_name = fields[1]
- datatype_class = None
- if proprietary_path and proprietary_datatype_module and datatype_class_name:
- # We need to change the value of sys.path, so do it in a way that is thread-safe.
- lock = threading.Lock()
- lock.acquire( True )
- try:
- imported_module = __import_module( proprietary_path, proprietary_datatype_module, datatype_class_name )
- if imported_module not in self.imported_modules:
- self.imported_modules.append( imported_module )
- if hasattr( imported_module, datatype_class_name ):
- datatype_class = getattr( imported_module, datatype_class_name )
- except Exception, e:
- full_path = os.path.join( proprietary_path, proprietary_datatype_module )
- self.log.debug( "Exception importing proprietary code file %s: %s" % ( str( full_path ), str( e ) ) )
- finally:
- lock.release()
- if datatype_class is None:
- # The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
- fields = datatype_module.split( '.' )
- module = __import__( fields.pop(0) )
- for mod in fields:
- module = getattr( module, mod )
- datatype_class = getattr( module, datatype_class_name )
+ try:
+ fields = dtype.split( ':' )
+ datatype_module = fields[0]
+ datatype_class_name = fields[1]
+ except Exception, e:
+ self.log.exception( 'Error parsing datatype definition for dtype %s: %s' % ( str( dtype ), str( e ) ) )
+ ok = False
+ if ok:
+ datatype_class = None
+ self.log.debug( [ proprietary_path, proprietary_datatype_module, datatype_class_name ] )
+ if proprietary_path and proprietary_datatype_module and datatype_class_name:
+ # We need to change the value of sys.path, so do it in a way that is thread-safe.
+ lock = threading.Lock()
+ lock.acquire( True )
+ try:
+ imported_module = __import_module( proprietary_path,
+ proprietary_datatype_module,
+ datatype_class_name )
+ if imported_module not in self.imported_modules:
+ self.imported_modules.append( imported_module )
+ if hasattr( imported_module, datatype_class_name ):
+ datatype_class = getattr( imported_module, datatype_class_name )
+ except Exception, e:
+ full_path = os.path.join( proprietary_path, proprietary_datatype_module )
+ self.log.debug( "Exception importing proprietary code file %s: %s" % ( str( full_path ), str( e ) ) )
+ finally:
+ lock.release()
+ if datatype_class is None:
+ try:
+ # The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
+ fields = datatype_module.split( '.' )
+ self.log.debug( fields )
+ module = __import__( fields.pop(0) )
+ for mod in fields:
+ module = getattr( module, mod )
+ datatype_class = getattr( module, datatype_class_name )
+ except Exception, e:
+ self.log.exception( 'Error importing datatype module %s: %s' % ( str( datatype_module ), str( e ) ) )
elif type_extension:
- datatype_class = self.datatypes_by_extension[type_extension].__class__
- if make_subclass:
- datatype_class = type( datatype_class_name, (datatype_class,), {} )
- if extension in self.datatypes_by_extension:
- self.log.warning( "Overriding conflicting datatype with extension '%s', using datatype from %s." % ( extension, config ) )
- self.datatypes_by_extension[ extension ] = datatype_class()
- if mimetype is None:
- # Use default mime type as per datatype spec
- mimetype = self.datatypes_by_extension[ extension ].get_mime()
- self.mimetypes_by_extension[ extension ] = mimetype
- if datatype_class.track_type:
- self.available_tracks.append( extension )
- if display_in_upload and extension not in self.upload_file_formats:
- self.upload_file_formats.append( extension )
- # Max file size cut off for setting optional metadata
- self.datatypes_by_extension[ extension ].max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
- for converter in elem.findall( 'converter' ):
- # Build the list of datatype converters which will later be loaded into the calling app's toolbox.
- converter_config = converter.get( 'file', None )
- target_datatype = converter.get( 'target_datatype', None )
- depends_on = converter.get( 'depends_on', None )
- if depends_on and target_datatype:
- if extension not in self.converter_deps:
- self.converter_deps[extension] = {}
- self.converter_deps[extension][target_datatype] = depends_on.split(',')
- if converter_config and target_datatype:
+ datatype_class = self.datatypes_by_extension[ type_extension ].__class__
+ if not deactivate:
+ # A new tool shed repository that contains proprietary datatypes is being installed, and since installation
+ # is occurring after the datatypes registry has been initialized, its contents cannot be overridden by new
+ # introduced conflicting data types.
+ self.log.warning( "Ignoring conflicting datatype with extension '%s' from %s." % ( extension, config ) )
+ if make_subclass:
+ datatype_class = type( datatype_class_name, ( datatype_class, ), {} )
+ if extension in self.datatypes_by_extension:
+ self.log.warning( "Overriding conflicting datatype with extension '%s', using datatype from %s." % ( extension, config ) )
+ self.datatypes_by_extension[ extension ] = datatype_class()
+ if mimetype is None:
+ # Use default mime type as per datatype spec
+ mimetype = self.datatypes_by_extension[ extension ].get_mime()
+ self.mimetypes_by_extension[ extension ] = mimetype
+ if datatype_class.track_type:
+ self.available_tracks.append( extension )
+ if display_in_upload and extension not in self.upload_file_formats:
+ self.upload_file_formats.append( extension )
+ # Max file size cut off for setting optional metadata
+ self.datatypes_by_extension[ extension ].max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
+ for converter in elem.findall( 'converter' ):
+ # Build the list of datatype converters which will later be loaded into the calling app's toolbox.
+ converter_config = converter.get( 'file', None )
+ target_datatype = converter.get( 'target_datatype', None )
+ depends_on = converter.get( 'depends_on', None )
+ if depends_on and target_datatype:
+ if extension not in self.converter_deps:
+ self.converter_deps[extension] = {}
+ self.converter_deps[extension][target_datatype] = depends_on.split(',')
+ if converter_config and target_datatype:
+ #if imported_modules:
+ if proprietary_converter_path:
+ self.proprietary_converters.append( ( converter_config, extension, target_datatype ) )
+ else:
+ self.converters.append( ( converter_config, extension, target_datatype ) )
+ for composite_file in elem.findall( 'composite_file' ):
+ # add composite files
+ name = composite_file.get( 'name', None )
+ if name is None:
+ self.log.warning( "You must provide a name for your composite_file (%s)." % composite_file )
+ optional = composite_file.get( 'optional', False )
+ mimetype = composite_file.get( 'mimetype', None )
+ self.datatypes_by_extension[extension].add_composite_file( name, optional=optional, mimetype=mimetype )
+ for display_app in elem.findall( 'display' ):
#if imported_modules:
- if proprietary_converter_path:
- self.proprietary_converters.append( ( converter_config, extension, target_datatype ) )
+ if proprietary_display_path:
+ if elem not in self.proprietary_display_app_containers:
+ self.proprietary_display_app_containers.append( elem )
else:
- self.converters.append( ( converter_config, extension, target_datatype ) )
- for composite_file in elem.findall( 'composite_file' ):
- # add composite files
- name = composite_file.get( 'name', None )
- if name is None:
- self.log.warning( "You must provide a name for your composite_file (%s)." % composite_file )
- optional = composite_file.get( 'optional', False )
- mimetype = composite_file.get( 'mimetype', None )
- self.datatypes_by_extension[extension].add_composite_file( name, optional=optional, mimetype=mimetype )
- for display_app in elem.findall( 'display' ):
- #if imported_modules:
- if proprietary_display_path:
- if elem not in self.proprietary_display_app_containers:
- self.proprietary_display_app_containers.append( elem )
- else:
- if elem not in self.display_app_containers:
- self.display_app_containers.append( elem )
- elif not deactivate:
- # A new tool shed repository that contains proprietary datatypes is being installed, and since installation
- # is occurring after the datatypes registry has been initialized, its contents cannot be overridden by new
- # introduced conflicting data types.
- self.log.warning( "Ignoring conflicting datatype with extension '%s' from %s." % ( extension, config ) )
- except Exception, e:
- if deactivate:
- self.log.warning( "Error deactivating datatype with extension '%s': %s" % ( extension, str( e ) ) )
- else:
- self.log.warning( "Error loading datatype with extension '%s': %s" % ( extension, str( e ) ) )
+ if elem not in self.display_app_containers:
+ self.display_app_containers.append( elem )
# Load datatype sniffers from the config
- sniffers = root.find( 'sniffers' )
- if sniffers:
- for elem in sniffers.findall( 'sniffer' ):
- # Keep an in-memory list of sniffer elems to enable persistence.
- if elem not in self.sniffer_elems:
- self.sniffer_elems.append( elem )
- dtype = elem.get( 'type', None )
- if dtype:
- try:
- fields = dtype.split( ":" )
- datatype_module = fields[0]
- datatype_class_name = fields[1]
- module = None
- #if imported_modules:
- if handling_proprietary_datatypes:
- # See if one of the imported modules contains the datatype class name.
- for imported_module in self.imported_modules:
- if hasattr( imported_module, datatype_class_name ):
- module = imported_module
- break
- if module is None:
+ self.load_datatype_sniffers( root,
+ deactivate=deactivate,
+ handling_proprietary_datatypes=handling_proprietary_datatypes,
+ override=override )
+ self.upload_file_formats.sort()
+ # Persist the xml form of the registry into a temporary file so that it
+ # can be loaded from the command line by tools and set_metadata processing.
+ self.to_xml_file()
+ self.set_default_values()
+
+ def append_to_sniff_order():
+ # Just in case any supported data types are not included in the config's sniff_order section.
+ for ext in self.datatypes_by_extension:
+ datatype = self.datatypes_by_extension[ext]
+ included = False
+ for atype in self.sniff_order:
+ if isinstance(atype, datatype.__class__):
+ included = True
+ break
+ if not included:
+ self.sniff_order.append(datatype)
+ append_to_sniff_order()
+
+ def load_datatype_sniffers( self, root, deactivate=False, handling_proprietary_datatypes=False, override=False ):
+ # Load datatype sniffers from the received XML config
+ sniffer_elem_classes = [ e.attrib[ 'type' ] for e in self.sniffer_elems ]
+ sniffers = root.find( 'sniffers' )
+ if sniffers:
+ for elem in sniffers.findall( 'sniffer' ):
+ dtype = elem.get( 'type', None )
+ ok = True
+ if dtype:
+ try:
+ fields = dtype.split( ":" )
+ datatype_module = fields[0]
+ datatype_class_name = fields[1]
+ module = None
+ except Exception, e:
+ self.log.exception( 'Error determining datatype class or module for dtype %s: %s' % ( str( dtype ), str( e ) ) )
+ ok = False
+ if ok:
+ #if imported_modules:
+ if handling_proprietary_datatypes:
+ # See if one of the imported modules contains the datatype class name.
+ for imported_module in self.imported_modules:
+ if hasattr( imported_module, datatype_class_name ):
+ module = imported_module
+ break
+ if module is None:
+ try:
# The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
module = __import__( datatype_module )
for comp in datatype_module.split( '.' )[ 1: ]:
module = getattr( module, comp )
- aclass = getattr( module, datatype_class_name )()
- if deactivate:
- if elem in self.sniffer_elems:
- self.sniffer_elems.remove( elem )
- for sniffer_class in self.sniff_order:
- if sniffer_class.__class__ == aclass.__class__:
- self.sniff_order.remove( sniffer_class )
- break
- self.log.debug( "Deactivated sniffer for datatype '%s'" % dtype )
- else:
- # See if we have a conflicting sniffer already loaded.
- conflict = False
- for conflict_loc, sniffer_class in enumerate( self.sniff_order ):
- if sniffer_class.__class__ == aclass.__class__:
- # We have a conflicting sniffer, so replace the one previously loaded.
- conflict = True
+ except Exception, e:
+ self.log.exception( "Error importing datatype class for '%s': %s" % ( str( dtype ), str( e ) ) )
+ ok = False
+ if ok:
+ try:
+ aclass = getattr( module, datatype_class_name )()
+ except Exception, e:
+ self.log.exception( 'Error calling method %s from class %s: %s' % ( str( datatype_class_name ), str( module ), str( e ) ) )
+ ok = False
+ if ok:
+ if deactivate:
+ sniffer_class = elem.get( 'type', None )
+ if sniffer_class is not None:
+ for index, s_e_c in enumerate( sniffer_elem_classes ):
+ if sniffer_class == s_e_c:
+ del self.sniffer_elems[ index ]
+ self.log.debug( "Deactivated sniffer for datatype '%s'" % dtype )
+ break
+ for sniffer_class in self.sniff_order:
+ if sniffer_class.__class__ == aclass.__class__:
+ self.sniff_order.remove( sniffer_class )
+ self.log.debug( "Removed sniffer class for datatype '%s' from sniff order" % dtype )
+ break
+ else:
+ # Keep an in-memory list of sniffer elems to enable persistence.
+ if elem not in self.sniffer_elems:
+ self.sniffer_elems.append( elem )
+ # See if we have a conflicting sniffer already loaded.
+ conflict = False
+ for conflict_loc, sniffer_class in enumerate( self.sniff_order ):
+ if sniffer_class.__class__ == aclass.__class__:
+ # We have a conflicting sniffer, so replace the one previously loaded.
+ conflict = True
+ if override:
+ del self.sniff_order[ conflict_loc ]
+ self.log.debug( "Replaced conflicting sniffer for datatype '%s'" % dtype )
+ break
+ if conflict:
if override:
- del self.sniff_order[ conflict_loc ]
- self.log.debug( "Replaced conflicting sniffer for datatype '%s'" % dtype )
- break
- if conflict:
- if override:
+ self.sniff_order.append( aclass )
+ self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
+ else:
self.sniff_order.append( aclass )
self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
- else:
- self.sniff_order.append( aclass )
- self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
- except Exception, exc:
+
+ def get_datatype_class_by_name( self, name ):
+ """
+ Return the datatype class where the datatype's `type` attribute
+ (as defined in the datatypes_conf.xml file) contains `name`.
+ """
+ #TODO: too roundabout - would be better to generate this once as a map and store in this object
+ found_class = None
+ for ext, datatype_obj in self.datatypes_by_extension.items():
+ datatype_obj_class = datatype_obj.__class__
+ datatype_obj_class_str = str( datatype_obj_class )
+ #print datatype_obj_class_str
+ if name in datatype_obj_class_str:
+ return datatype_obj_class
+ return None
+ # these seem to be connected to the dynamic classes being generated in this file, lines 157-158
+ # they appear when one of the three is used in inheritance with subclass="True"
+ #TODO: a possible solution is to def a fn in datatypes __init__ for creating the dynamic classes
+
+ #remap = {
+ # 'galaxy.datatypes.registry.Tabular' : galaxy.datatypes.tabular.Tabular,
+ # 'galaxy.datatypes.registry.Text' : galaxy.datatypes.data.Text,
+ # 'galaxy.datatypes.registry.Binary' : galaxy.datatypes.binary.Binary
+ #}
+ #datatype_str = str( datatype )
+ #if datatype_str in remap:
+ # datatype = remap[ datatype_str ]
+ #
+ #return datatype
+
+ def get_available_tracks(self):
+ return self.available_tracks
+
+ def get_mimetype_by_extension(self, ext, default = 'application/octet-stream' ):
+ """Returns a mimetype based on an extension"""
+ try:
+ mimetype = self.mimetypes_by_extension[ext]
+ except KeyError:
+ #datatype was never declared
+ mimetype = default
+ self.log.warning('unknown mimetype in data factory %s' % ext)
+ return mimetype
+
+ def get_datatype_by_extension(self, ext ):
+ """Returns a datatype based on an extension"""
+ try:
+ builder = self.datatypes_by_extension[ext]
+ except KeyError:
+ builder = data.Text()
+ return builder
+
+ def change_datatype(self, data, ext):
+ data.extension = ext
+ # call init_meta and copy metadata from itself. The datatype
+ # being converted *to* will handle any metadata copying and
+ # initialization.
+ if data.has_data():
+ data.set_size()
+ data.init_meta( copy_from=data )
+ return data
+
+ def old_change_datatype(self, data, ext):
+ """Creates and returns a new datatype based on an existing data and an extension"""
+ newdata = factory(ext)(id=data.id)
+ for key, value in data.__dict__.items():
+ setattr(newdata, key, value)
+ newdata.ext = ext
+ return newdata
+
+ def load_datatype_converters( self, toolbox, installed_repository_dict=None, deactivate=False ):
+ """
+ If deactivate is False, add datatype converters from self.converters or self.proprietary_converters
+ to the calling app's toolbox. If deactivate is True, eliminates relevant converters from the calling
+ app's toolbox.
+ """
+ if installed_repository_dict:
+ # Load converters defined by datatypes_conf.xml included in installed tool shed repository.
+ converters = self.proprietary_converters
+ else:
+ # Load converters defined by local datatypes_conf.xml.
+ converters = self.converters
+ for elem in converters:
+ tool_config = elem[0]
+ source_datatype = elem[1]
+ target_datatype = elem[2]
+ if installed_repository_dict:
+ converter_path = installed_repository_dict[ 'converter_path' ]
+ else:
+ converter_path = self.converters_path
+ try:
+ config_path = os.path.join( converter_path, tool_config )
+ converter = toolbox.load_tool( config_path )
+ if installed_repository_dict:
+ # If the converter is included in an installed tool shed repository, set the tool
+ # shed related tool attributes.
+ converter.tool_shed = installed_repository_dict[ 'tool_shed' ]
+ converter.repository_name = installed_repository_dict[ 'repository_name' ]
+ converter.repository_owner = installed_repository_dict[ 'repository_owner' ]
+ converter.installed_changeset_revision = installed_repository_dict[ 'installed_changeset_revision' ]
+ converter.old_id = converter.id
+ # The converter should be included in the list of tools defined in tool_dicts.
+ tool_dicts = installed_repository_dict[ 'tool_dicts' ]
+ for tool_dict in tool_dicts:
+ if tool_dict[ 'id' ] == converter.id:
+ converter.guid = tool_dict[ 'guid' ]
+ converter.id = tool_dict[ 'guid' ]
+ break
+ if deactivate:
+ if converter.id in toolbox.tools_by_id:
+ del toolbox.tools_by_id[ converter.id ]
+ if source_datatype in self.datatype_converters:
+ if target_datatype in self.datatype_converters[ source_datatype ]:
+ del self.datatype_converters[ source_datatype ][ target_datatype ]
+ self.log.debug( "Deactivated converter: %s", converter.id )
+ else:
+ toolbox.tools_by_id[ converter.id ] = converter
+ if source_datatype not in self.datatype_converters:
+ self.datatype_converters[ source_datatype ] = odict()
+ self.datatype_converters[ source_datatype ][ target_datatype ] = converter
+ self.log.debug( "Loaded converter: %s", converter.id )
+ except Exception, e:
+ if deactivate:
+ self.log.exception( "Error deactivating converter from (%s): %s" % ( converter_path, str( e ) ) )
+ else:
+ self.log.exception( "Error loading converter (%s): %s" % ( converter_path, str( e ) ) )
+
+ def load_display_applications( self, installed_repository_dict=None, deactivate=False ):
+ """
+ If deactivate is False, add display applications from self.display_app_containers or
+ self.proprietary_display_app_containers to appropriate datatypes. If deactivate is
+ True, eliminates relevant display applications from appropriate datatypes.
+ """
+ if installed_repository_dict:
+ # Load display applications defined by datatypes_conf.xml included in installed tool shed repository.
+ datatype_elems = self.proprietary_display_app_containers
+ else:
+ # Load display applications defined by local datatypes_conf.xml.
+ datatype_elems = self.display_app_containers
+ for elem in datatype_elems:
+ extension = elem.get( 'extension', None )
+ for display_app in elem.findall( 'display' ):
+ display_file = display_app.get( 'file', None )
+ if installed_repository_dict:
+ display_path = installed_repository_dict[ 'display_path' ]
+ display_file_head, display_file_tail = os.path.split( display_file )
+ config_path = os.path.join( display_path, display_file_tail )
+ else:
+ config_path = os.path.join( self.display_applications_path, display_file )
+ try:
+ inherit = galaxy.util.string_as_bool( display_app.get( 'inherit', 'False' ) )
+ display_app = DisplayApplication.from_file( config_path, self )
+ if display_app:
+ if display_app.id in self.display_applications:
if deactivate:
- self.log.warning( "Error deactivating sniffer for datatype '%s': %s" % ( dtype, str( exc ) ) )
+ del self.display_applications[ display_app.id ]
else:
- self.log.warning( "Error appending sniffer for datatype '%s' to sniff_order: %s" % ( dtype, str( exc ) ) )
- self.upload_file_formats.sort()
- # Persist the xml form of the registry into a temporary file so that it
- # can be loaded from the command line by tools and set_metadata processing.
- self.to_xml_file()
+ # If we already loaded this display application, we'll use the first one loaded.
+ display_app = self.display_applications[ display_app.id ]
+ elif installed_repository_dict:
+ # If the display application is included in an installed tool shed repository,
+ # set the tool shed related tool attributes.
+ display_app.tool_shed = installed_repository_dict[ 'tool_shed' ]
+ display_app.repository_name = installed_repository_dict[ 'repository_name' ]
+ display_app.repository_owner = installed_repository_dict[ 'repository_owner' ]
+ display_app.installed_changeset_revision = installed_repository_dict[ 'installed_changeset_revision' ]
+ display_app.old_id = display_app.id
+ # The display application should be included in the list of tools defined in tool_dicts.
+ tool_dicts = installed_repository_dict[ 'tool_dicts' ]
+ for tool_dict in tool_dicts:
+ if tool_dict[ 'id' ] == display_app.id:
+ display_app.guid = tool_dict[ 'guid' ]
+ display_app.id = tool_dict[ 'guid' ]
+ break
+ if deactivate:
+ if display_app.id in self.display_applications:
+ del self.display_applications[ display_app.id ]
+ if extension in self.datatypes_by_extension:
+ if display_app.id in self.datatypes_by_extension[ extension ].display_applications:
+ del self.datatypes_by_extension[ extension ].display_applications[ display_app.id ]
+ if inherit and ( self.datatypes_by_extension[ extension ], display_app ) in self.inherit_display_application_by_class:
+ self.inherit_display_application_by_class.remove( ( self.datatypes_by_extension[ extension ], display_app ) )
+ self.log.debug( "Deactivated display application '%s' for datatype '%s'." % ( display_app.id, extension ) )
+ else:
+ self.display_applications[ display_app.id ] = display_app
+ self.datatypes_by_extension[ extension ].add_display_application( display_app )
+ if inherit and ( self.datatypes_by_extension[ extension ], display_app ) not in self.inherit_display_application_by_class:
+ self.inherit_display_application_by_class.append( ( self.datatypes_by_extension[ extension ], display_app ) )
+ self.log.debug( "Loaded display application '%s' for datatype '%s', inherit=%s." % ( display_app.id, extension, inherit ) )
+ except Exception, e:
+ if deactivate:
+ self.log.exception( "Error deactivating display application (%s): %s" % ( config_path, str( e ) ) )
+ else:
+ self.log.exception( "Error loading display application (%s): %s" % ( config_path, str( e ) ) )
+ # Handle display_application subclass inheritance.
+ for extension, d_type1 in self.datatypes_by_extension.iteritems():
+ for d_type2, display_app in self.inherit_display_application_by_class:
+ current_app = d_type1.get_display_application( display_app.id, None )
+ if current_app is None and isinstance( d_type1, type( d_type2 ) ):
+ self.log.debug( "Adding inherited display application '%s' to datatype '%s'" % ( display_app.id, extension ) )
+ d_type1.add_display_application( display_app )
+
+ def load_external_metadata_tool( self, toolbox ):
+ """Adds a tool which is used to set external metadata"""
+ # We need to be able to add a job to the queue to set metadata. The queue will currently only accept jobs with an associated
+ # tool. We'll create a special tool to be used for Auto-Detecting metadata; this is less than ideal, but effective
+ # Properly building a tool without relying on parsing an XML file is near impossible...so we'll create a temporary file
+ tool_xml_text = """
+ <tool id="__SET_METADATA__" name="Set External Metadata" version="1.0.1" tool_type="set_metadata">
+ <type class="SetMetadataTool" module="galaxy.tools"/>
+ <action module="galaxy.tools.actions.metadata" class="SetMetadataToolAction"/>
+ <command>$__SET_EXTERNAL_METADATA_COMMAND_LINE__</command>
+ <inputs>
+ <param format="data" name="input1" type="data" label="File to set metadata on."/>
+ <param name="__ORIGINAL_DATASET_STATE__" type="hidden" value=""/>
+ <param name="__SET_EXTERNAL_METADATA_COMMAND_LINE__" type="hidden" value=""/>
+ </inputs>
+ </tool>
+ """
+ tmp_name = tempfile.NamedTemporaryFile()
+ tmp_name.write( tool_xml_text )
+ tmp_name.flush()
+ set_meta_tool = toolbox.load_tool( tmp_name.name )
+ toolbox.tools_by_id[ set_meta_tool.id ] = set_meta_tool
+ self.set_external_metadata_tool = set_meta_tool
+ self.log.debug( "Loaded external metadata tool: %s", self.set_external_metadata_tool.id )
+
+ def set_default_values( self ):
# Default values.
if not self.datatypes_by_extension:
self.datatypes_by_extension = {
@@ -372,236 +667,7 @@
tabular.Sam(),
tabular.Eland()
]
- def append_to_sniff_order():
- # Just in case any supported data types are not included in the config's sniff_order section.
- for ext in self.datatypes_by_extension:
- datatype = self.datatypes_by_extension[ext]
- included = False
- for atype in self.sniff_order:
- if isinstance(atype, datatype.__class__):
- included = True
- break
- if not included:
- self.sniff_order.append(datatype)
- append_to_sniff_order()
- def get_datatype_class_by_name( self, name ):
- """
- Return the datatype class where the datatype's `type` attribute
- (as defined in the datatype_conf.xml file) contains `name`.
- """
- #TODO: too roundabout - would be better to generate this once as a map and store in this object
- found_class = None
- for ext, datatype_obj in self.datatypes_by_extension.items():
- datatype_obj_class = datatype_obj.__class__
- datatype_obj_class_str = str( datatype_obj_class )
- #print datatype_obj_class_str
- if name in datatype_obj_class_str:
- return datatype_obj_class
- return None
- # these seem to be connected to the dynamic classes being generated in this file, lines 157-158
- # they appear when a one of the three are used in inheritance with subclass="True"
- #TODO: a possible solution is to def a fn in datatypes __init__ for creating the dynamic classes
-
- #remap = {
- # 'galaxy.datatypes.registry.Tabular' : galaxy.datatypes.tabular.Tabular,
- # 'galaxy.datatypes.registry.Text' : galaxy.datatypes.data.Text,
- # 'galaxy.datatypes.registry.Binary' : galaxy.datatypes.binary.Binary
- #}
- #datatype_str = str( datatype )
- #if datatype_str in remap:
- # datatype = remap[ datatype_str ]
- #
- #return datatype
-
- def get_available_tracks(self):
- return self.available_tracks
- def get_mimetype_by_extension(self, ext, default = 'application/octet-stream' ):
- """Returns a mimetype based on an extension"""
- try:
- mimetype = self.mimetypes_by_extension[ext]
- except KeyError:
- #datatype was never declared
- mimetype = default
- self.log.warning('unknown mimetype in data factory %s' % ext)
- return mimetype
- def get_datatype_by_extension(self, ext ):
- """Returns a datatype based on an extension"""
- try:
- builder = self.datatypes_by_extension[ext]
- except KeyError:
- builder = data.Text()
- return builder
- def change_datatype(self, data, ext):
- data.extension = ext
- # call init_meta and copy metadata from itself. The datatype
- # being converted *to* will handle any metadata copying and
- # initialization.
- if data.has_data():
- data.set_size()
- data.init_meta( copy_from=data )
- return data
- def old_change_datatype(self, data, ext):
- """Creates and returns a new datatype based on an existing data and an extension"""
- newdata = factory(ext)(id=data.id)
- for key, value in data.__dict__.items():
- setattr(newdata, key, value)
- newdata.ext = ext
- return newdata
- def load_datatype_converters( self, toolbox, installed_repository_dict=None, deactivate=False ):
- """
- If deactivate is False, add datatype converters from self.converters or self.proprietary_converters
- to the calling app's toolbox. If deactivate is True, eliminates relevant converters from the calling
- app's toolbox.
- """
- if installed_repository_dict:
- # Load converters defined by datatypes_conf.xml included in installed tool shed repository.
- converters = self.proprietary_converters
- else:
- # Load converters defined by local datatypes_conf.xml.
- converters = self.converters
- for elem in converters:
- tool_config = elem[0]
- source_datatype = elem[1]
- target_datatype = elem[2]
- if installed_repository_dict:
- converter_path = installed_repository_dict[ 'converter_path' ]
- else:
- converter_path = self.converters_path
- try:
- config_path = os.path.join( converter_path, tool_config )
- converter = toolbox.load_tool( config_path )
- if installed_repository_dict:
- # If the converter is included in an installed tool shed repository, set the tool
- # shed related tool attributes.
- converter.tool_shed = installed_repository_dict[ 'tool_shed' ]
- converter.repository_name = installed_repository_dict[ 'repository_name' ]
- converter.repository_owner = installed_repository_dict[ 'repository_owner' ]
- converter.installed_changeset_revision = installed_repository_dict[ 'installed_changeset_revision' ]
- converter.old_id = converter.id
- # The converter should be included in the list of tools defined in tool_dicts.
- tool_dicts = installed_repository_dict[ 'tool_dicts' ]
- for tool_dict in tool_dicts:
- if tool_dict[ 'id' ] == converter.id:
- converter.guid = tool_dict[ 'guid' ]
- converter.id = tool_dict[ 'guid' ]
- break
- if deactivate:
- if converter.id in toolbox.tools_by_id:
- del toolbox.tools_by_id[ converter.id ]
- if source_datatype in self.datatype_converters:
- if target_datatype in self.datatype_converters[ source_datatype ]:
- del self.datatype_converters[ source_datatype ][ target_datatype ]
- self.log.debug( "Deactivated converter: %s", converter.id )
- else:
- toolbox.tools_by_id[ converter.id ] = converter
- if source_datatype not in self.datatype_converters:
- self.datatype_converters[ source_datatype ] = odict()
- self.datatype_converters[ source_datatype ][ target_datatype ] = converter
- self.log.debug( "Loaded converter: %s", converter.id )
- except Exception, e:
- if deactivate:
- self.log.exception( "Error deactivating converter from (%s): %s" % ( converter_path, str( e ) ) )
- else:
- self.log.exception( "Error loading converter (%s): %s" % ( converter_path, str( e ) ) )
- def load_display_applications( self, installed_repository_dict=None, deactivate=False ):
- """
- If deactivate is False, add display applications from self.display_app_containers or
- self.proprietary_display_app_containers to appropriate datatypes. If deactivate is
- True, eliminates relevant display applications from appropriate datatypes.
- """
- if installed_repository_dict:
- # Load display applications defined by datatypes_conf.xml included in installed tool shed repository.
- datatype_elems = self.proprietary_display_app_containers
- else:
- # Load display applications defined by local datatypes_conf.xml.
- datatype_elems = self.display_app_containers
- for elem in datatype_elems:
- extension = elem.get( 'extension', None )
- for display_app in elem.findall( 'display' ):
- display_file = display_app.get( 'file', None )
- if installed_repository_dict:
- display_path = installed_repository_dict[ 'display_path' ]
- display_file_head, display_file_tail = os.path.split( display_file )
- config_path = os.path.join( display_path, display_file_tail )
- else:
- config_path = os.path.join( self.display_applications_path, display_file )
- try:
- inherit = galaxy.util.string_as_bool( display_app.get( 'inherit', 'False' ) )
- display_app = DisplayApplication.from_file( config_path, self )
- if display_app:
- if display_app.id in self.display_applications:
- if deactivate:
- del self.display_applications[ display_app.id ]
- else:
- # If we already loaded this display application, we'll use the first one loaded.
- display_app = self.display_applications[ display_app.id ]
- elif installed_repository_dict:
- # If the display application is included in an installed tool shed repository,
- # set the tool shed related tool attributes.
- display_app.tool_shed = installed_repository_dict[ 'tool_shed' ]
- display_app.repository_name = installed_repository_dict[ 'repository_name' ]
- display_app.repository_owner = installed_repository_dict[ 'repository_owner' ]
- display_app.installed_changeset_revision = installed_repository_dict[ 'installed_changeset_revision' ]
- display_app.old_id = display_app.id
- # The display application should be included in the list of tools defined in tool_dicts.
- tool_dicts = installed_repository_dict[ 'tool_dicts' ]
- for tool_dict in tool_dicts:
- if tool_dict[ 'id' ] == display_app.id:
- display_app.guid = tool_dict[ 'guid' ]
- display_app.id = tool_dict[ 'guid' ]
- break
- if deactivate:
- if display_app.id in self.display_applications:
- del self.display_applications[ display_app.id ]
- if extension in self.datatypes_by_extension:
- if display_app.id in self.datatypes_by_extension[ extension ].display_applications:
- del self.datatypes_by_extension[ extension ].display_applications[ display_app.id ]
- if inherit and ( self.datatypes_by_extension[ extension ], display_app ) in self.inherit_display_application_by_class:
- self.inherit_display_application_by_class.remove( ( self.datatypes_by_extension[ extension ], display_app ) )
- self.log.debug( "Deactivated display application '%s' for datatype '%s'." % ( display_app.id, extension ) )
- else:
- self.display_applications[ display_app.id ] = display_app
- self.datatypes_by_extension[ extension ].add_display_application( display_app )
- if inherit and ( self.datatypes_by_extension[ extension ], display_app ) not in self.inherit_display_application_by_class:
- self.inherit_display_application_by_class.append( ( self.datatypes_by_extension[ extension ], display_app ) )
- self.log.debug( "Loaded display application '%s' for datatype '%s', inherit=%s." % ( display_app.id, extension, inherit ) )
- except Exception, e:
- if deactivate:
- self.log.exception( "Error deactivating display application (%s): %s" % ( config_path, str( e ) ) )
- else:
- self.log.exception( "Error loading display application (%s): %s" % ( config_path, str( e ) ) )
- # Handle display_application subclass inheritance.
- for extension, d_type1 in self.datatypes_by_extension.iteritems():
- for d_type2, display_app in self.inherit_display_application_by_class:
- current_app = d_type1.get_display_application( display_app.id, None )
- if current_app is None and isinstance( d_type1, type( d_type2 ) ):
- self.log.debug( "Adding inherited display application '%s' to datatype '%s'" % ( display_app.id, extension ) )
- d_type1.add_display_application( display_app )
- def load_external_metadata_tool( self, toolbox ):
- """Adds a tool which is used to set external metadata"""
- # We need to be able to add a job to the queue to set metadata. The queue will currently only accept jobs with an associated
- # tool. We'll create a special tool to be used for Auto-Detecting metadata; this is less than ideal, but effective
- # Properly building a tool without relying on parsing an XML file is near impossible...so we'll create a temporary file
- tool_xml_text = """
- <tool id="__SET_METADATA__" name="Set External Metadata" version="1.0.1" tool_type="set_metadata">
- <type class="SetMetadataTool" module="galaxy.tools"/>
- <action module="galaxy.tools.actions.metadata" class="SetMetadataToolAction"/>
- <command>$__SET_EXTERNAL_METADATA_COMMAND_LINE__</command>
- <inputs>
- <param format="data" name="input1" type="data" label="File to set metadata on."/>
- <param name="__ORIGINAL_DATASET_STATE__" type="hidden" value=""/>
- <param name="__SET_EXTERNAL_METADATA_COMMAND_LINE__" type="hidden" value=""/>
- </inputs>
- </tool>
- """
- tmp_name = tempfile.NamedTemporaryFile()
- tmp_name.write( tool_xml_text )
- tmp_name.flush()
- set_meta_tool = toolbox.load_tool( tmp_name.name )
- toolbox.tools_by_id[ set_meta_tool.id ] = set_meta_tool
- self.set_external_metadata_tool = set_meta_tool
- self.log.debug( "Loaded external metadata tool: %s", self.set_external_metadata_tool.id )
def get_converters_by_datatype(self, ext):
"""Returns available converters by source type"""
converters = odict()
@@ -614,12 +680,14 @@
if ext in self.datatype_converters.keys():
converters.update(self.datatype_converters[ext])
return converters
+
def get_converter_by_target_type(self, source_ext, target_ext):
"""Returns a converter based on source and target datatypes"""
converters = self.get_converters_by_datatype(source_ext)
if target_ext in converters.keys():
return converters[target_ext]
return None
+
def find_conversion_destination_for_dataset_by_extensions( self, dataset, accepted_formats, converter_safe = True ):
"""Returns ( target_ext, existing converted dataset )"""
for convert_ext in self.get_converters_by_datatype( dataset.ext ):
@@ -633,8 +701,10 @@
ret_data = None
return ( convert_ext, ret_data )
return ( None, None )
+
def get_composite_extensions( self ):
return [ ext for ( ext, d_type ) in self.datatypes_by_extension.iteritems() if d_type.composite_type is not None ]
+
def get_upload_metadata_params( self, context, group, tool ):
"""Returns dict of case value:inputs for metadata conditional for upload tool"""
rval = {}
@@ -650,12 +720,14 @@
if 'auto' not in rval and 'txt' in rval: #need to manually add 'auto' datatype
rval[ 'auto' ] = rval[ 'txt' ]
return rval
+
@property
def integrated_datatypes_configs( self ):
if self.xml_filename and os.path.isfile( self.xml_filename ):
return self.xml_filename
self.to_xml_file()
return self.xml_filename
+
def to_xml_file( self ):
if self.xml_filename is not None:
# If persisted previously, attempt to remove
https://bitbucket.org/galaxy/galaxy-central/commits/393265d72786/
Changeset: 393265d72786
User: Dave Bouvier
Date: 2013-11-12 20:59:02
Summary: Update the tool dependency installation process to mark a tool dependency as installed if the path exists, contains the installation log file, and app.config.running_functional_tests is True. This enables caching of compiled or downloaded tool dependencies when running the EC2 installation and testing framework.
Affected #: 1 file
diff -r 04663654f82d1f9401749503e321ecda5ce16275 -r 393265d727868689e0f1c14d864a18dab66ea068 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -337,6 +337,18 @@
print '\nSkipping installation of tool dependency', package_name, 'version', package_version, \
'since it is installed in', install_dir, '\n'
can_install_tool_dependency = False
+ # This tool dependency was previously installed, but the record was missing from the database due to some
+ # activity outside of the control of the tool shed. Since a new record was created for it and we don't know
+ # the state of the files on disk, we will set it to an error state. If we are running functional tests, the
+ # state will be set to Installed, because previously compiled tool dependencies are not deleted by default.
+ if app.config.running_functional_tests:
+ tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
+ else:
+ error_message = 'The installation directory for this tool dependency had contents, but the database had no record. '
+ error_message += 'The installation log may show this tool dependency to be correctly installed, but due to the '
+ error_message += 'missing database record, it is automatically set to Error.'
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = error_message
else:
error_message = '\nInstallation path %s for tool dependency %s version %s exists, but the expected file %s' % \
( install_dir, package_name, package_version, fabric_util.INSTALLATION_LOG )
https://bitbucket.org/galaxy/galaxy-central/commits/43231941e970/
Changeset: 43231941e970
User: Dave Bouvier
Date: 2013-11-12 20:59:32
Summary: Fix missing blast.py in blast_datatypes repository tarball for tool shed functional tests.
Affected #: 1 file
diff -r 393265d727868689e0f1c14d864a18dab66ea068 -r 43231941e9705c69b2b6942c27b551ea6c12734c test/tool_shed/test_data/blast/blast_datatypes.tar
Binary file test/tool_shed/test_data/blast/blast_datatypes.tar has changed
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/480d5127f3ea/
Changeset: 480d5127f3ea
User: jmchilton
Date: 2013-11-12 15:59:39
Summary: Only show Manage Quotas if quotas are enabled.
Affected #: 1 file
diff -r cd12bf2ed5475c8c6ab72321e5865441c0805bd2 -r 480d5127f3ea57026b14bb560debdb2356b30b59 templates/webapps/galaxy/admin/index.mako
--- a/templates/webapps/galaxy/admin/index.mako
+++ b/templates/webapps/galaxy/admin/index.mako
@@ -52,7 +52,7 @@
<div class="toolTitle"><a href="${h.url_for( controller='admin', action='users' )}" target="galaxy_main">Manage users</a></div><div class="toolTitle"><a href="${h.url_for( controller='admin', action='groups' )}" target="galaxy_main">Manage groups</a></div><div class="toolTitle"><a href="${h.url_for( controller='admin', action='roles' )}" target="galaxy_main">Manage roles</a></div>
- <div class="toolTitle"><a href="${h.url_for( controller='userskeys', action='all_users' )}" target="galaxy_main">Manage users API keys</a></div>
+ <div class="toolTitle"><a href="${h.url_for( controller='userskeys', action='all_users' )}" target="galaxy_main">Manage users API keys</a></div>
%if trans.app.config.allow_user_impersonation:
<div class="toolTitle"><a href="${h.url_for( controller='admin', action='impersonate' )}" target="galaxy_main">Impersonate a user</a></div>
%endif
@@ -62,7 +62,9 @@
<div class="toolSectionTitle">Data</div><div class="toolSectionBody"><div class="toolSectionBg">
- <div class="toolTitle"><a href="${h.url_for( controller='admin', action='quotas' )}" target="galaxy_main">Manage quotas</a></div>
+ %if trans.app.config.enable_quotas:
+ <div class="toolTitle"><a href="${h.url_for( controller='admin', action='quotas' )}" target="galaxy_main">Manage quotas</a></div>
+ %endif
<div class="toolTitle"><a href="${h.url_for( controller='library_admin', action='browse_libraries' )}" target="galaxy_main">Manage data libraries</a></div>
%if trans.app.config.enable_beta_job_managers:
<div class="toolTitle"><a href="${h.url_for( controller='data_admin', action='manage_data' )}" target="galaxy_main">Manage local data</a></div>
https://bitbucket.org/galaxy/galaxy-central/commits/41037d908be8/
Changeset: 41037d908be8
User: jmchilton
Date: 2013-11-12 20:21:17
Summary: Merged in jmchilton/galaxy-central-fork-1 (pull request #258)
Only show Manage Quotas if quotas are enabled.
Affected #: 1 file
diff -r 7b60d7eb93520983f9ff1a731f62eac88e2979bc -r 41037d908be82d9790170cbd632924586f45735d templates/webapps/galaxy/admin/index.mako
--- a/templates/webapps/galaxy/admin/index.mako
+++ b/templates/webapps/galaxy/admin/index.mako
@@ -52,7 +52,7 @@
<div class="toolTitle"><a href="${h.url_for( controller='admin', action='users' )}" target="galaxy_main">Manage users</a></div><div class="toolTitle"><a href="${h.url_for( controller='admin', action='groups' )}" target="galaxy_main">Manage groups</a></div><div class="toolTitle"><a href="${h.url_for( controller='admin', action='roles' )}" target="galaxy_main">Manage roles</a></div>
- <div class="toolTitle"><a href="${h.url_for( controller='userskeys', action='all_users' )}" target="galaxy_main">Manage users API keys</a></div>
+ <div class="toolTitle"><a href="${h.url_for( controller='userskeys', action='all_users' )}" target="galaxy_main">Manage users API keys</a></div>
%if trans.app.config.allow_user_impersonation:
<div class="toolTitle"><a href="${h.url_for( controller='admin', action='impersonate' )}" target="galaxy_main">Impersonate a user</a></div>
%endif
@@ -62,7 +62,9 @@
<div class="toolSectionTitle">Data</div><div class="toolSectionBody"><div class="toolSectionBg">
- <div class="toolTitle"><a href="${h.url_for( controller='admin', action='quotas' )}" target="galaxy_main">Manage quotas</a></div>
+ %if trans.app.config.enable_quotas:
+ <div class="toolTitle"><a href="${h.url_for( controller='admin', action='quotas' )}" target="galaxy_main">Manage quotas</a></div>
+ %endif
<div class="toolTitle"><a href="${h.url_for( controller='library_admin', action='browse_libraries' )}" target="galaxy_main">Manage data libraries</a></div>
%if trans.app.config.enable_beta_job_managers:
<div class="toolTitle"><a href="${h.url_for( controller='data_admin', action='manage_data' )}" target="galaxy_main">Manage local data</a></div>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/fd4b44698aeb/
Changeset: fd4b44698aeb
User: guerler
Date: 2013-11-12 19:01:17
Summary: Merge stable
Affected #: 1 file
https://bitbucket.org/galaxy/galaxy-central/commits/7b60d7eb9352/
Changeset: 7b60d7eb9352
User: guerler
Date: 2013-11-12 19:02:51
Summary: Merge
Affected #: 1 file
diff -r fd4b44698aeb0dac0217da7872425552ac390f65 -r 7b60d7eb93520983f9ff1a731f62eac88e2979bc lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -493,7 +493,7 @@
rval = {}
# Get the default value for the 'test element' and use it
# to determine the current case
- test_value = self.test_param.get_initial_value( trans, context, history=None )
+ test_value = self.test_param.get_initial_value( trans, context, history=history )
current_case = self.get_current_case( test_value, trans )
# Store the current case in a special value
rval['__current_case__'] = current_case
@@ -502,7 +502,7 @@
# Fill in state for selected case
child_context = ExpressionContext( rval, context )
for child_input in self.cases[current_case].inputs.itervalues():
- rval[ child_input.name ] = child_input.get_initial_value( trans, child_context, history=None )
+ rval[ child_input.name ] = child_input.get_initial_value( trans, child_context, history=history )
return rval
class ConditionalWhen( object ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0