1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f465995c107e/
changeset: f465995c107e
user: greg
date: 2012-02-09 17:43:03
summary: Handle multiple relationships in the tool lineage chain at the time of repository installation.
affected #: 4 files
diff -r 981551e6caca70574a3ccf4f633fb4302e855dad -r f465995c107ebb5e932404c07bb8e7f878214cc6 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -106,10 +106,11 @@
text = response.read()
response.close()
if text:
- tool_versions_dict = from_json_string( text )
- handle_tool_versions( self.app, tool_versions_dict, tool_shed_repository )
+ tool_version_dicts = from_json_string( text )
+ handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
else:
# Set the tool versions since they seem to be missing for this repository in the tool shed.
+ # CRITICAL NOTE: These default settings may not properly handle all parent/child associations.
for tool_dict in metadata_dict[ 'tools' ]:
flush_needed = False
tool_id = tool_dict[ 'guid' ]
diff -r 981551e6caca70574a3ccf4f633fb4302e855dad -r f465995c107ebb5e932404c07bb8e7f878214cc6 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -477,33 +477,35 @@
error = tmp_stderr.read()
tmp_stderr.close()
log.debug( 'Problem installing dependencies for tool "%s"\n%s' % ( repository_tool.name, error ) )
-def handle_tool_versions( app, tool_versions, tool_shed_repository ):
+def handle_tool_versions( app, tool_version_dicts, tool_shed_repository ):
"""
- This method is used by the InstallManager, which does not have access to trans. Using
- the tool_versions dictionary retrieved from the tool shed, create the parent / child pairs
- of tool versions. The tool_versions dictionary contains { tool id : parent tool id } pairs.
+ This method is used by the InstallManager, which does not have access to trans. Using the list
+    of tool_version_dicts retrieved from the tool shed (one per changeset revision up to the currently
+ installed changeset revision), create the parent / child pairs of tool versions. Each dictionary
+ contains { tool id : parent tool id } pairs.
"""
sa_session = app.model.context.current
- for tool_guid, parent_id in tool_versions.items():
- tool_version_using_tool_guid = get_tool_version( app, tool_guid )
- tool_version_using_parent_id = get_tool_version( app, parent_id )
- if not tool_version_using_tool_guid:
- tool_version_using_tool_guid = app.model.ToolVersion( tool_id=tool_guid, tool_shed_repository=tool_shed_repository )
- sa_session.add( tool_version_using_tool_guid )
- sa_session.flush()
- if not tool_version_using_parent_id:
- tool_version_using_parent_id = app.model.ToolVersion( tool_id=parent_id, tool_shed_repository=tool_shed_repository )
- sa_session.add( tool_version_using_parent_id )
- sa_session.flush()
- # Associate the two versions as parent / child.
- tool_version_association = get_tool_version_association( app,
- tool_version_using_parent_id,
- tool_version_using_tool_guid )
- if not tool_version_association:
- tool_version_association = app.model.ToolVersionAssociation( tool_id=tool_version_using_tool_guid.id,
- parent_id=tool_version_using_parent_id.id )
- sa_session.add( tool_version_association )
- sa_session.flush()
+ for tool_version_dict in tool_version_dicts:
+ for tool_guid, parent_id in tool_version_dict.items():
+ tool_version_using_tool_guid = get_tool_version( app, tool_guid )
+ tool_version_using_parent_id = get_tool_version( app, parent_id )
+ if not tool_version_using_tool_guid:
+ tool_version_using_tool_guid = app.model.ToolVersion( tool_id=tool_guid, tool_shed_repository=tool_shed_repository )
+ sa_session.add( tool_version_using_tool_guid )
+ sa_session.flush()
+ if not tool_version_using_parent_id:
+ tool_version_using_parent_id = app.model.ToolVersion( tool_id=parent_id, tool_shed_repository=tool_shed_repository )
+ sa_session.add( tool_version_using_parent_id )
+ sa_session.flush()
+ tool_version_association = get_tool_version_association( app,
+ tool_version_using_parent_id,
+ tool_version_using_tool_guid )
+ if not tool_version_association:
+ # Associate the two versions as parent / child.
+ tool_version_association = app.model.ToolVersionAssociation( tool_id=tool_version_using_tool_guid.id,
+ parent_id=tool_version_using_parent_id.id )
+ sa_session.add( tool_version_association )
+ sa_session.flush()
def load_datatype_items( app, repository, relative_install_dir, deactivate=False ):
# Load proprietary datatypes.
metadata = repository.metadata
diff -r 981551e6caca70574a3ccf4f633fb4302e855dad -r f465995c107ebb5e932404c07bb8e7f878214cc6 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -242,8 +242,8 @@
text = response.read()
response.close()
if text:
- tool_versions_dict = from_json_string( text )
- handle_tool_versions( trans.app, tool_versions_dict, tool_shed_repository )
+ tool_version_dicts = from_json_string( text )
+ handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
else:
message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
@@ -309,8 +309,8 @@
text = response.read()
response.close()
if text:
- tool_versions_dict = from_json_string( text )
- handle_tool_versions( trans.app, tool_versions_dict, repository )
+ tool_version_dicts = from_json_string( text )
+ handle_tool_versions( trans.app, tool_version_dicts, repository )
message = "Tool versions have been set for all included tools."
status = 'done'
else:
diff -r 981551e6caca70574a3ccf4f633fb4302e855dad -r f465995c107ebb5e932404c07bb8e7f878214cc6 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -774,13 +774,27 @@
return trans.response.send_redirect( url )
@web.expose
def get_tool_versions( self, trans, **kwd ):
+ """
+        For each valid/downloadable changeset (up to the received changeset_revision) in the
+ repository's change log, append the change set's tool_versions dictionary to the list
+ that will be returned.
+ """
name = kwd[ 'name' ]
owner = kwd[ 'owner' ]
changeset_revision = kwd[ 'changeset_revision' ]
repository = get_repository_by_name_and_owner( trans, name, owner )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ tool_version_dicts = []
+ for changeset in repo.changelog:
+ current_changeset_revision = str( repo.changectx( changeset ) )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), current_changeset_revision )
+ if repository_metadata and repository_metadata.tool_versions:
+ tool_version_dicts.append( repository_metadata.tool_versions )
+ if current_changeset_revision == changeset_revision:
+ break
if repository_metadata.tool_versions:
- return to_json_string( repository_metadata.tool_versions )
+ return to_json_string( tool_version_dicts )
return ''
@web.expose
def check_for_updates( self, trans, **kwd ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/981551e6caca/
changeset: 981551e6caca
user: jgoecks
date: 2012-02-09 04:56:50
summary: Remove maximum value for Tophat parameter initial_read_mismatches.
affected #: 1 file
diff -r c06d7cef9125577820006c607c0628b4940ca57d -r 981551e6caca70574a3ccf4f633fb4302e855dad tools/ngs_rna/tophat_wrapper.xml
--- a/tools/ngs_rna/tophat_wrapper.xml
+++ b/tools/ngs_rna/tophat_wrapper.xml
@@ -207,7 +207,7 @@
<param name="max_multihits" type="integer" value="40" label="Maximum number of alignments to be allowed" /><param name="min_segment_intron" type="integer" value="50" label="Minimum intron length that may be found during split-segment (default) search" /><param name="max_segment_intron" type="integer" value="500000" label="Maximum intron length that may be found during split-segment (default) search" />
- <param name="initial_read_mismatches" type="integer" min="0" max="3" value="2" label="Number of mismatches allowed in the initial read mapping" />
+ <param name="initial_read_mismatches" type="integer" min="0" value="2" label="Number of mismatches allowed in the initial read mapping" /><param name="seg_mismatches" type="integer" min="0" max="3" value="2" label="Number of mismatches allowed in each segment alignment for reads mapped independently" /><param name="seg_length" type="integer" value="25" label="Minimum length of read segments" />
@@ -314,7 +314,7 @@
<param name="max_multihits" type="integer" value="40" label="Maximum number of alignments to be allowed" /><param name="min_segment_intron" type="integer" value="50" label="Minimum intron length that may be found during split-segment (default) search" /><param name="max_segment_intron" type="integer" value="500000" label="Maximum intron length that may be found during split-segment (default) search" />
- <param name="initial_read_mismatches" type="integer" min="0" max="3" value="2" label="Number of mismatches allowed in the initial read mapping" />
+ <param name="initial_read_mismatches" type="integer" min="0" value="2" label="Number of mismatches allowed in the initial read mapping" /><param name="seg_mismatches" type="integer" min="0" max="3" value="2" label="Number of mismatches allowed in each segment alignment for reads mapped independently" /><param name="seg_length" type="integer" value="25" label="Minimum length of read segments" /><!-- Options for supplying own junctions. -->
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c06d7cef9125/
changeset: c06d7cef9125
user: greg
date: 2012-02-08 19:46:14
summary: When handling datatype conflicts always override previously loaded datatypes with the datatype currently being loaded. Use the same behavior for sniffers.
affected #: 2 files
diff -r 7c7b3f6ebbdbef5e9101c09037905a01511e9b9c -r c06d7cef9125577820006c607c0628b4940ca57d lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -57,7 +57,7 @@
# This will also load proprietary datatype converters and display applications.
self.installed_repository_manager.load_proprietary_datatypes()
# Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
- self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config, override_conflicts=True )
+ self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
galaxy.model.set_datatypes_registry( self.datatypes_registry )
# Security helper
self.security = security.SecurityHelper( id_secret=self.config.id_secret )
diff -r 7c7b3f6ebbdbef5e9101c09037905a01511e9b9c -r c06d7cef9125577820006c607c0628b4940ca57d lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -26,8 +26,6 @@
self.converter_deps = {}
self.available_tracks = []
self.set_external_metadata_tool = None
- # Keep a list of datatypes with loaded sniffers for handling potential conflicts.
- self.datatype_sniffers = []
self.sniff_order = []
self.upload_file_formats = []
# Datatype elements defined in local datatypes_conf.xml that contain display applications.
@@ -49,7 +47,7 @@
self.datatype_elems = []
self.sniffer_elems = []
self.xml_filename = None
- def load_datatypes( self, root_dir=None, config=None, imported_modules=None, deactivate=False, override_conflicts=False ):
+ def load_datatypes( self, root_dir=None, config=None, imported_modules=None, deactivate=False ):
"""
Parse a datatypes XML file located at root_dir/config. If imported_modules is received, it
is a list of imported datatypes class files included in an installed tool shed repository.
@@ -100,13 +98,10 @@
else:
# Keep an in-memory list of datatype elems to enable persistence.
self.datatype_elems.append( elem )
- if extension and extension in self.datatypes_by_extension and not override_conflicts:
- if deactivate:
- # We are deactivating an installed tool shed repository, so eliminate the datatype from the registry.
- self.log.debug( "Removing datatype with extension '%s' from the registry." % extension )
- del self.datatypes_by_extension[ extension ]
- else:
- self.log.debug( "Ignoring datatype with extension '%s' from %s because the registry already contains a datatype with that extension." % ( extension, config ) )
+ if extension and extension in self.datatypes_by_extension and deactivate:
+ # We are deactivating an installed tool shed repository, so eliminate the datatype from the registry.
+ self.log.debug( "Removing datatype with extension '%s' from the registry." % extension )
+ del self.datatypes_by_extension[ extension ]
elif extension and ( dtype or type_extension ):
if dtype:
fields = dtype.split( ':' )
@@ -120,8 +115,7 @@
datatype_class = getattr( imported_module, datatype_class_name )
break
if datatype_class is None:
- # The datatype class name must be contained in one of the datatype
- # modules in the Galaxy distribution.
+ # The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
fields = datatype_module.split( '.' )
module = __import__( fields.pop(0) )
for mod in fields:
@@ -131,52 +125,48 @@
datatype_class = self.datatypes_by_extension[type_extension].__class__
if make_subclass:
datatype_class = type( datatype_class_name, (datatype_class,), {} )
- if extension not in self.datatypes_by_extension or override_conflicts:
- if extension in self.datatypes_by_extension:
- self.log.warning( "Overriding conflicting extension '%s', using datatype with same extension from %s." % ( extension, config ) )
- self.datatypes_by_extension[ extension ] = datatype_class()
- if mimetype is None:
- # Use default mime type as per datatype spec
- mimetype = self.datatypes_by_extension[extension].get_mime()
- self.mimetypes_by_extension[extension] = mimetype
- if hasattr( datatype_class, "get_track_type" ):
- self.available_tracks.append( extension )
- if display_in_upload:
- self.upload_file_formats.append( extension )
- # Max file size cut off for setting optional metadata
- self.datatypes_by_extension[extension].max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
- for converter in elem.findall( 'converter' ):
- # Build the list of datatype converters which will later be loaded
- # into the calling app's toolbox.
- converter_config = converter.get( 'file', None )
- target_datatype = converter.get( 'target_datatype', None )
- depends_on = converter.get( 'depends_on', None )
- if depends_on and target_datatype:
- if extension not in self.converter_deps:
- self.converter_deps[extension] = {}
- self.converter_deps[extension][target_datatype] = depends_on.split(',')
- if converter_config and target_datatype:
- if imported_modules:
- self.proprietary_converters.append( ( converter_config, extension, target_datatype ) )
- else:
- self.converters.append( ( converter_config, extension, target_datatype ) )
- for composite_file in elem.findall( 'composite_file' ):
- # add composite files
- name = composite_file.get( 'name', None )
- if name is None:
- self.log.warning( "You must provide a name for your composite_file (%s)." % composite_file )
- optional = composite_file.get( 'optional', False )
- mimetype = composite_file.get( 'mimetype', None )
- self.datatypes_by_extension[extension].add_composite_file( name, optional=optional, mimetype=mimetype )
- for display_app in elem.findall( 'display' ):
+ if extension in self.datatypes_by_extension:
+ self.log.warning( "Overriding conflicting datatype with extension '%s', using datatype from %s." % ( extension, config ) )
+ self.datatypes_by_extension[ extension ] = datatype_class()
+ if mimetype is None:
+ # Use default mime type as per datatype spec
+ mimetype = self.datatypes_by_extension[extension].get_mime()
+ self.mimetypes_by_extension[extension] = mimetype
+ if hasattr( datatype_class, "get_track_type" ):
+ self.available_tracks.append( extension )
+ if display_in_upload:
+ self.upload_file_formats.append( extension )
+ # Max file size cut off for setting optional metadata
+ self.datatypes_by_extension[extension].max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
+ for converter in elem.findall( 'converter' ):
+ # Build the list of datatype converters which will later be loaded into the calling app's toolbox.
+ converter_config = converter.get( 'file', None )
+ target_datatype = converter.get( 'target_datatype', None )
+ depends_on = converter.get( 'depends_on', None )
+ if depends_on and target_datatype:
+ if extension not in self.converter_deps:
+ self.converter_deps[extension] = {}
+ self.converter_deps[extension][target_datatype] = depends_on.split(',')
+ if converter_config and target_datatype:
if imported_modules:
- if elem not in self.proprietary_display_app_containers:
- self.proprietary_display_app_containers.append( elem )
+ self.proprietary_converters.append( ( converter_config, extension, target_datatype ) )
else:
- if elem not in self.display_app_containers:
- self.display_app_containers.append( elem )
- else:
- self.log.debug( "Ignoring datatype with extension '%s' from %s because the registry already contains a datatype with that extension." % ( extension, config ) )
+ self.converters.append( ( converter_config, extension, target_datatype ) )
+ for composite_file in elem.findall( 'composite_file' ):
+ # add composite files
+ name = composite_file.get( 'name', None )
+ if name is None:
+ self.log.warning( "You must provide a name for your composite_file (%s)." % composite_file )
+ optional = composite_file.get( 'optional', False )
+ mimetype = composite_file.get( 'mimetype', None )
+ self.datatypes_by_extension[extension].add_composite_file( name, optional=optional, mimetype=mimetype )
+ for display_app in elem.findall( 'display' ):
+ if imported_modules:
+ if elem not in self.proprietary_display_app_containers:
+ self.proprietary_display_app_containers.append( elem )
+ else:
+ if elem not in self.display_app_containers:
+ self.display_app_containers.append( elem )
except Exception, e:
if deactivate:
self.log.warning( "Error deactivating datatype with extension '%s': %s" % ( extension, str( e ) ) )
@@ -202,26 +192,33 @@
module = imported_module
break
if module is None:
- # The datatype class name must be contained in one of the datatype
- # modules in the Galaxy distribution.
+ # The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
module = __import__( datatype_module )
for comp in datatype_module.split( '.' )[ 1: ]:
module = getattr( module, comp )
aclass = getattr( module, datatype_class_name )()
+ # See if we have a conflicting sniffer already loaded.
+ conflict_loc = None
+ conflict = False
+ for conflict_loc, sniffer_class in enumerate( self.sniff_order ):
+ if sniffer_class.__class__ == aclass.__class__:
+ conflict = True
+ break
if deactivate:
- self.sniff_order.remove( aclass )
- self.datatype_sniffers.remove( dtype )
+ for sniffer_class in self.sniff_order:
+ if sniffer_class.__class__ == aclass.__class__:
+ self.sniff_order.remove( sniffer_class )
+ break
self.log.debug( "Deactivated sniffer for datatype '%s'" % dtype )
else:
- if override_conflicts and dtype in self.datatype_sniffers:
+ if conflict:
+ # We have a conflicting sniffer, so replace the one previously loaded.
+ del self.sniff_order[ conflict_loc ]
self.sniff_order.append( aclass )
- self.log.debug( "Loaded additional sniffer for datatype '%s'" % dtype )
- elif dtype not in self.datatype_sniffers:
- self.datatype_sniffers.append( dtype )
+ self.log.debug( "Replaced conflicting sniffer for datatype '%s'" % dtype )
+ else:
self.sniff_order.append( aclass )
self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
- else:
- self.log.debug( "Ignoring sniffer for datatype '%s' because the registry already contains one." % dtype )
except Exception, exc:
if deactivate:
self.log.warning( "Error deactivating sniffer for datatype '%s': %s" % ( dtype, str( exc ) ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7c7b3f6ebbdb/
changeset: 7c7b3f6ebbdb
user: jgoecks
date: 2012-02-08 17:55:49
summary: Fix bug in GFFDataProvider's data iterator.
affected #: 1 file
diff -r 865d998a693de676c01daea4128d20124d2015c0 -r 7c7b3f6ebbdbef5e9101c09037905a01511e9b9c lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -927,18 +927,20 @@
def get_iterator( self, chrom, start, end ):
"""
- Returns an iterator that provides data in the region chrom:start-end
+ Returns an iterator that provides data in the region chrom:start-end as well as
+ a file offset.
"""
start, end = int( start ), int( end )
source = open( self.original_dataset.file_name )
def features_in_region_iter():
+ offset = 0
for feature in GFFReaderWrapper( source, fix_strand=True ):
# Only provide features that are in region.
feature_start, feature_end = convert_gff_coords_to_bed( [ feature.start, feature.end ] )
- if feature.chrom != chrom or feature_end < start or feature_start > end:
- continue
- yield feature
+ if feature.chrom == chrom and feature_end > start and feature_start < end:
+ yield feature, offset
+ offset += feature.raw_size
return features_in_region_iter()
def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
@@ -949,9 +951,8 @@
no_detail = ( "no_detail" in kwargs )
results = []
message = None
- offset = 0
-
- for count, feature in enumerate( iterator ):
+
+ for count, ( feature, offset ) in enumerate( iterator ):
if count < start_val:
continue
if count-start_val >= max_vals:
@@ -961,7 +962,7 @@
payload = package_gff_feature( feature, no_detail=no_detail, filter_cols=filter_cols )
payload.insert( 0, offset )
results.append( payload )
- offset += feature.raw_size
+
return { 'data': results, 'message': message }
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/865d998a693d/
changeset: 865d998a693d
user: greg
date: 2012-02-08 17:30:58
summary: Load proprietary datatypes from installed tool shed repositories before the datatypes in the Galaxy distribution are loaded. We do this because the distribution includes some extremely generic sniffers (eg text,xml) which will catch pretty much anything, making it impossible for proprietary sniffers to be used. Proprietary datatypes contained in installed repositories are loaded in order of oldest installation first, followed by next oldest installation, etc.
affected #: 3 files
diff -r d0e5fa20e4917aac9d058413dcf07b06f0dfec92 -r 865d998a693de676c01daea4128d20124d2015c0 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -22,15 +22,6 @@
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
- # Initialize the datatypes registry to the default data types included in self.config.datatypes_config.
- self.datatypes_registry = galaxy.datatypes.registry.Registry()
- self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
- galaxy.model.set_datatypes_registry( self.datatypes_registry )
- # Set up the tool sheds registry
- if os.path.isfile( self.config.tool_sheds_config ):
- self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
- else:
- self.tool_shed_registry = None
# Determine the database url
if self.config.database_connection:
db_url = self.config.database_connection
@@ -48,6 +39,26 @@
self.config.database_engine_options,
database_query_profiling_proxy = self.config.database_query_profiling_proxy,
object_store = self.object_store )
+ # Set up the tool sheds registry
+ if os.path.isfile( self.config.tool_sheds_config ):
+ self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
+ else:
+ self.tool_shed_registry = None
+ # Manage installed tool shed repositories.
+ self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
+ # Create an empty datatypes registry.
+ self.datatypes_registry = galaxy.datatypes.registry.Registry()
+ # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories. We
+ # load proprietary datatypes before datatypes in the distribution because Galaxy's default sniffers include some
+ # generic sniffers (eg text,xml) which catch anything, so it's impossible for proprietary sniffers to be used.
+ # However, if there is a conflict (2 datatypes with the same extension) between a proprietary datatype and a datatype
+ # in the Galaxy distribution, the datatype in the Galaxy distribution will take precedence. If there is a conflict
+ # between 2 proprietary datatypes, the datatype from the repository that was installed earliest will take precedence.
+ # This will also load proprietary datatype converters and display applications.
+ self.installed_repository_manager.load_proprietary_datatypes()
+ # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
+ self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config, override_conflicts=True )
+ galaxy.model.set_datatypes_registry( self.datatypes_registry )
# Security helper
self.security = security.SecurityHelper( id_secret=self.config.id_secret )
# Tag handler
@@ -63,28 +74,22 @@
if self.config.get_bool( 'enable_tool_shed_install', False ):
from tool_shed import install_manager
self.install_manager = install_manager.InstallManager( self, self.config.tool_shed_install_config, self.config.install_tool_config )
- # If enabled, poll respective tool sheds to see if updates are
- # available for any installed tool shed repositories.
+ # If enabled, poll respective tool sheds to see if updates are available for any installed tool shed repositories.
if self.config.get_bool( 'enable_tool_shed_check', False ):
from tool_shed import update_manager
self.update_manager = update_manager.UpdateManager( self )
- # Manage installed tool shed repositories
- self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
# Load datatype display applications defined in local datatypes_conf.xml
self.datatypes_registry.load_display_applications()
# Load datatype converters defined in local datatypes_conf.xml
self.datatypes_registry.load_datatype_converters( self.toolbox )
- # Load history import/export tools
- load_history_imp_exp_tools( self.toolbox )
# Load external metadata tool
self.datatypes_registry.load_external_metadata_tool( self.toolbox )
- # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed
- # repositories. This will also load all proprietary datatype converters and display_applications.
- self.installed_repository_manager.load_proprietary_datatypes()
- # Load security policy
+ # Load history import/export tools.
+ load_history_imp_exp_tools( self.toolbox )
+ # Load security policy.
self.security_agent = self.model.security_agent
self.host_security_agent = galaxy.security.HostAgent( model=self.security_agent.model, permitted_actions=self.security_agent.permitted_actions )
- # Load quota management
+ # Load quota management.
if self.config.enable_quotas:
self.quota_agent = galaxy.quota.QuotaAgent( self.model )
else:
diff -r d0e5fa20e4917aac9d058413dcf07b06f0dfec92 -r 865d998a693de676c01daea4128d20124d2015c0 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -26,13 +26,14 @@
self.converter_deps = {}
self.available_tracks = []
self.set_external_metadata_tool = None
+ # Keep a list of datatypes with loaded sniffers for handling potential conflicts.
+ self.datatype_sniffers = []
self.sniff_order = []
self.upload_file_formats = []
- # Datatype elements defined in local datatypes_conf.xml
- # that contain display applications
+ # Datatype elements defined in local datatypes_conf.xml that contain display applications.
self.display_app_containers = []
# Datatype elements in datatypes_conf.xml included in installed
- # tool shed repositories that contain display applications
+ # tool shed repositories that contain display applications.
self.proprietary_display_app_containers = []
# Map a display application id to a display application
self.display_applications = odict()
@@ -48,7 +49,7 @@
self.datatype_elems = []
self.sniffer_elems = []
self.xml_filename = None
- def load_datatypes( self, root_dir=None, config=None, imported_modules=None, deactivate=False ):
+ def load_datatypes( self, root_dir=None, config=None, imported_modules=None, deactivate=False, override_conflicts=False ):
"""
Parse a datatypes XML file located at root_dir/config. If imported_modules is received, it
is a list of imported datatypes class files included in an installed tool shed repository.
@@ -99,13 +100,13 @@
else:
# Keep an in-memory list of datatype elems to enable persistence.
self.datatype_elems.append( elem )
- if extension and extension in self.datatypes_by_extension:
+ if extension and extension in self.datatypes_by_extension and not override_conflicts:
if deactivate:
# We are deactivating an installed tool shed repository, so eliminate the datatype from the registry.
self.log.debug( "Removing datatype with extension '%s' from the registry." % extension )
del self.datatypes_by_extension[ extension ]
else:
- self.log.debug( "Ignoring datatype with extension '%s' from '%s' because the registry already contains a datatype with that extension." % ( extension, config ) )
+ self.log.debug( "Ignoring datatype with extension '%s' from %s because the registry already contains a datatype with that extension." % ( extension, config ) )
elif extension and ( dtype or type_extension ):
if dtype:
fields = dtype.split( ':' )
@@ -130,52 +131,57 @@
datatype_class = self.datatypes_by_extension[type_extension].__class__
if make_subclass:
datatype_class = type( datatype_class_name, (datatype_class,), {} )
- self.datatypes_by_extension[extension] = datatype_class()
- if mimetype is None:
- # Use default mime type as per datatype spec
- mimetype = self.datatypes_by_extension[extension].get_mime()
- self.mimetypes_by_extension[extension] = mimetype
- if hasattr( datatype_class, "get_track_type" ):
- self.available_tracks.append( extension )
- if display_in_upload:
- self.upload_file_formats.append( extension )
- # Max file size cut off for setting optional metadata
- self.datatypes_by_extension[extension].max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
- for converter in elem.findall( 'converter' ):
- # Build the list of datatype converters which will later be loaded
- # into the calling app's toolbox.
- converter_config = converter.get( 'file', None )
- target_datatype = converter.get( 'target_datatype', None )
- depends_on = converter.get( 'depends_on', None )
- if depends_on and target_datatype:
- if extension not in self.converter_deps:
- self.converter_deps[extension] = {}
- self.converter_deps[extension][target_datatype] = depends_on.split(',')
- if converter_config and target_datatype:
+ if extension not in self.datatypes_by_extension or override_conflicts:
+ if extension in self.datatypes_by_extension:
+ self.log.warning( "Overriding conflicting extension '%s', using datatype with same extension from %s." % ( extension, config ) )
+ self.datatypes_by_extension[ extension ] = datatype_class()
+ if mimetype is None:
+ # Use default mime type as per datatype spec
+ mimetype = self.datatypes_by_extension[extension].get_mime()
+ self.mimetypes_by_extension[extension] = mimetype
+ if hasattr( datatype_class, "get_track_type" ):
+ self.available_tracks.append( extension )
+ if display_in_upload:
+ self.upload_file_formats.append( extension )
+ # Max file size cut off for setting optional metadata
+ self.datatypes_by_extension[extension].max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
+ for converter in elem.findall( 'converter' ):
+ # Build the list of datatype converters which will later be loaded
+ # into the calling app's toolbox.
+ converter_config = converter.get( 'file', None )
+ target_datatype = converter.get( 'target_datatype', None )
+ depends_on = converter.get( 'depends_on', None )
+ if depends_on and target_datatype:
+ if extension not in self.converter_deps:
+ self.converter_deps[extension] = {}
+ self.converter_deps[extension][target_datatype] = depends_on.split(',')
+ if converter_config and target_datatype:
+ if imported_modules:
+ self.proprietary_converters.append( ( converter_config, extension, target_datatype ) )
+ else:
+ self.converters.append( ( converter_config, extension, target_datatype ) )
+ for composite_file in elem.findall( 'composite_file' ):
+ # add composite files
+ name = composite_file.get( 'name', None )
+ if name is None:
+ self.log.warning( "You must provide a name for your composite_file (%s)." % composite_file )
+ optional = composite_file.get( 'optional', False )
+ mimetype = composite_file.get( 'mimetype', None )
+ self.datatypes_by_extension[extension].add_composite_file( name, optional=optional, mimetype=mimetype )
+ for display_app in elem.findall( 'display' ):
if imported_modules:
- self.proprietary_converters.append( ( converter_config, extension, target_datatype ) )
+ if elem not in self.proprietary_display_app_containers:
+ self.proprietary_display_app_containers.append( elem )
else:
- self.converters.append( ( converter_config, extension, target_datatype ) )
- for composite_file in elem.findall( 'composite_file' ):
- # add composite files
- name = composite_file.get( 'name', None )
- if name is None:
- self.log.warning( "You must provide a name for your composite_file (%s)." % composite_file )
- optional = composite_file.get( 'optional', False )
- mimetype = composite_file.get( 'mimetype', None )
- self.datatypes_by_extension[extension].add_composite_file( name, optional=optional, mimetype=mimetype )
- for display_app in elem.findall( 'display' ):
- if imported_modules:
- if elem not in self.proprietary_display_app_containers:
- self.proprietary_display_app_containers.append( elem )
- else:
- if elem not in self.display_app_containers:
- self.display_app_containers.append( elem )
+ if elem not in self.display_app_containers:
+ self.display_app_containers.append( elem )
+ else:
+ self.log.debug( "Ignoring datatype with extension '%s' from %s because the registry already contains a datatype with that extension." % ( extension, config ) )
except Exception, e:
if deactivate:
- self.log.warning( 'Error deactivating datatype "%s": %s' % ( extension, str( e ) ) )
+ self.log.warning( "Error deactivating datatype with extension '%s': %s" % ( extension, str( e ) ) )
else:
- self.log.warning( 'Error loading datatype "%s": %s' % ( extension, str( e ) ) )
+ self.log.warning( "Error loading datatype with extension '%s': %s" % ( extension, str( e ) ) )
# Load datatype sniffers from the config
sniffers = root.find( 'sniffers' )
if sniffers:
@@ -204,15 +210,23 @@
aclass = getattr( module, datatype_class_name )()
if deactivate:
self.sniff_order.remove( aclass )
- self.log.debug( 'Deactivated sniffer for datatype: %s' % dtype )
+ self.datatype_sniffers.remove( dtype )
+ self.log.debug( "Deactivated sniffer for datatype '%s'" % dtype )
else:
- self.sniff_order.append( aclass )
- self.log.debug( 'Loaded sniffer for datatype: %s' % dtype )
+ if override_conflicts and dtype in self.datatype_sniffers:
+ self.sniff_order.append( aclass )
+ self.log.debug( "Loaded additional sniffer for datatype '%s'" % dtype )
+ elif dtype not in self.datatype_sniffers:
+ self.datatype_sniffers.append( dtype )
+ self.sniff_order.append( aclass )
+ self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
+ else:
+ self.log.debug( "Ignoring sniffer for datatype '%s' because the registry already contains one." % dtype )
except Exception, exc:
if deactivate:
- self.log.warning( 'Error deactivating sniffer for datatype %s: %s' % ( dtype, str( exc ) ) )
+ self.log.warning( "Error deactivating sniffer for datatype '%s': %s" % ( dtype, str( exc ) ) )
else:
- self.log.warning( 'Error appending sniffer for datatype %s to sniff_order: %s' % ( dtype, str( exc ) ) )
+ self.log.warning( "Error appending sniffer for datatype '%s' to sniff_order: %s" % ( dtype, str( exc ) ) )
# Persist the xml form of the registry into a temporary file so that it
# can be loaded from the command line by tools and set_metadata processing.
self.to_xml_file()
diff -r d0e5fa20e4917aac9d058413dcf07b06f0dfec92 -r 865d998a693de676c01daea4128d20124d2015c0 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -32,7 +32,7 @@
table = tool_data_table_types[ type ]( table_elem )
if table.name not in self.data_tables:
self.data_tables[ table.name ] = table
- log.debug( "Loaded tool data table '%s", table.name )
+ log.debug( "Loaded tool data table '%s'", table.name )
return table_elems
def add_new_entries_from_config_file( self, config_filename ):
"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you are its addressed recipient and have the
commit-notification service enabled for this repository.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/e23be4cf5276/
changeset: e23be4cf5276
user: jgoecks
date: 2012-02-07 23:29:31
summary: Trackster: do not subset data with a message.
affected #: 1 file
diff -r 63319bc07b56127bcd3c44ec6e950541c2c4aa0c -r e23be4cf5276a0570d784b3ade9fbad9f9fb351c static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -520,8 +520,7 @@
// Look in cache for data that can be used. Data can be reused if it
// has the requested data and is not summary tree and has details.
// TODO: this logic could be improved if the visualization knew whether
- // the data was "index" or "data." Also could slice the data so that
- // only data points in request are returned.
+ // the data was "index" or "data."
//
// TODO: can using resolution in the key enable LineTrack data to be subsetted appropriately?
@@ -534,9 +533,12 @@
entry_high = split_key[1];
if (low >= entry_low && high <= entry_high) {
- // This entry has requested data; return if compatible.
+ // This entry has requested data; return if compatible and if entry does not
+ // have a message. If entry has a message, then not all data available and it
+ // is better to fetch anew.
entry = this.obj_cache[key];
- if ( is_deferred(entry) || this.track.data_and_mode_compatible(entry, mode) ) {
+ if ( is_deferred(entry) ||
+ ( this.track.data_and_mode_compatible(entry, mode) && !entry.message ) ) {
// TODO: for fast lookup and processing, create new entry with only data subset?
// Entry is usable.
this.move_key_to_end(key, i);
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you are its addressed recipient and have the
commit-notification service enabled for this repository.