1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/74d7ff952ec9/
Changeset: 74d7ff952ec9
User: greg
Date: 2014-07-23 17:26:49
Summary: Add a CustomDatatypeLoader for Galaxy installs, eliminate the use of the tool shed's datatype_util module.
Affected #: 7 files
diff -r 053943b668af334c480b464fe7351b50284b7099 -r 74d7ff952ec902e68cc99745fdcd3c14e86208b2 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -12,7 +12,6 @@
import tool_shed.repository_types.util as rt_util
from tool_shed.util import common_util
-from tool_shed.util import datatype_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
from tool_shed.util import readme_util
@@ -25,6 +24,7 @@
from tool_shed.galaxy_install import dependency_display
from tool_shed.galaxy_install import install_manager
+from tool_shed.galaxy_install.datatypes import custom_datatype_manager
from tool_shed.galaxy_install.grids import admin_toolshed_grids
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.repair_repository_manager import RepairRepositoryManager
@@ -256,17 +256,17 @@
dmh.remove_from_data_manager( tool_shed_repository )
if tool_shed_repository.includes_datatypes:
# Deactivate proprietary datatypes.
- installed_repository_dict = datatype_util.load_installed_datatypes( trans.app,
- tool_shed_repository,
- repository_install_dir,
- deactivate=True )
+ cdl = custom_datatype_manager.CustomDatatypeLoader( trans.app )
+ installed_repository_dict = cdl.load_installed_datatypes( tool_shed_repository,
+ repository_install_dir,
+ deactivate=True )
if installed_repository_dict:
converter_path = installed_repository_dict.get( 'converter_path' )
if converter_path is not None:
- datatype_util.load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=True )
+ cdl.load_installed_datatype_converters( installed_repository_dict, deactivate=True )
display_path = installed_repository_dict.get( 'display_path' )
if display_path is not None:
- datatype_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=True )
+ cdl.load_installed_display_applications( installed_repository_dict, deactivate=True )
if remove_from_disk_checked:
try:
# Remove the repository from disk.
diff -r 053943b668af334c480b464fe7351b50284b7099 -r 74d7ff952ec902e68cc99745fdcd3c14e86208b2 lib/tool_shed/galaxy_install/datatypes/custom_datatype_manager.py
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/datatypes/custom_datatype_manager.py
@@ -0,0 +1,218 @@
+import logging
+import os
+import tempfile
+
+from galaxy.util import asbool
+
+from tool_shed.util import basic_util
+from tool_shed.util import hg_util
+from tool_shed.util import tool_util
+from tool_shed.util import shed_util_common as suc
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class CustomDatatypeLoader( object ):
+
+ def __init__( self, app ):
+ self.app = app
+
+ def alter_config_and_load_prorietary_datatypes( self, datatypes_config, relative_install_dir,
+ deactivate=False, override=True ):
+ """
+ Parse a custom datatypes config (a datatypes_conf.xml file included in an installed
+ tool shed repository) and add information to appropriate element attributes that will
+ enable custom datatype class modules, datatypes converters and display applications
+ to be discovered and properly imported by the datatypes registry. The value of override
+ will be False when a tool shed repository is being installed. Since installation is
+ occurring after the datatypes registry has been initialized, the registry's contents
+ cannot be overridden by conflicting data types.
+ """
+ tree, error_message = xml_util.parse_xml( datatypes_config )
+ if tree is None:
+ return None, None
+ datatypes_config_root = tree.getroot()
+ registration = datatypes_config_root.find( 'registration' )
+ if registration is None:
+ # We have valid XML, but not a valid custom datatypes definition.
+ return None, None
+ sniffers = datatypes_config_root.find( 'sniffers' )
+ converter_path, display_path = self.get_converter_and_display_paths( registration,
+ relative_install_dir )
+ if converter_path:
+ # Path to datatype converters
+ registration.attrib[ 'proprietary_converter_path' ] = converter_path
+ if display_path:
+ # Path to datatype display applications
+ registration.attrib[ 'proprietary_display_path' ] = display_path
+ relative_path_to_datatype_file_name = None
+ datatype_files = datatypes_config_root.find( 'datatype_files' )
+ datatype_class_modules = []
+ if datatype_files is not None:
+ # The <datatype_files> tag set contains any number of <datatype_file> tags.
+ # <datatype_files>
+ # <datatype_file name="gmap.py"/>
+ # <datatype_file name="metagenomics.py"/>
+ # </datatype_files>
+ # We'll add attributes to the datatype tag sets so that the modules can be properly imported
+ # by the datatypes registry.
+ for elem in datatype_files.findall( 'datatype_file' ):
+ datatype_file_name = elem.get( 'name', None )
+ if datatype_file_name:
+ # Find the file in the installed repository.
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == datatype_file_name:
+ datatype_class_modules.append( os.path.join( root, name ) )
+ break
+ break
+ if datatype_class_modules:
+ for relative_path_to_datatype_file_name in datatype_class_modules:
+ datatype_file_name_path, datatype_file_name = os.path.split( relative_path_to_datatype_file_name )
+ for elem in registration.findall( 'datatype' ):
+ # Handle 'type' attribute which should be something like one of the following:
+ # type="gmap:GmapDB"
+ # type="galaxy.datatypes.gmap:GmapDB"
+ dtype = elem.get( 'type', None )
+ if dtype:
+ fields = dtype.split( ':' )
+ proprietary_datatype_module = fields[ 0 ]
+ if proprietary_datatype_module.find( '.' ) >= 0:
+ # Handle the case where datatype_module is "galaxy.datatypes.gmap".
+ proprietary_datatype_module = proprietary_datatype_module.split( '.' )[ -1 ]
+ # The value of proprietary_path must be an absolute path due to job_working_directory.
+ elem.attrib[ 'proprietary_path' ] = os.path.abspath( datatype_file_name_path )
+ elem.attrib[ 'proprietary_datatype_module' ] = proprietary_datatype_module
+ # Temporarily persist the custom datatypes configuration file so it can be loaded into the
+ # datatypes registry.
+ fd, proprietary_datatypes_config = tempfile.mkstemp( prefix="tmp-toolshed-acalpd" )
+ os.write( fd, '<?xml version="1.0"?>\n' )
+ os.write( fd, '<datatypes>\n' )
+ os.write( fd, '%s' % xml_util.xml_to_string( registration ) )
+ if sniffers is not None:
+ os.write( fd, '%s' % xml_util.xml_to_string( sniffers ) )
+ os.write( fd, '</datatypes>\n' )
+ os.close( fd )
+ os.chmod( proprietary_datatypes_config, 0644 )
+ # Load custom datatypes
+ self.app.datatypes_registry.load_datatypes( root_dir=self.app.config.root,
+ config=proprietary_datatypes_config,
+ deactivate=deactivate,
+ override=override )
+ if deactivate:
+ # Reload the upload tool to eliminate deactivated datatype extensions from the file_type
+ # select list.
+ tool_util.reload_upload_tools( self.app )
+ else:
+ self.append_to_datatypes_registry_upload_file_formats( registration )
+ tool_util.reload_upload_tools( self.app )
+ if datatype_files is not None:
+ try:
+ os.unlink( proprietary_datatypes_config )
+ except:
+ pass
+ return converter_path, display_path
+
+ def append_to_datatypes_registry_upload_file_formats( self, elem ):
+ # See if we have any datatypes that should be displayed in the upload tool's file_type select list.
+ for datatype_elem in elem.findall( 'datatype' ):
+ extension = datatype_elem.get( 'extension', None )
+ display_in_upload = datatype_elem.get( 'display_in_upload', None )
+ if extension is not None and display_in_upload is not None:
+ display_in_upload = asbool( str( display_in_upload ) )
+ if display_in_upload and extension not in self.app.datatypes_registry.upload_file_formats:
+ self.app.datatypes_registry.upload_file_formats.append( extension )
+
+ def create_repository_dict_for_proprietary_datatypes( self, tool_shed, name, owner, installed_changeset_revision,
+ tool_dicts, converter_path=None, display_path=None ):
+ return dict( tool_shed=tool_shed,
+ repository_name=name,
+ repository_owner=owner,
+ installed_changeset_revision=installed_changeset_revision,
+ tool_dicts=tool_dicts,
+ converter_path=converter_path,
+ display_path=display_path )
+
+ def get_converter_and_display_paths( self, registration_elem, relative_install_dir ):
+ """
+ Find the relative path to data type converters and display applications included
+ in installed tool shed repositories.
+ """
+ converter_path = None
+ display_path = None
+ for elem in registration_elem.findall( 'datatype' ):
+ if not converter_path:
+ # If any of the <datatype> tag sets contain <converter> tags, set the converter_path
+ # if it is not already set. This requires developers to place all converters in the
+ # same subdirectory within the repository hierarchy.
+ for converter in elem.findall( 'converter' ):
+ converter_config = converter.get( 'file', None )
+ if converter_config:
+ converter_config_file_name = basic_util.strip_path( converter_config )
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == converter_config_file_name:
+ # The value of converter_path must be absolute due to job_working_directory.
+ converter_path = os.path.abspath( root )
+ break
+ if converter_path:
+ break
+ if not display_path:
+ # If any of the <datatype> tag sets contain <display> tags, set the display_path
+            # if it is not already set. This requires developers to place all display applications
+ # in the same subdirectory within the repository hierarchy.
+ for display_app in elem.findall( 'display' ):
+ display_config = display_app.get( 'file', None )
+ if display_config:
+ display_config_file_name = basic_util.strip_path( display_config )
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == display_config_file_name:
+ # The value of display_path must be absolute due to job_working_directory.
+ display_path = os.path.abspath( root )
+ break
+ if display_path:
+ break
+ if converter_path and display_path:
+ break
+ return converter_path, display_path
+
+ def load_installed_datatype_converters( self, installed_repository_dict, deactivate=False ):
+ """Load or deactivate proprietary datatype converters."""
+ self.app.datatypes_registry.load_datatype_converters( self.app.toolbox,
+ installed_repository_dict=installed_repository_dict,
+ deactivate=deactivate )
+
+ def load_installed_datatypes( self, repository, relative_install_dir, deactivate=False ):
+ """
+ Load proprietary datatypes and return information needed for loading custom
+ datatypes converters and display applications later.
+ """
+ metadata = repository.metadata
+ repository_dict = None
+ datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, relative_install_dir )
+ if datatypes_config:
+ converter_path, display_path = \
+ self.alter_config_and_load_prorietary_datatypes( datatypes_config,
+ relative_install_dir,
+ deactivate=deactivate )
+ if converter_path or display_path:
+ # Create a dictionary of tool shed repository related information.
+ repository_dict = \
+ self.create_repository_dict_for_proprietary_datatypes( tool_shed=repository.tool_shed,
+ name=repository.name,
+ owner=repository.owner,
+ installed_changeset_revision=repository.installed_changeset_revision,
+ tool_dicts=metadata.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
+ return repository_dict
+
+ def load_installed_display_applications( self, installed_repository_dict, deactivate=False ):
+ """Load or deactivate custom datatype display applications."""
+ self.app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict,
+ deactivate=deactivate )
diff -r 053943b668af334c480b464fe7351b50284b7099 -r 74d7ff952ec902e68cc99745fdcd3c14e86208b2 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -18,7 +18,6 @@
from tool_shed.util import basic_util
from tool_shed.util import common_util
-from tool_shed.util import datatype_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
from tool_shed.util import shed_util_common as suc
@@ -26,6 +25,7 @@
from tool_shed.util import tool_util
from tool_shed.util import xml_util
+from tool_shed.galaxy_install.datatypes import custom_datatype_manager
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
from tool_shed.galaxy_install.tool_dependencies.recipe.env_file_builder import EnvFileBuilder
@@ -594,18 +594,19 @@
files_dir = os.path.join( shed_config_dict[ 'tool_path' ], files_dir )
datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, files_dir )
# Load data types required by tools.
+ cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
converter_path, display_path = \
- datatype_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, files_dir, override=False )
+ cdl.alter_config_and_load_prorietary_datatypes( datatypes_config, files_dir, override=False )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
repository_dict = \
- datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
- name=tool_shed_repository.name,
- owner=tool_shed_repository.owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
+ cdl.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
+ name=tool_shed_repository.name,
+ owner=tool_shed_repository.owner,
+ installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
if converter_path:
# Load proprietary datatype converters
self.app.datatypes_registry.load_datatype_converters( self.app.toolbox, installed_repository_dict=repository_dict )
diff -r 053943b668af334c480b464fe7351b50284b7099 -r 74d7ff952ec902e68cc99745fdcd3c14e86208b2 lib/tool_shed/galaxy_install/installed_repository_manager.py
--- a/lib/tool_shed/galaxy_install/installed_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py
@@ -7,12 +7,12 @@
from galaxy import util
from tool_shed.util import common_util
from tool_shed.util import container_util
-from tool_shed.util import datatype_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_dependency_util
from tool_shed.util import xml_util
from galaxy.model.orm import and_
+from tool_shed.galaxy_install.datatypes import custom_datatype_manager
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
from tool_shed.galaxy_install.tools import data_manager
@@ -114,17 +114,17 @@
else:
repository_install_dir = os.path.abspath( relative_install_dir )
# Activate proprietary datatypes.
- installed_repository_dict = datatype_util.load_installed_datatypes( self.app,
- repository,
- repository_install_dir,
- deactivate=False )
+ cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
+ installed_repository_dict = cdl.load_installed_datatypes( repository,
+ repository_install_dir,
+ deactivate=False )
if installed_repository_dict:
converter_path = installed_repository_dict.get( 'converter_path' )
if converter_path is not None:
- datatype_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=False )
+ cdl.load_installed_datatype_converters( installed_repository_dict, deactivate=False )
display_path = installed_repository_dict.get( 'display_path' )
if display_path is not None:
- datatype_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=False )
+ cdl.load_installed_display_applications( installed_repository_dict, deactivate=False )
def add_entry_to_installed_repository_dependencies_of_installed_repositories( self, repository ):
"""
@@ -732,22 +732,24 @@
self.add_entry_to_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( tool_dependency )
def load_proprietary_datatypes( self ):
+ cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
for tool_shed_repository in self.context.query( self.install_model.ToolShedRepository ) \
- .filter( and_( self.install_model.ToolShedRepository.table.c.includes_datatypes==True,
- self.install_model.ToolShedRepository.table.c.deleted==False ) ) \
- .order_by( self.install_model.ToolShedRepository.table.c.id ):
+ .filter( and_( self.install_model.ToolShedRepository.table.c.includes_datatypes==True,
+ self.install_model.ToolShedRepository.table.c.deleted==False ) ) \
+ .order_by( self.install_model.ToolShedRepository.table.c.id ):
relative_install_dir = self.get_repository_install_dir( tool_shed_repository )
if relative_install_dir:
- installed_repository_dict = datatype_util.load_installed_datatypes( self.app, tool_shed_repository, relative_install_dir )
+ installed_repository_dict = cdl.load_installed_datatypes( tool_shed_repository, relative_install_dir )
if installed_repository_dict:
self.installed_repository_dicts.append( installed_repository_dict )
def load_proprietary_converters_and_display_applications( self, deactivate=False ):
+ cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
for installed_repository_dict in self.installed_repository_dicts:
if installed_repository_dict[ 'converter_path' ]:
- datatype_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=deactivate )
+ cdl.load_installed_datatype_converters( installed_repository_dict, deactivate=deactivate )
if installed_repository_dict[ 'display_path' ]:
- datatype_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=deactivate )
+ cdl.load_installed_display_applications( installed_repository_dict, deactivate=deactivate )
def purge_repository( self, repository ):
"""Purge a repository with status New (a white ghost) from the database."""
diff -r 053943b668af334c480b464fe7351b50284b7099 -r 74d7ff952ec902e68cc99745fdcd3c14e86208b2 lib/tool_shed/galaxy_install/tool_migration_manager.py
--- a/lib/tool_shed/galaxy_install/tool_migration_manager.py
+++ b/lib/tool_shed/galaxy_install/tool_migration_manager.py
@@ -14,6 +14,7 @@
from galaxy.util.odict import odict
from tool_shed.galaxy_install import install_manager
+from tool_shed.galaxy_install.datatypes import custom_datatype_manager
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.tools import tool_panel_manager
@@ -22,7 +23,6 @@
from tool_shed.util import basic_util
from tool_shed.util import common_util
-from tool_shed.util import datatype_util
from tool_shed.util import hg_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_dependency_util
@@ -492,6 +492,7 @@
print '\nThe ToolMigrationManager returned the following error while installing tool dependency ', installed_tool_dependency.name, ':'
print installed_tool_dependency.error_message, '\n\n'
if 'datatypes' in metadata_dict:
+ cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
tool_shed_repository.status = self.app.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
tool_shed_repository.includes_datatypes = True
@@ -499,21 +500,27 @@
self.app.install_model.context.flush()
work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-hrc" )
datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, repo_install_dir )
- # Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
- # after this installation completes.
- converter_path, display_path = datatype_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
+ # Load proprietary data types required by tools. The value of override is not
+ # important here since the Galaxy server will be started after this installation
+            # completes.
+ converter_path, display_path = \
+ cdl.alter_config_and_load_prorietary_datatypes( datatypes_config,
+ repo_install_dir,
+ override=False )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
- repository_dict = datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed_url,
- name=tool_shed_repository.name,
- owner=self.repository_owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
+ repository_dict = \
+ cdl.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed_url,
+ name=tool_shed_repository.name,
+ owner=self.repository_owner,
+ installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
if converter_path:
# Load proprietary datatype converters
- self.app.datatypes_registry.load_datatype_converters( self.toolbox, installed_repository_dict=repository_dict )
+ self.app.datatypes_registry.load_datatype_converters( self.toolbox,
+ installed_repository_dict=repository_dict )
if display_path:
# Load proprietary datatype display applications
self.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
diff -r 053943b668af334c480b464fe7351b50284b7099 -r 74d7ff952ec902e68cc99745fdcd3c14e86208b2 lib/tool_shed/util/datatype_util.py
--- a/lib/tool_shed/util/datatype_util.py
+++ /dev/null
@@ -1,187 +0,0 @@
-import logging
-import os
-import tempfile
-from galaxy import eggs
-from galaxy.util import asbool
-from tool_shed.util import basic_util
-from tool_shed.util import hg_util
-from tool_shed.util import tool_util
-from tool_shed.util import xml_util
-import tool_shed.util.shed_util_common as suc
-
-log = logging.getLogger( __name__ )
-
-def alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=False, override=True ):
- """
- Parse a proprietary datatypes config (a datatypes_conf.xml file included in an installed tool shed repository) and
- add information to appropriate element attributes that will enable proprietary datatype class modules, datatypes converters
- and display applications to be discovered and properly imported by the datatypes registry. The value of override will
- be False when a tool shed repository is being installed. Since installation is occurring after the datatypes registry
- has been initialized, the registry's contents cannot be overridden by conflicting data types.
- """
- tree, error_message = xml_util.parse_xml( datatypes_config )
- if tree is None:
- return None, None
- datatypes_config_root = tree.getroot()
- registration = datatypes_config_root.find( 'registration' )
- if registration is None:
- # We have valid XML, but not a valid proprietary datatypes definition.
- return None, None
- sniffers = datatypes_config_root.find( 'sniffers' )
- converter_path, display_path = get_converter_and_display_paths( registration, relative_install_dir )
- if converter_path:
- # Path to datatype converters
- registration.attrib[ 'proprietary_converter_path' ] = converter_path
- if display_path:
- # Path to datatype display applications
- registration.attrib[ 'proprietary_display_path' ] = display_path
- relative_path_to_datatype_file_name = None
- datatype_files = datatypes_config_root.find( 'datatype_files' )
- datatype_class_modules = []
- if datatype_files is not None:
- # The <datatype_files> tag set contains any number of <datatype_file> tags.
- # <datatype_files>
- # <datatype_file name="gmap.py"/>
- # <datatype_file name="metagenomics.py"/>
- # </datatype_files>
- # We'll add attributes to the datatype tag sets so that the modules can be properly imported by the datatypes registry.
- for elem in datatype_files.findall( 'datatype_file' ):
- datatype_file_name = elem.get( 'name', None )
- if datatype_file_name:
- # Find the file in the installed repository.
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == datatype_file_name:
- datatype_class_modules.append( os.path.join( root, name ) )
- break
- break
- if datatype_class_modules:
- for relative_path_to_datatype_file_name in datatype_class_modules:
- datatype_file_name_path, datatype_file_name = os.path.split( relative_path_to_datatype_file_name )
- for elem in registration.findall( 'datatype' ):
- # Handle 'type' attribute which should be something like one of the following:
- # type="gmap:GmapDB"
- # type="galaxy.datatypes.gmap:GmapDB"
- dtype = elem.get( 'type', None )
- if dtype:
- fields = dtype.split( ':' )
- proprietary_datatype_module = fields[ 0 ]
- if proprietary_datatype_module.find( '.' ) >= 0:
- # Handle the case where datatype_module is "galaxy.datatypes.gmap".
- proprietary_datatype_module = proprietary_datatype_module.split( '.' )[ -1 ]
- # The value of proprietary_path must be an absolute path due to job_working_directory.
- elem.attrib[ 'proprietary_path' ] = os.path.abspath( datatype_file_name_path )
- elem.attrib[ 'proprietary_datatype_module' ] = proprietary_datatype_module
- # Temporarily persist the proprietary datatypes configuration file so it can be loaded into the datatypes registry.
- fd, proprietary_datatypes_config = tempfile.mkstemp( prefix="tmp-toolshed-acalpd" )
- os.write( fd, '<?xml version="1.0"?>\n' )
- os.write( fd, '<datatypes>\n' )
- os.write( fd, '%s' % xml_util.xml_to_string( registration ) )
- if sniffers is not None:
- os.write( fd, '%s' % xml_util.xml_to_string( sniffers ) )
- os.write( fd, '</datatypes>\n' )
- os.close( fd )
- os.chmod( proprietary_datatypes_config, 0644 )
- # Load proprietary datatypes
- app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=proprietary_datatypes_config, deactivate=deactivate, override=override )
- if deactivate:
- # Reload the upload tool to eliminate deactivated datatype extensions from the file_type select list.
- tool_util.reload_upload_tools( app )
- else:
- append_to_datatypes_registry_upload_file_formats( app, registration )
- tool_util.reload_upload_tools( app )
- if datatype_files is not None:
- try:
- os.unlink( proprietary_datatypes_config )
- except:
- pass
- return converter_path, display_path
-
-def append_to_datatypes_registry_upload_file_formats( app, elem ):
- # See if we have any datatypes that should be displayed in the upload tool's file_type select list.
- for datatype_elem in elem.findall( 'datatype' ):
- extension = datatype_elem.get( 'extension', None )
- display_in_upload = datatype_elem.get( 'display_in_upload', None )
- if extension is not None and display_in_upload is not None:
- display_in_upload = asbool( str( display_in_upload ) )
- if display_in_upload and extension not in app.datatypes_registry.upload_file_formats:
- app.datatypes_registry.upload_file_formats.append( extension )
-
-def create_repository_dict_for_proprietary_datatypes( tool_shed, name, owner, installed_changeset_revision, tool_dicts, converter_path=None, display_path=None ):
- return dict( tool_shed=tool_shed,
- repository_name=name,
- repository_owner=owner,
- installed_changeset_revision=installed_changeset_revision,
- tool_dicts=tool_dicts,
- converter_path=converter_path,
- display_path=display_path )
-
-def get_converter_and_display_paths( registration_elem, relative_install_dir ):
- """Find the relative path to data type converters and display applications included in installed tool shed repositories."""
- converter_path = None
- display_path = None
- for elem in registration_elem.findall( 'datatype' ):
- if not converter_path:
- # If any of the <datatype> tag sets contain <converter> tags, set the converter_path
- # if it is not already set. This requires developers to place all converters in the
- # same subdirectory within the repository hierarchy.
- for converter in elem.findall( 'converter' ):
- converter_config = converter.get( 'file', None )
- if converter_config:
- converter_config_file_name = basic_util.strip_path( converter_config )
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == converter_config_file_name:
- # The value of converter_path must be absolute due to job_working_directory.
- converter_path = os.path.abspath( root )
- break
- if converter_path:
- break
- if not display_path:
- # If any of the <datatype> tag sets contain <display> tags, set the display_path
- # if it is not already set. This requires developers to place all display acpplications
- # in the same subdirectory within the repository hierarchy.
- for display_app in elem.findall( 'display' ):
- display_config = display_app.get( 'file', None )
- if display_config:
- display_config_file_name = basic_util.strip_path( display_config )
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == display_config_file_name:
- # The value of display_path must be absolute due to job_working_directory.
- display_path = os.path.abspath( root )
- break
- if display_path:
- break
- if converter_path and display_path:
- break
- return converter_path, display_path
-
-def load_installed_datatype_converters( app, installed_repository_dict, deactivate=False ):
- # Load or deactivate proprietary datatype converters
- app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=installed_repository_dict, deactivate=deactivate )
-
-def load_installed_datatypes( app, repository, relative_install_dir, deactivate=False ):
- # Load proprietary datatypes and return information needed for loading proprietary datatypes converters and display applications later.
- metadata = repository.metadata
- repository_dict = None
- datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, relative_install_dir )
- if datatypes_config:
- converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=deactivate )
- if converter_path or display_path:
- # Create a dictionary of tool shed repository related information.
- repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=repository.tool_shed,
- name=repository.name,
- owner=repository.owner,
- installed_changeset_revision=repository.installed_changeset_revision,
- tool_dicts=metadata.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
- return repository_dict
-
-def load_installed_display_applications( app, installed_repository_dict, deactivate=False ):
- # Load or deactivate proprietary datatype display applications
- app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict, deactivate=deactivate )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0c62ccbc8624/
Changeset: 0c62ccbc8624
User: greg
Date: 2014-07-22 22:46:30
Summary: Move some functions out of the Tool Shed's shed_util_common module.
Affected #: 7 files
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/capsule/capsule_manager.py
--- a/lib/tool_shed/capsule/capsule_manager.py
+++ b/lib/tool_shed/capsule/capsule_manager.py
@@ -9,6 +9,7 @@
from time import strftime
from galaxy import web
+from galaxy.model.orm import and_
from galaxy.util import asbool
from galaxy.util import CHUNK_SIZE
from galaxy.util.odict import odict
@@ -408,7 +409,7 @@
flush = True
# Do not allow dependent repository revisions to be automatically installed if population
# resulted in errors.
- dependent_downloadable_revisions = suc.get_dependent_downloadable_revisions( self.app, repository_metadata )
+ dependent_downloadable_revisions = self.get_dependent_downloadable_revisions( repository_metadata )
for dependent_downloadable_revision in dependent_downloadable_revisions:
if dependent_downloadable_revision.downloadable:
dependent_downloadable_revision.downloadable = False
@@ -541,6 +542,66 @@
archives.append( archive_file_name )
return archives, error_message
+ def get_dependent_downloadable_revisions( self, repository_metadata ):
+ """
+ Return all repository_metadata records that are downloadable and that depend upon the received
+ repository_metadata record.
+ """
+ # This method is called only from the tool shed.
+ sa_session = self.app.model.context.current
+ rm_changeset_revision = repository_metadata.changeset_revision
+ rm_repository = repository_metadata.repository
+ rm_repository_name = str( rm_repository.name )
+ rm_repository_owner = str( rm_repository.user.username )
+ dependent_downloadable_revisions = []
+ for repository in sa_session.query( self.app.model.Repository ) \
+ .filter( and_( self.app.model.Repository.table.c.id != rm_repository.id,
+ self.app.model.Repository.table.c.deleted == False,
+ self.app.model.Repository.table.c.deprecated == False ) ):
+ downloadable_revisions = repository.downloadable_revisions
+ if downloadable_revisions:
+ for downloadable_revision in downloadable_revisions:
+ if downloadable_revision.has_repository_dependencies:
+ metadata = downloadable_revision.metadata
+ if metadata:
+ repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
+ repository_dependencies_tups = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ for repository_dependencies_tup in repository_dependencies_tups:
+ tool_shed, \
+ name, \
+ owner, \
+ changeset_revision, \
+ prior_installation_required, \
+ only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( repository_dependencies_tup )
+ if name == rm_repository_name and owner == rm_repository_owner:
+ # We've discovered a repository revision that depends upon the repository associated
+ # with the received repository_metadata record, but we need to make sure it depends
+ # upon the revision.
+ if changeset_revision == rm_changeset_revision:
+ dependent_downloadable_revisions.append( downloadable_revision )
+ else:
+ # Make sure the defined changeset_revision is current.
+ defined_repository_metadata = \
+ sa_session.query( self.app.model.RepositoryMetadata ) \
+ .filter( self.app.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) \
+ .first()
+ if defined_repository_metadata is None:
+ # The defined changeset_revision is not associated with a repository_metadata
+ # record, so updates must be necessary.
+ defined_repository = suc.get_repository_by_name_and_owner( self.app, name, owner )
+ defined_repo = hg_util.get_repo_for_repository( self.app,
+ repository=defined_repository,
+ repo_path=None,
+ create=False )
+ updated_changeset_revision = \
+ suc.get_next_downloadable_changeset_revision( defined_repository,
+ defined_repo,
+ changeset_revision )
+ if updated_changeset_revision == rm_changeset_revision:
+ dependent_downloadable_revisions.append( downloadable_revision )
+ return dependent_downloadable_revisions
+
def get_export_info_dict( self, export_info_file_path ):
"""
Parse the export_info.xml file contained within the capsule and return a dictionary
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/galaxy_install/installed_repository_manager.py
--- a/lib/tool_shed/galaxy_install/installed_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py
@@ -342,8 +342,8 @@
installed_rd_tups = []
missing_rd_tups = []
for tsr in repository.repository_dependencies:
- prior_installation_required = suc.set_prior_installation_required( self.app, repository, tsr )
- only_if_compiling_contained_td = suc.set_only_if_compiling_contained_td( repository, tsr )
+ prior_installation_required = self.set_prior_installation_required( repository, tsr )
+ only_if_compiling_contained_td = self.set_only_if_compiling_contained_td( repository, tsr )
rd_tup = [ tsr.tool_shed,
tsr.name,
tsr.owner,
@@ -957,6 +957,47 @@
return True
return False
+ def set_only_if_compiling_contained_td( self, repository, required_repository ):
+ """
+ Return True if the received required_repository is only needed to compile a tool
+ dependency defined for the received repository.
+ """
+ # This method is called only from Galaxy when rendering repository dependencies
+ # for an installed tool shed repository.
+ # TODO: Do we need to check more than changeset_revision here?
+ required_repository_tup = [ required_repository.tool_shed, \
+ required_repository.name, \
+ required_repository.owner, \
+ required_repository.changeset_revision ]
+ for tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td:
+ partial_tup = tup[ 0:4 ]
+ if partial_tup == required_repository_tup:
+ return 'True'
+ return 'False'
+
+ def set_prior_installation_required( self, repository, required_repository ):
+ """
+ Return True if the received required_repository must be installed before the
+ received repository.
+ """
+ tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app,
+ str( required_repository.tool_shed ) )
+ required_repository_tup = [ tool_shed_url,
+ str( required_repository.name ),
+ str( required_repository.owner ),
+ str( required_repository.changeset_revision ) ]
+ # Get the list of repository dependency tuples associated with the received repository
+ # where prior_installation_required is True.
+ required_rd_tups_that_must_be_installed = repository.requires_prior_installation_of
+ for required_rd_tup in required_rd_tups_that_must_be_installed:
+ # Repository dependency tuples in metadata include a prior_installation_required value,
+ # so strip it for comparison.
+ partial_required_rd_tup = required_rd_tup[ 0:4 ]
+ if partial_required_rd_tup == required_repository_tup:
+ # Return the string value of prior_installation_required, which defaults to 'False'.
+ return str( required_rd_tup[ 4 ] )
+ return 'False'
+
def update_existing_tool_dependency( self, repository, original_dependency_dict, new_dependencies_dict ):
"""
Update an existing tool dependency whose definition was updated in a change set
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
--- a/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
+++ b/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
@@ -2,9 +2,9 @@
Class encapsulating the management of repository dependencies installed or being installed
into Galaxy from the Tool Shed.
"""
-
import json
import logging
+import os
import urllib
import urllib2
@@ -63,11 +63,10 @@
break
if d_repository is None:
# The dependent repository is not in the received list so look in the database.
- d_repository = suc.get_or_create_tool_shed_repository( self.app,
- d_toolshed,
- d_name,
- d_owner,
- d_changeset_revision )
+ d_repository = self.get_or_create_tool_shed_repository( d_toolshed,
+ d_name,
+ d_owner,
+ d_changeset_revision )
# Process each repository_dependency defined for the current dependent repository.
for repository_dependency_components_list in val:
required_repository = None
@@ -87,11 +86,10 @@
break
if required_repository is None:
# The required repository is not in the received list so look in the database.
- required_repository = suc.get_or_create_tool_shed_repository( self.app,
- rd_toolshed,
- rd_name,
- rd_owner,
- rd_changeset_revision )
+ required_repository = self.get_or_create_tool_shed_repository( rd_toolshed,
+ rd_name,
+ rd_owner,
+ rd_changeset_revision )
# Ensure there is a repository_dependency relationship between d_repository and required_repository.
rrda = None
for rd in d_repository.repository_dependencies:
@@ -257,6 +255,35 @@
self.build_repository_dependency_relationships( all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts
+ def get_or_create_tool_shed_repository( self, tool_shed, name, owner, changeset_revision ):
+ """
+ Return a tool shed repository database record defined by the combination of
+ tool shed, repository name, repository owner and changeset_revision or
+ installed_changeset_revision. A new tool shed repository record will be
+ created if one is not located.
+ """
+ install_model = self.app.install_model
+ # We store the port in the database.
+ tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
+ # This method is used only in Galaxy, not the tool shed.
+ repository = suc.get_repository_for_dependency_relationship( self.app, tool_shed, name, owner, changeset_revision )
+ if not repository:
+ tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed )
+ repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
+ ctx_rev = suc.get_ctx_rev( self.app, tool_shed_url, name, owner, changeset_revision )
+ repository = suc.create_or_update_tool_shed_repository( app=self.app,
+ name=name,
+ description=None,
+ installed_changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_clone_url=repository_clone_url,
+ metadata_dict={},
+ status=install_model.ToolShedRepository.installation_status.NEW,
+ current_changeset_revision=None,
+ owner=owner,
+ dist_to_shed=False )
+ return repository
+
def get_repository_dependencies_for_installed_tool_shed_repository( self, app, repository ):
"""
Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/metadata/repository_metadata_manager.py
--- a/lib/tool_shed/metadata/repository_metadata_manager.py
+++ b/lib/tool_shed/metadata/repository_metadata_manager.py
@@ -410,7 +410,7 @@
repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False )
for changeset in repo.changelog:
changeset_hash = str( repo.changectx( changeset ) )
- skip_tool_test = suc.get_skip_tool_test_by_changeset_revision( self.app, changeset_hash )
+ skip_tool_test = self.get_skip_tool_test_by_changeset_revision( changeset_hash )
if skip_tool_test:
# We found a skip_tool_test record associated with the changeset_revision,
# so see if it has a valid repository_revision.
@@ -521,6 +521,16 @@
return self.sa_session.query( self.app.model.Repository ) \
.filter( self.app.model.Repository.table.c.deleted == False )
+ def get_skip_tool_test_by_changeset_revision( self, changeset_revision ):
+ """
+ Return a skip_tool_test record whose initial_changeset_revision is the received
+ changeset_revision.
+ """
+ # There should only be one, but we'll use first() so callers won't have to handle exceptions.
+ return self.sa_session.query( self.app.model.SkipToolTest ) \
+ .filter( self.app.model.SkipToolTest.table.c.initial_changeset_revision == changeset_revision ) \
+ .first()
+
def new_datatypes_metadata_required( self, repository_metadata, metadata_dict ):
"""
Compare the last saved metadata for each datatype in the repository with the new metadata
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/tools/data_table_manager.py
--- a/lib/tool_shed/tools/data_table_manager.py
+++ b/lib/tool_shed/tools/data_table_manager.py
@@ -2,8 +2,9 @@
import os
import shutil
+from xml.etree import ElementTree as XmlET
+
from tool_shed.util import hg_util
-from tool_shed.util import shed_util_common as suc
from tool_shed.util import xml_util
log = logging.getLogger( __name__ )
@@ -14,6 +15,36 @@
def __init__( self, app ):
self.app = app
+ def generate_repository_info_elem( self, tool_shed, repository_name, changeset_revision, owner,
+ parent_elem=None, **kwd ):
+ """Create and return an ElementTree repository info Element."""
+ if parent_elem is None:
+ elem = XmlET.Element( 'tool_shed_repository' )
+ else:
+ elem = XmlET.SubElement( parent_elem, 'tool_shed_repository' )
+ tool_shed_elem = XmlET.SubElement( elem, 'tool_shed' )
+ tool_shed_elem.text = tool_shed
+ repository_name_elem = XmlET.SubElement( elem, 'repository_name' )
+ repository_name_elem.text = repository_name
+ repository_owner_elem = XmlET.SubElement( elem, 'repository_owner' )
+ repository_owner_elem.text = owner
+ changeset_revision_elem = XmlET.SubElement( elem, 'installed_changeset_revision' )
+ changeset_revision_elem.text = changeset_revision
+ #add additional values
+ #TODO: enhance additional values to allow e.g. use of dict values that will recurse
+ for key, value in kwd.iteritems():
+ new_elem = XmlET.SubElement( elem, key )
+ new_elem.text = value
+ return elem
+
+ def generate_repository_info_elem_from_repository( self, tool_shed_repository, parent_elem=None, **kwd ):
+ return self.generate_repository_info_elem( tool_shed_repository.tool_shed,
+ tool_shed_repository.name,
+ tool_shed_repository.installed_changeset_revision,
+ tool_shed_repository.owner,
+ parent_elem=parent_elem,
+ **kwd )
+
def get_tool_index_sample_files( self, sample_files ):
"""
Try to return the list of all appropriate tool data sample files included
@@ -129,7 +160,7 @@
if path:
file_elem.set( 'path', os.path.normpath( os.path.join( target_dir, os.path.split( path )[1] ) ) )
# Store repository info in the table tag set for trace-ability.
- repo_elem = suc.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=elem )
+ repo_elem = self.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=elem )
if elems:
# Remove old data_table
os.unlink( tool_data_table_conf_filename )
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -7,6 +7,18 @@
log = logging.getLogger( __name__ )
+def get_latest_changeset_revision( app, repository, repo ):
+ repository_tip = repository.tip( app )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( app,
+ app.security.encode_id( repository.id ),
+ repository_tip )
+ if repository_metadata and repository_metadata.downloadable:
+ return repository_tip
+ changeset_revisions = suc.get_ordered_metadata_changeset_revisions( repository, repo, downloadable=False )
+ if changeset_revisions:
+ return changeset_revisions[ -1 ]
+ return hg_util.INITIAL_CHANGELOG_HASH
+
def get_latest_repository_metadata( app, decoded_repository_id, downloadable=False ):
"""Get last metadata defined for a specified repository from the database."""
sa_session = app.model.context.current
@@ -15,7 +27,7 @@
if downloadable:
changeset_revision = suc.get_latest_downloadable_changeset_revision( app, repository, repo )
else:
- changeset_revision = suc.get_latest_changeset_revision( app, repository, repo )
+ changeset_revision = get_latest_changeset_revision( app, repository, repo )
return suc.get_repository_metadata_by_changeset_revision( app,
app.security.encode_id( repository.id ),
changeset_revision )
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -17,7 +17,6 @@
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
-from xml.etree import ElementTree as XmlET
from urllib2 import HTTPError
log = logging.getLogger( __name__ )
@@ -165,35 +164,6 @@
components_list = [ toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ]
return components_list
-def generate_repository_info_elem( tool_shed, repository_name, changeset_revision, owner, parent_elem=None, **kwd ):
- """Create and return an ElementTree repository info Element."""
- if parent_elem is None:
- elem = XmlET.Element( 'tool_shed_repository' )
- else:
- elem = XmlET.SubElement( parent_elem, 'tool_shed_repository' )
- tool_shed_elem = XmlET.SubElement( elem, 'tool_shed' )
- tool_shed_elem.text = tool_shed
- repository_name_elem = XmlET.SubElement( elem, 'repository_name' )
- repository_name_elem.text = repository_name
- repository_owner_elem = XmlET.SubElement( elem, 'repository_owner' )
- repository_owner_elem.text = owner
- changeset_revision_elem = XmlET.SubElement( elem, 'installed_changeset_revision' )
- changeset_revision_elem.text = changeset_revision
- #add additional values
- #TODO: enhance additional values to allow e.g. use of dict values that will recurse
- for key, value in kwd.iteritems():
- new_elem = XmlET.SubElement( elem, key )
- new_elem.text = value
- return elem
-
-def generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=None, **kwd ):
- return generate_repository_info_elem( tool_shed_repository.tool_shed,
- tool_shed_repository.name,
- tool_shed_repository.installed_changeset_revision,
- tool_shed_repository.owner,
- parent_elem=parent_elem,
- **kwd )
-
def generate_sharable_link_for_repository_in_tool_shed( repository, changeset_revision=None ):
"""Generate the URL for sharing a repository that is in the tool shed."""
base_url = url_for( '/', qualified=True ).rstrip( '/' )
@@ -291,61 +261,6 @@
return repository_metadata
return None
-def get_dependent_downloadable_revisions( app, repository_metadata ):
- """
- Return all repository_metadata records that are downloadable and that depend upon the received
- repository_metadata record.
- """
- # This method is called only from the tool shed.
- sa_session = app.model.context.current
- rm_changeset_revision = repository_metadata.changeset_revision
- rm_repository = repository_metadata.repository
- rm_repository_name = str( rm_repository.name )
- rm_repository_owner = str( rm_repository.user.username )
- dependent_downloadable_revisions = []
- for repository in sa_session.query( app.model.Repository ) \
- .filter( and_( app.model.Repository.table.c.id != rm_repository.id,
- app.model.Repository.table.c.deleted == False,
- app.model.Repository.table.c.deprecated == False ) ):
- downloadable_revisions = repository.downloadable_revisions
- if downloadable_revisions:
- for downloadable_revision in downloadable_revisions:
- if downloadable_revision.has_repository_dependencies:
- metadata = downloadable_revision.metadata
- if metadata:
- repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
- repository_dependencies_tups = repository_dependencies_dict.get( 'repository_dependencies', [] )
- for repository_dependencies_tup in repository_dependencies_tups:
- tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( repository_dependencies_tup )
- if name == rm_repository_name and owner == rm_repository_owner:
- # We've discovered a repository revision that depends upon the repository associated
- # with the received repository_metadata record, but we need to make sure it depends
- # upon the revision.
- if changeset_revision == rm_changeset_revision:
- dependent_downloadable_revisions.append( downloadable_revision )
- else:
- # Make sure the defined changeset_revision is current.
- defined_repository_metadata = \
- sa_session.query( app.model.RepositoryMetadata ) \
- .filter( app.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) \
- .first()
- if defined_repository_metadata is None:
- # The defined changeset_revision is not associated with a repository_metadata
- # record, so updates must be necessary.
- defined_repository = get_repository_by_name_and_owner( app, name, owner )
- defined_repo = hg_util.get_repo_for_repository( app,
- repository=defined_repository,
- repo_path=None,
- create=False )
- updated_changeset_revision = \
- get_next_downloadable_changeset_revision( defined_repository,
- defined_repo,
- changeset_revision )
- if updated_changeset_revision == rm_changeset_revision:
- dependent_downloadable_revisions.append( downloadable_revision )
- return dependent_downloadable_revisions
-
def get_ids_of_tool_shed_repositories_being_installed( app, as_string=False ):
installing_repository_ids = []
new_status = app.install_model.ToolShedRepository.installation_status.NEW
@@ -365,18 +280,6 @@
return ','.join( installing_repository_ids )
return installing_repository_ids
-def get_latest_changeset_revision( app, repository, repo ):
- repository_tip = repository.tip( app )
- repository_metadata = get_repository_metadata_by_changeset_revision( app,
- app.security.encode_id( repository.id ),
- repository_tip )
- if repository_metadata and repository_metadata.downloadable:
- return repository_tip
- changeset_revisions = get_ordered_metadata_changeset_revisions( repository, repo, downloadable=False )
- if changeset_revisions:
- return changeset_revisions[ -1 ]
- return hg_util.INITIAL_CHANGELOG_HASH
-
def get_latest_downloadable_changeset_revision( app, repository, repo ):
repository_tip = repository.tip( app )
repository_metadata = get_repository_metadata_by_changeset_revision( app, app.security.encode_id( repository.id ), repository_tip )
@@ -441,37 +344,11 @@
continue
return key
-def get_or_create_tool_shed_repository( app, tool_shed, name, owner, changeset_revision ):
+def get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True ):
"""
- Return a tool shed repository database record defined by the combination of
- tool shed, repository name, repository owner and changeset_revision or
- installed_changeset_revision. A new tool shed repository record will be
- created if one is not located.
+ Return an ordered list of changeset_revisions that are associated with metadata
+ where order is defined by the repository changelog.
"""
- install_model = app.install_model
- # We store the port in the database.
- tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
- # This method is used only in Galaxy, not the tool shed.
- repository = get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision )
- if not repository:
- tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed )
- repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
- ctx_rev = get_ctx_rev( app, tool_shed_url, name, owner, changeset_revision )
- repository = create_or_update_tool_shed_repository( app=app,
- name=name,
- description=None,
- installed_changeset_revision=changeset_revision,
- ctx_rev=ctx_rev,
- repository_clone_url=repository_clone_url,
- metadata_dict={},
- status=install_model.ToolShedRepository.installation_status.NEW,
- current_changeset_revision=None,
- owner=owner,
- dist_to_shed=False )
- return repository
-
-def get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True ):
- """Return an ordered list of changeset_revisions that are associated with metadata where order is defined by the repository changelog."""
if downloadable:
metadata_revisions = repository.downloadable_revisions
else:
@@ -491,9 +368,11 @@
def get_prior_import_or_install_required_dict( app, tsr_ids, repo_info_dicts ):
"""
- This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies
- are being installed. Return a dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids, each of which is contained
- in the received list of tsr_ids and whose associated repository must be imported or installed prior to the repository associated with the tsr_id key.
+ This method is used in the Tool Shed when exporting a repository and its dependencies,
+ and in Galaxy when a repository and its dependencies are being installed. Return a
+ dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids,
+ each of which is contained in the received list of tsr_ids and whose associated repository
+ must be imported or installed prior to the repository associated with the tsr_id key.
"""
# Initialize the dictionary.
prior_import_or_install_required_dict = {}
@@ -698,11 +577,14 @@
def get_repository_ids_requiring_prior_import_or_install( app, tsr_ids, repository_dependencies ):
"""
- This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies
- are being installed. Inspect the received repository_dependencies and determine if the encoded id of each required repository is in the received
- tsr_ids. If so, then determine whether that required repository should be imported / installed prior to its dependent repository. Return a list
- of encoded repository ids, each of which is contained in the received list of tsr_ids, and whose associated repositories must be imported / installed
- prior to the dependent repository associated with the received repository_dependencies.
+ This method is used in the Tool Shed when exporting a repository and its dependencies,
+ and in Galaxy when a repository and its dependencies are being installed. Inspect the
+ received repository_dependencies and determine if the encoded id of each required
+ repository is in the received tsr_ids. If so, then determine whether that required
+ repository should be imported / installed prior to its dependent repository. Return a
+ list of encoded repository ids, each of which is contained in the received list of tsr_ids,
+ and whose associated repositories must be imported / installed prior to the dependent
+ repository associated with the received repository_dependencies.
"""
prior_tsr_ids = []
if repository_dependencies:
@@ -710,20 +592,32 @@
if key in [ 'description', 'root_key' ]:
continue
for rd_tup in rd_tups:
- tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ tool_shed, \
+ name, \
+ owner, \
+ changeset_revision, \
+ prior_installation_required, \
+ only_if_compiling_contained_td = \
common_util.parse_repository_dependency_tuple( rd_tup )
- # If only_if_compiling_contained_td is False, then the repository dependency is not required to be installed prior to the dependent
- # repository even if prior_installation_required is True. This is because the only meaningful content of the repository dependency
- # is its contained tool dependency, which is required in order to compile the dependent repository's tool dependency. In the scenario
- # where the repository dependency is not installed prior to the dependent repository's tool dependency compilation process, the tool
- # dependency compilation framework will install the repository dependency prior to compilation of the dependent repository's tool
- # dependency.
+ # If only_if_compiling_contained_td is False, then the repository dependency
+ # is not required to be installed prior to the dependent repository even if
+ # prior_installation_required is True. This is because the only meaningful
+ # content of the repository dependency is its contained tool dependency, which
+ # is required in order to compile the dependent repository's tool dependency.
+ # In the scenario where the repository dependency is not installed prior to the
+ # dependent repository's tool dependency compilation process, the tool dependency
+ # compilation framework will install the repository dependency prior to compilation
+ # of the dependent repository's tool dependency.
if not util.asbool( only_if_compiling_contained_td ):
if util.asbool( prior_installation_required ):
if is_tool_shed_client( app ):
# We store the port, if one exists, in the database.
tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
- repository = get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision )
+ repository = get_repository_for_dependency_relationship( app,
+ tool_shed,
+ name,
+ owner,
+ changeset_revision )
else:
repository = get_repository_by_name_and_owner( app, name, owner )
if repository:
@@ -779,14 +673,6 @@
query = app.model.context.query( app.model.Repository )
return query
-def get_skip_tool_test_by_changeset_revision( app, changeset_revision ):
- """Return a skip_tool_test record whose initial_changeset_revision is the received changeset_revision."""
- # There should only be one, but we'll use first() so callers won't have to handle exceptions.
- sa_session = app.model.context.current
- return sa_session.query( app.model.SkipToolTest ) \
- .filter( app.model.SkipToolTest.table.c.initial_changeset_revision == changeset_revision ) \
- .first()
-
def get_tool_panel_config_tool_path_install_dir( app, repository ):
"""
Return shed-related tool panel config, the tool_path configured in it, and the relative path to
@@ -1220,42 +1106,6 @@
text = re.sub( r'\.\. image:: (?!https?://)/?(.+)', r'.. image:: %s/\1' % route_to_images, text )
return text
-def set_only_if_compiling_contained_td( repository, required_repository ):
- """
- Return True if the received required_repository is only needed to compile a tool
- dependency defined for the received repository.
- """
- # This method is called only from Galaxy when rendering repository dependencies
- # for an installed tool shed repository.
- # TODO: Do we need to check more than changeset_revision here?
- required_repository_tup = [ required_repository.tool_shed, \
- required_repository.name, \
- required_repository.owner, \
- required_repository.changeset_revision ]
- for tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td:
- partial_tup = tup[ 0:4 ]
- if partial_tup == required_repository_tup:
- return 'True'
- return 'False'
-
-def set_prior_installation_required( app, repository, required_repository ):
- """Return True if the received required_repository must be installed before the received repository."""
- # This method is called only from Galaxy when rendering repository dependencies for an installed Tool Shed repository.
- tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, str( required_repository.tool_shed ) )
- required_repository_tup = [ tool_shed_url,
- str( required_repository.name ),
- str( required_repository.owner ),
- str( required_repository.changeset_revision ) ]
- # Get the list of repository dependency tuples associated with the received repository where prior_installation_required is True.
- required_rd_tups_that_must_be_installed = repository.requires_prior_installation_of
- for required_rd_tup in required_rd_tups_that_must_be_installed:
- # Repository dependency tuples in metadata include a prior_installation_required value, so strip it for comparision.
- partial_required_rd_tup = required_rd_tup[ 0:4 ]
- if partial_required_rd_tup == required_repository_tup:
- # Return the string value of prior_installation_required, which defaults to 'False'.
- return str( required_rd_tup[ 4 ] )
- return 'False'
-
def set_repository_attributes( app, repository, status, error_message, deleted, uninstalled, remove_from_disk=False ):
if remove_from_disk:
relative_install_dir = repository.repo_path( app )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.