galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
July 2014
- 1 participants
- 146 discussions
commit/galaxy-central: greg: Add a CustomDatatypeLoader for Galaxy installs, eliminate the use of the tool shed's datatyps_util module.
by commits-noreply@bitbucket.org 23 Jul '14
by commits-noreply@bitbucket.org 23 Jul '14
23 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/74d7ff952ec9/
Changeset: 74d7ff952ec9
User: greg
Date: 2014-07-23 17:26:49
Summary: Add a CustomDatatypeLoader for Galaxy installs, eliminate the use of the tool shed's datatyps_util module.
Affected #: 7 files
diff -r 053943b668af334c480b464fe7351b50284b7099 -r 74d7ff952ec902e68cc99745fdcd3c14e86208b2 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -12,7 +12,6 @@
import tool_shed.repository_types.util as rt_util
from tool_shed.util import common_util
-from tool_shed.util import datatype_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
from tool_shed.util import readme_util
@@ -25,6 +24,7 @@
from tool_shed.galaxy_install import dependency_display
from tool_shed.galaxy_install import install_manager
+from tool_shed.galaxy_install.datatypes import custom_datatype_manager
from tool_shed.galaxy_install.grids import admin_toolshed_grids
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.repair_repository_manager import RepairRepositoryManager
@@ -256,17 +256,17 @@
dmh.remove_from_data_manager( tool_shed_repository )
if tool_shed_repository.includes_datatypes:
# Deactivate proprietary datatypes.
- installed_repository_dict = datatype_util.load_installed_datatypes( trans.app,
- tool_shed_repository,
- repository_install_dir,
- deactivate=True )
+ cdl = custom_datatype_manager.CustomDatatypeLoader( trans.app )
+ installed_repository_dict = cdl.load_installed_datatypes( tool_shed_repository,
+ repository_install_dir,
+ deactivate=True )
if installed_repository_dict:
converter_path = installed_repository_dict.get( 'converter_path' )
if converter_path is not None:
- datatype_util.load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=True )
+ cdl.load_installed_datatype_converters( installed_repository_dict, deactivate=True )
display_path = installed_repository_dict.get( 'display_path' )
if display_path is not None:
- datatype_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=True )
+ cdl.load_installed_display_applications( installed_repository_dict, deactivate=True )
if remove_from_disk_checked:
try:
# Remove the repository from disk.
diff -r 053943b668af334c480b464fe7351b50284b7099 -r 74d7ff952ec902e68cc99745fdcd3c14e86208b2 lib/tool_shed/galaxy_install/datatypes/custom_datatype_manager.py
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/datatypes/custom_datatype_manager.py
@@ -0,0 +1,218 @@
+import logging
+import os
+import tempfile
+
+from galaxy.util import asbool
+
+from tool_shed.util import basic_util
+from tool_shed.util import hg_util
+from tool_shed.util import tool_util
+from tool_shed.util import shed_util_common as suc
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class CustomDatatypeLoader( object ):
+
+ def __init__( self, app ):
+ self.app = app
+
+ def alter_config_and_load_prorietary_datatypes( self, datatypes_config, relative_install_dir,
+ deactivate=False, override=True ):
+ """
+ Parse a custom datatypes config (a datatypes_conf.xml file included in an installed
+ tool shed repository) and add information to appropriate element attributes that will
+ enable custom datatype class modules, datatypes converters and display applications
+ to be discovered and properly imported by the datatypes registry. The value of override
+ will be False when a tool shed repository is being installed. Since installation is
+ occurring after the datatypes registry has been initialized, the registry's contents
+ cannot be overridden by conflicting data types.
+ """
+ tree, error_message = xml_util.parse_xml( datatypes_config )
+ if tree is None:
+ return None, None
+ datatypes_config_root = tree.getroot()
+ registration = datatypes_config_root.find( 'registration' )
+ if registration is None:
+ # We have valid XML, but not a valid custom datatypes definition.
+ return None, None
+ sniffers = datatypes_config_root.find( 'sniffers' )
+ converter_path, display_path = self.get_converter_and_display_paths( registration,
+ relative_install_dir )
+ if converter_path:
+ # Path to datatype converters
+ registration.attrib[ 'proprietary_converter_path' ] = converter_path
+ if display_path:
+ # Path to datatype display applications
+ registration.attrib[ 'proprietary_display_path' ] = display_path
+ relative_path_to_datatype_file_name = None
+ datatype_files = datatypes_config_root.find( 'datatype_files' )
+ datatype_class_modules = []
+ if datatype_files is not None:
+ # The <datatype_files> tag set contains any number of <datatype_file> tags.
+ # <datatype_files>
+ # <datatype_file name="gmap.py"/>
+ # <datatype_file name="metagenomics.py"/>
+ # </datatype_files>
+ # We'll add attributes to the datatype tag sets so that the modules can be properly imported
+ # by the datatypes registry.
+ for elem in datatype_files.findall( 'datatype_file' ):
+ datatype_file_name = elem.get( 'name', None )
+ if datatype_file_name:
+ # Find the file in the installed repository.
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == datatype_file_name:
+ datatype_class_modules.append( os.path.join( root, name ) )
+ break
+ break
+ if datatype_class_modules:
+ for relative_path_to_datatype_file_name in datatype_class_modules:
+ datatype_file_name_path, datatype_file_name = os.path.split( relative_path_to_datatype_file_name )
+ for elem in registration.findall( 'datatype' ):
+ # Handle 'type' attribute which should be something like one of the following:
+ # type="gmap:GmapDB"
+ # type="galaxy.datatypes.gmap:GmapDB"
+ dtype = elem.get( 'type', None )
+ if dtype:
+ fields = dtype.split( ':' )
+ proprietary_datatype_module = fields[ 0 ]
+ if proprietary_datatype_module.find( '.' ) >= 0:
+ # Handle the case where datatype_module is "galaxy.datatypes.gmap".
+ proprietary_datatype_module = proprietary_datatype_module.split( '.' )[ -1 ]
+ # The value of proprietary_path must be an absolute path due to job_working_directory.
+ elem.attrib[ 'proprietary_path' ] = os.path.abspath( datatype_file_name_path )
+ elem.attrib[ 'proprietary_datatype_module' ] = proprietary_datatype_module
+ # Temporarily persist the custom datatypes configuration file so it can be loaded into the
+ # datatypes registry.
+ fd, proprietary_datatypes_config = tempfile.mkstemp( prefix="tmp-toolshed-acalpd" )
+ os.write( fd, '<?xml version="1.0"?>\n' )
+ os.write( fd, '<datatypes>\n' )
+ os.write( fd, '%s' % xml_util.xml_to_string( registration ) )
+ if sniffers is not None:
+ os.write( fd, '%s' % xml_util.xml_to_string( sniffers ) )
+ os.write( fd, '</datatypes>\n' )
+ os.close( fd )
+ os.chmod( proprietary_datatypes_config, 0644 )
+ # Load custom datatypes
+ self.app.datatypes_registry.load_datatypes( root_dir=self.app.config.root,
+ config=proprietary_datatypes_config,
+ deactivate=deactivate,
+ override=override )
+ if deactivate:
+ # Reload the upload tool to eliminate deactivated datatype extensions from the file_type
+ # select list.
+ tool_util.reload_upload_tools( self.app )
+ else:
+ self.append_to_datatypes_registry_upload_file_formats( registration )
+ tool_util.reload_upload_tools( self.app )
+ if datatype_files is not None:
+ try:
+ os.unlink( proprietary_datatypes_config )
+ except:
+ pass
+ return converter_path, display_path
+
+ def append_to_datatypes_registry_upload_file_formats( self, elem ):
+ # See if we have any datatypes that should be displayed in the upload tool's file_type select list.
+ for datatype_elem in elem.findall( 'datatype' ):
+ extension = datatype_elem.get( 'extension', None )
+ display_in_upload = datatype_elem.get( 'display_in_upload', None )
+ if extension is not None and display_in_upload is not None:
+ display_in_upload = asbool( str( display_in_upload ) )
+ if display_in_upload and extension not in self.app.datatypes_registry.upload_file_formats:
+ self.app.datatypes_registry.upload_file_formats.append( extension )
+
+ def create_repository_dict_for_proprietary_datatypes( self, tool_shed, name, owner, installed_changeset_revision,
+ tool_dicts, converter_path=None, display_path=None ):
+ return dict( tool_shed=tool_shed,
+ repository_name=name,
+ repository_owner=owner,
+ installed_changeset_revision=installed_changeset_revision,
+ tool_dicts=tool_dicts,
+ converter_path=converter_path,
+ display_path=display_path )
+
+ def get_converter_and_display_paths( self, registration_elem, relative_install_dir ):
+ """
+ Find the relative path to data type converters and display applications included
+ in installed tool shed repositories.
+ """
+ converter_path = None
+ display_path = None
+ for elem in registration_elem.findall( 'datatype' ):
+ if not converter_path:
+ # If any of the <datatype> tag sets contain <converter> tags, set the converter_path
+ # if it is not already set. This requires developers to place all converters in the
+ # same subdirectory within the repository hierarchy.
+ for converter in elem.findall( 'converter' ):
+ converter_config = converter.get( 'file', None )
+ if converter_config:
+ converter_config_file_name = basic_util.strip_path( converter_config )
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == converter_config_file_name:
+ # The value of converter_path must be absolute due to job_working_directory.
+ converter_path = os.path.abspath( root )
+ break
+ if converter_path:
+ break
+ if not display_path:
+ # If any of the <datatype> tag sets contain <display> tags, set the display_path
+ # if it is not already set. This requires developers to place all display acpplications
+ # in the same subdirectory within the repository hierarchy.
+ for display_app in elem.findall( 'display' ):
+ display_config = display_app.get( 'file', None )
+ if display_config:
+ display_config_file_name = basic_util.strip_path( display_config )
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == display_config_file_name:
+ # The value of display_path must be absolute due to job_working_directory.
+ display_path = os.path.abspath( root )
+ break
+ if display_path:
+ break
+ if converter_path and display_path:
+ break
+ return converter_path, display_path
+
+ def load_installed_datatype_converters( self, installed_repository_dict, deactivate=False ):
+ """Load or deactivate proprietary datatype converters."""
+ self.app.datatypes_registry.load_datatype_converters( self.app.toolbox,
+ installed_repository_dict=installed_repository_dict,
+ deactivate=deactivate )
+
+ def load_installed_datatypes( self, repository, relative_install_dir, deactivate=False ):
+ """
+ Load proprietary datatypes and return information needed for loading custom
+ datatypes converters and display applications later.
+ """
+ metadata = repository.metadata
+ repository_dict = None
+ datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, relative_install_dir )
+ if datatypes_config:
+ converter_path, display_path = \
+ self.alter_config_and_load_prorietary_datatypes( datatypes_config,
+ relative_install_dir,
+ deactivate=deactivate )
+ if converter_path or display_path:
+ # Create a dictionary of tool shed repository related information.
+ repository_dict = \
+ self.create_repository_dict_for_proprietary_datatypes( tool_shed=repository.tool_shed,
+ name=repository.name,
+ owner=repository.owner,
+ installed_changeset_revision=repository.installed_changeset_revision,
+ tool_dicts=metadata.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
+ return repository_dict
+
+ def load_installed_display_applications( self, installed_repository_dict, deactivate=False ):
+ """Load or deactivate custom datatype display applications."""
+ self.app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict,
+ deactivate=deactivate )
diff -r 053943b668af334c480b464fe7351b50284b7099 -r 74d7ff952ec902e68cc99745fdcd3c14e86208b2 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -18,7 +18,6 @@
from tool_shed.util import basic_util
from tool_shed.util import common_util
-from tool_shed.util import datatype_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
from tool_shed.util import shed_util_common as suc
@@ -26,6 +25,7 @@
from tool_shed.util import tool_util
from tool_shed.util import xml_util
+from tool_shed.galaxy_install.datatypes import custom_datatype_manager
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
from tool_shed.galaxy_install.tool_dependencies.recipe.env_file_builder import EnvFileBuilder
@@ -594,18 +594,19 @@
files_dir = os.path.join( shed_config_dict[ 'tool_path' ], files_dir )
datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, files_dir )
# Load data types required by tools.
+ cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
converter_path, display_path = \
- datatype_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, files_dir, override=False )
+ cdl.alter_config_and_load_prorietary_datatypes( datatypes_config, files_dir, override=False )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
repository_dict = \
- datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
- name=tool_shed_repository.name,
- owner=tool_shed_repository.owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
+ cdl.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
+ name=tool_shed_repository.name,
+ owner=tool_shed_repository.owner,
+ installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
if converter_path:
# Load proprietary datatype converters
self.app.datatypes_registry.load_datatype_converters( self.app.toolbox, installed_repository_dict=repository_dict )
diff -r 053943b668af334c480b464fe7351b50284b7099 -r 74d7ff952ec902e68cc99745fdcd3c14e86208b2 lib/tool_shed/galaxy_install/installed_repository_manager.py
--- a/lib/tool_shed/galaxy_install/installed_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py
@@ -7,12 +7,12 @@
from galaxy import util
from tool_shed.util import common_util
from tool_shed.util import container_util
-from tool_shed.util import datatype_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_dependency_util
from tool_shed.util import xml_util
from galaxy.model.orm import and_
+from tool_shed.galaxy_install.datatypes import custom_datatype_manager
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
from tool_shed.galaxy_install.tools import data_manager
@@ -114,17 +114,17 @@
else:
repository_install_dir = os.path.abspath( relative_install_dir )
# Activate proprietary datatypes.
- installed_repository_dict = datatype_util.load_installed_datatypes( self.app,
- repository,
- repository_install_dir,
- deactivate=False )
+ cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
+ installed_repository_dict = cdl.load_installed_datatypes( repository,
+ repository_install_dir,
+ deactivate=False )
if installed_repository_dict:
converter_path = installed_repository_dict.get( 'converter_path' )
if converter_path is not None:
- datatype_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=False )
+ cdl.load_installed_datatype_converters( installed_repository_dict, deactivate=False )
display_path = installed_repository_dict.get( 'display_path' )
if display_path is not None:
- datatype_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=False )
+ cdl.load_installed_display_applications( installed_repository_dict, deactivate=False )
def add_entry_to_installed_repository_dependencies_of_installed_repositories( self, repository ):
"""
@@ -732,22 +732,24 @@
self.add_entry_to_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( tool_dependency )
def load_proprietary_datatypes( self ):
+ cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
for tool_shed_repository in self.context.query( self.install_model.ToolShedRepository ) \
- .filter( and_( self.install_model.ToolShedRepository.table.c.includes_datatypes==True,
- self.install_model.ToolShedRepository.table.c.deleted==False ) ) \
- .order_by( self.install_model.ToolShedRepository.table.c.id ):
+ .filter( and_( self.install_model.ToolShedRepository.table.c.includes_datatypes==True,
+ self.install_model.ToolShedRepository.table.c.deleted==False ) ) \
+ .order_by( self.install_model.ToolShedRepository.table.c.id ):
relative_install_dir = self.get_repository_install_dir( tool_shed_repository )
if relative_install_dir:
- installed_repository_dict = datatype_util.load_installed_datatypes( self.app, tool_shed_repository, relative_install_dir )
+ installed_repository_dict = cdl.load_installed_datatypes( tool_shed_repository, relative_install_dir )
if installed_repository_dict:
self.installed_repository_dicts.append( installed_repository_dict )
def load_proprietary_converters_and_display_applications( self, deactivate=False ):
+ cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
for installed_repository_dict in self.installed_repository_dicts:
if installed_repository_dict[ 'converter_path' ]:
- datatype_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=deactivate )
+ cdl.load_installed_datatype_converters( installed_repository_dict, deactivate=deactivate )
if installed_repository_dict[ 'display_path' ]:
- datatype_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=deactivate )
+ cdl.load_installed_display_applications( installed_repository_dict, deactivate=deactivate )
def purge_repository( self, repository ):
"""Purge a repository with status New (a white ghost) from the database."""
diff -r 053943b668af334c480b464fe7351b50284b7099 -r 74d7ff952ec902e68cc99745fdcd3c14e86208b2 lib/tool_shed/galaxy_install/tool_migration_manager.py
--- a/lib/tool_shed/galaxy_install/tool_migration_manager.py
+++ b/lib/tool_shed/galaxy_install/tool_migration_manager.py
@@ -14,6 +14,7 @@
from galaxy.util.odict import odict
from tool_shed.galaxy_install import install_manager
+from tool_shed.galaxy_install.datatypes import custom_datatype_manager
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.tools import tool_panel_manager
@@ -22,7 +23,6 @@
from tool_shed.util import basic_util
from tool_shed.util import common_util
-from tool_shed.util import datatype_util
from tool_shed.util import hg_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_dependency_util
@@ -492,6 +492,7 @@
print '\nThe ToolMigrationManager returned the following error while installing tool dependency ', installed_tool_dependency.name, ':'
print installed_tool_dependency.error_message, '\n\n'
if 'datatypes' in metadata_dict:
+ cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
tool_shed_repository.status = self.app.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
tool_shed_repository.includes_datatypes = True
@@ -499,21 +500,27 @@
self.app.install_model.context.flush()
work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-hrc" )
datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, repo_install_dir )
- # Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
- # after this installation completes.
- converter_path, display_path = datatype_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
+ # Load proprietary data types required by tools. The value of override is not
+ # important here since the Galaxy server will be started after this installation
+ #completes.
+ converter_path, display_path = \
+ cdl.alter_config_and_load_prorietary_datatypes( datatypes_config,
+ repo_install_dir,
+ override=False )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
- repository_dict = datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed_url,
- name=tool_shed_repository.name,
- owner=self.repository_owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
+ repository_dict = \
+ cdl.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed_url,
+ name=tool_shed_repository.name,
+ owner=self.repository_owner,
+ installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
if converter_path:
# Load proprietary datatype converters
- self.app.datatypes_registry.load_datatype_converters( self.toolbox, installed_repository_dict=repository_dict )
+ self.app.datatypes_registry.load_datatype_converters( self.toolbox,
+ installed_repository_dict=repository_dict )
if display_path:
# Load proprietary datatype display applications
self.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
diff -r 053943b668af334c480b464fe7351b50284b7099 -r 74d7ff952ec902e68cc99745fdcd3c14e86208b2 lib/tool_shed/util/datatype_util.py
--- a/lib/tool_shed/util/datatype_util.py
+++ /dev/null
@@ -1,187 +0,0 @@
-import logging
-import os
-import tempfile
-from galaxy import eggs
-from galaxy.util import asbool
-from tool_shed.util import basic_util
-from tool_shed.util import hg_util
-from tool_shed.util import tool_util
-from tool_shed.util import xml_util
-import tool_shed.util.shed_util_common as suc
-
-log = logging.getLogger( __name__ )
-
-def alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=False, override=True ):
- """
- Parse a proprietary datatypes config (a datatypes_conf.xml file included in an installed tool shed repository) and
- add information to appropriate element attributes that will enable proprietary datatype class modules, datatypes converters
- and display applications to be discovered and properly imported by the datatypes registry. The value of override will
- be False when a tool shed repository is being installed. Since installation is occurring after the datatypes registry
- has been initialized, the registry's contents cannot be overridden by conflicting data types.
- """
- tree, error_message = xml_util.parse_xml( datatypes_config )
- if tree is None:
- return None, None
- datatypes_config_root = tree.getroot()
- registration = datatypes_config_root.find( 'registration' )
- if registration is None:
- # We have valid XML, but not a valid proprietary datatypes definition.
- return None, None
- sniffers = datatypes_config_root.find( 'sniffers' )
- converter_path, display_path = get_converter_and_display_paths( registration, relative_install_dir )
- if converter_path:
- # Path to datatype converters
- registration.attrib[ 'proprietary_converter_path' ] = converter_path
- if display_path:
- # Path to datatype display applications
- registration.attrib[ 'proprietary_display_path' ] = display_path
- relative_path_to_datatype_file_name = None
- datatype_files = datatypes_config_root.find( 'datatype_files' )
- datatype_class_modules = []
- if datatype_files is not None:
- # The <datatype_files> tag set contains any number of <datatype_file> tags.
- # <datatype_files>
- # <datatype_file name="gmap.py"/>
- # <datatype_file name="metagenomics.py"/>
- # </datatype_files>
- # We'll add attributes to the datatype tag sets so that the modules can be properly imported by the datatypes registry.
- for elem in datatype_files.findall( 'datatype_file' ):
- datatype_file_name = elem.get( 'name', None )
- if datatype_file_name:
- # Find the file in the installed repository.
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == datatype_file_name:
- datatype_class_modules.append( os.path.join( root, name ) )
- break
- break
- if datatype_class_modules:
- for relative_path_to_datatype_file_name in datatype_class_modules:
- datatype_file_name_path, datatype_file_name = os.path.split( relative_path_to_datatype_file_name )
- for elem in registration.findall( 'datatype' ):
- # Handle 'type' attribute which should be something like one of the following:
- # type="gmap:GmapDB"
- # type="galaxy.datatypes.gmap:GmapDB"
- dtype = elem.get( 'type', None )
- if dtype:
- fields = dtype.split( ':' )
- proprietary_datatype_module = fields[ 0 ]
- if proprietary_datatype_module.find( '.' ) >= 0:
- # Handle the case where datatype_module is "galaxy.datatypes.gmap".
- proprietary_datatype_module = proprietary_datatype_module.split( '.' )[ -1 ]
- # The value of proprietary_path must be an absolute path due to job_working_directory.
- elem.attrib[ 'proprietary_path' ] = os.path.abspath( datatype_file_name_path )
- elem.attrib[ 'proprietary_datatype_module' ] = proprietary_datatype_module
- # Temporarily persist the proprietary datatypes configuration file so it can be loaded into the datatypes registry.
- fd, proprietary_datatypes_config = tempfile.mkstemp( prefix="tmp-toolshed-acalpd" )
- os.write( fd, '<?xml version="1.0"?>\n' )
- os.write( fd, '<datatypes>\n' )
- os.write( fd, '%s' % xml_util.xml_to_string( registration ) )
- if sniffers is not None:
- os.write( fd, '%s' % xml_util.xml_to_string( sniffers ) )
- os.write( fd, '</datatypes>\n' )
- os.close( fd )
- os.chmod( proprietary_datatypes_config, 0644 )
- # Load proprietary datatypes
- app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=proprietary_datatypes_config, deactivate=deactivate, override=override )
- if deactivate:
- # Reload the upload tool to eliminate deactivated datatype extensions from the file_type select list.
- tool_util.reload_upload_tools( app )
- else:
- append_to_datatypes_registry_upload_file_formats( app, registration )
- tool_util.reload_upload_tools( app )
- if datatype_files is not None:
- try:
- os.unlink( proprietary_datatypes_config )
- except:
- pass
- return converter_path, display_path
-
-def append_to_datatypes_registry_upload_file_formats( app, elem ):
- # See if we have any datatypes that should be displayed in the upload tool's file_type select list.
- for datatype_elem in elem.findall( 'datatype' ):
- extension = datatype_elem.get( 'extension', None )
- display_in_upload = datatype_elem.get( 'display_in_upload', None )
- if extension is not None and display_in_upload is not None:
- display_in_upload = asbool( str( display_in_upload ) )
- if display_in_upload and extension not in app.datatypes_registry.upload_file_formats:
- app.datatypes_registry.upload_file_formats.append( extension )
-
-def create_repository_dict_for_proprietary_datatypes( tool_shed, name, owner, installed_changeset_revision, tool_dicts, converter_path=None, display_path=None ):
- return dict( tool_shed=tool_shed,
- repository_name=name,
- repository_owner=owner,
- installed_changeset_revision=installed_changeset_revision,
- tool_dicts=tool_dicts,
- converter_path=converter_path,
- display_path=display_path )
-
-def get_converter_and_display_paths( registration_elem, relative_install_dir ):
- """Find the relative path to data type converters and display applications included in installed tool shed repositories."""
- converter_path = None
- display_path = None
- for elem in registration_elem.findall( 'datatype' ):
- if not converter_path:
- # If any of the <datatype> tag sets contain <converter> tags, set the converter_path
- # if it is not already set. This requires developers to place all converters in the
- # same subdirectory within the repository hierarchy.
- for converter in elem.findall( 'converter' ):
- converter_config = converter.get( 'file', None )
- if converter_config:
- converter_config_file_name = basic_util.strip_path( converter_config )
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == converter_config_file_name:
- # The value of converter_path must be absolute due to job_working_directory.
- converter_path = os.path.abspath( root )
- break
- if converter_path:
- break
- if not display_path:
- # If any of the <datatype> tag sets contain <display> tags, set the display_path
- # if it is not already set. This requires developers to place all display acpplications
- # in the same subdirectory within the repository hierarchy.
- for display_app in elem.findall( 'display' ):
- display_config = display_app.get( 'file', None )
- if display_config:
- display_config_file_name = basic_util.strip_path( display_config )
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == display_config_file_name:
- # The value of display_path must be absolute due to job_working_directory.
- display_path = os.path.abspath( root )
- break
- if display_path:
- break
- if converter_path and display_path:
- break
- return converter_path, display_path
-
-def load_installed_datatype_converters( app, installed_repository_dict, deactivate=False ):
- # Load or deactivate proprietary datatype converters
- app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=installed_repository_dict, deactivate=deactivate )
-
-def load_installed_datatypes( app, repository, relative_install_dir, deactivate=False ):
- # Load proprietary datatypes and return information needed for loading proprietary datatypes converters and display applications later.
- metadata = repository.metadata
- repository_dict = None
- datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, relative_install_dir )
- if datatypes_config:
- converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=deactivate )
- if converter_path or display_path:
- # Create a dictionary of tool shed repository related information.
- repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=repository.tool_shed,
- name=repository.name,
- owner=repository.owner,
- installed_changeset_revision=repository.installed_changeset_revision,
- tool_dicts=metadata.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
- return repository_dict
-
-def load_installed_display_applications( app, installed_repository_dict, deactivate=False ):
- # Load or deactivate proprietary datatype display applications
- app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict, deactivate=deactivate )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jmchilton: More standardized (if still terse) exception handling in AuthenticationController.
by commits-noreply@bitbucket.org 23 Jul '14
by commits-noreply@bitbucket.org 23 Jul '14
23 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/053943b668af/
Changeset: 053943b668af
User: jmchilton
Date: 2014-07-23 16:16:08
Summary: More standardized (if still terse) exception handling in AuthenticationController.
Affected #: 3 files
diff -r f24d7ac8ee9549fc19435aa1448854e9024982ff -r 053943b668af334c480b464fe7351b50284b7099 lib/galaxy/exceptions/__init__.py
--- a/lib/galaxy/exceptions/__init__.py
+++ b/lib/galaxy/exceptions/__init__.py
@@ -86,6 +86,11 @@
err_code = error_codes.USER_REQUEST_INVALID_PARAMETER
+class AuthenticationFailed( MessageException ):
+ status_code = 401
+ err_code = error_codes.USER_AUTHENTICATION_FAILED
+
+
class AuthenticationRequired( MessageException ):
status_code = 403
#TODO: as 401 and send WWW-Authenticate: ???
diff -r f24d7ac8ee9549fc19435aa1448854e9024982ff -r 053943b668af334c480b464fe7351b50284b7099 lib/galaxy/exceptions/error_codes.json
--- a/lib/galaxy/exceptions/error_codes.json
+++ b/lib/galaxy/exceptions/error_codes.json
@@ -60,6 +60,11 @@
"message": "Supplied incorrect or incompatible tool meta parameters."
},
{
+ "name": "USER_AUTHENTICATION_FAILED",
+ "code": 401001,
+ "message": "Authentication failed, invalid credentials supplied."
+ },
+ {
"name": "USER_NO_API_KEY",
"code": 403001,
"message": "API authentication required for this request"
diff -r f24d7ac8ee9549fc19435aa1448854e9024982ff -r 053943b668af334c480b464fe7351b50284b7099 lib/galaxy/webapps/galaxy/api/authenticate.py
--- a/lib/galaxy/webapps/galaxy/api/authenticate.py
+++ b/lib/galaxy/webapps/galaxy/api/authenticate.py
@@ -12,11 +12,10 @@
"""
from base64 import b64decode
-from paste.httpexceptions import HTTPBadRequest
from urllib import unquote
-from galaxy import web
-from galaxy.exceptions import ObjectNotFound
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+from galaxy import exceptions
from galaxy.web.base.controller import BaseAPIController, CreatesApiKeysMixin
import logging
@@ -25,7 +24,7 @@
class AuthenticationController( BaseAPIController, CreatesApiKeysMixin ):
- @web.expose_api_anonymous
+ @expose_api_anonymous
def get_api_key( self, trans, **kwd ):
"""
def get_api_key( self, trans, **kwd )
@@ -43,7 +42,7 @@
if ( len( user ) is not 1 ):
# DB is inconsistent and we have more users with same email
- raise ObjectNotFound
+ raise exceptions.ObjectNotFound()
else:
user = user[0]
is_valid_user = user.check_password( password )
@@ -54,8 +53,7 @@
key = self.create_api_key( trans, user )
return dict( api_key=key )
else:
- trans.response.status = 500
- return "invalid password"
+ raise exceptions.AuthenticationFailed()
def _decode_baseauth( self, encoded_str ):
"""
@@ -81,7 +79,7 @@
try:
email, password = b64decode( split[ 0 ] ).split( ':' )
except:
- raise HTTPBadRequest
+ raise exceptions.ActionInputError()
# If there are only two elements, check the first and ensure it says
# 'basic' so that we know we're about to decode the right thing. If not,
@@ -91,13 +89,13 @@
try:
email, password = b64decode( split[ 1 ] ).split( ':' )
except:
- raise HTTPBadRequest
+ raise exceptions.ActionInputError()
else:
- raise HTTPBadRequest
+ raise exceptions.ActionInputError()
# If there are more than 2 elements, something crazy must be happening.
# Bail.
else:
- raise HTTPBadRequest
+ raise exceptions.ActionInputError()
return unquote( email ), unquote( password )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fixes in exception handlers in the Tool Shed's create_categories and create_users API modules.
by commits-noreply@bitbucket.org 23 Jul '14
by commits-noreply@bitbucket.org 23 Jul '14
23 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f24d7ac8ee95/
Changeset: f24d7ac8ee95
User: greg
Date: 2014-07-23 15:47:21
Summary: Fixes in exception handlers in the Tool Shed's create_categories and create_users API modules.
Affected #: 2 files
diff -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 -r f24d7ac8ee9549fc19435aa1448854e9024982ff lib/tool_shed/scripts/api/create_categories.py
--- a/lib/tool_shed/scripts/api/create_categories.py
+++ b/lib/tool_shed/scripts/api/create_categories.py
@@ -44,7 +44,7 @@
response = submit( url, data, api_key )
except Exception, e:
response = str( e )
- log.exception( str( e ) )
+ print "Error attempting to create category using URL: ", url, " exception: ", str( e )
create_response_dict = dict( response=response )
create_response_dicts.append( create_response_dict )
diff -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 -r f24d7ac8ee9549fc19435aa1448854e9024982ff lib/tool_shed/scripts/api/create_users.py
--- a/lib/tool_shed/scripts/api/create_users.py
+++ b/lib/tool_shed/scripts/api/create_users.py
@@ -47,7 +47,7 @@
response = submit( url, data, api_key )
except Exception, e:
response = str( e )
- log.exception( str( e ) )
+ print "Error attempting to create user using URL: ", url, " exception: ", str( e )
create_response_dict = dict( response=response )
create_response_dicts.append( create_response_dict )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Move some functions out of the Tool Shed's shed_util_common module.
by commits-noreply@bitbucket.org 22 Jul '14
by commits-noreply@bitbucket.org 22 Jul '14
22 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0c62ccbc8624/
Changeset: 0c62ccbc8624
User: greg
Date: 2014-07-22 22:46:30
Summary: Move some functions out of the Tool Shed's shed_util_common module.
Affected #: 7 files
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/capsule/capsule_manager.py
--- a/lib/tool_shed/capsule/capsule_manager.py
+++ b/lib/tool_shed/capsule/capsule_manager.py
@@ -9,6 +9,7 @@
from time import strftime
from galaxy import web
+from galaxy.model.orm import and_
from galaxy.util import asbool
from galaxy.util import CHUNK_SIZE
from galaxy.util.odict import odict
@@ -408,7 +409,7 @@
flush = True
# Do not allow dependent repository revisions to be automatically installed if population
# resulted in errors.
- dependent_downloadable_revisions = suc.get_dependent_downloadable_revisions( self.app, repository_metadata )
+ dependent_downloadable_revisions = self.get_dependent_downloadable_revisions( repository_metadata )
for dependent_downloadable_revision in dependent_downloadable_revisions:
if dependent_downloadable_revision.downloadable:
dependent_downloadable_revision.downloadable = False
@@ -541,6 +542,66 @@
archives.append( archive_file_name )
return archives, error_message
+ def get_dependent_downloadable_revisions( self, repository_metadata ):
+ """
+ Return all repository_metadata records that are downloadable and that depend upon the received
+ repository_metadata record.
+ """
+ # This method is called only from the tool shed.
+ sa_session = self.app.model.context.current
+ rm_changeset_revision = repository_metadata.changeset_revision
+ rm_repository = repository_metadata.repository
+ rm_repository_name = str( rm_repository.name )
+ rm_repository_owner = str( rm_repository.user.username )
+ dependent_downloadable_revisions = []
+ for repository in sa_session.query( self.app.model.Repository ) \
+ .filter( and_( self.app.model.Repository.table.c.id != rm_repository.id,
+ self.app.model.Repository.table.c.deleted == False,
+ self.app.model.Repository.table.c.deprecated == False ) ):
+ downloadable_revisions = repository.downloadable_revisions
+ if downloadable_revisions:
+ for downloadable_revision in downloadable_revisions:
+ if downloadable_revision.has_repository_dependencies:
+ metadata = downloadable_revision.metadata
+ if metadata:
+ repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
+ repository_dependencies_tups = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ for repository_dependencies_tup in repository_dependencies_tups:
+ tool_shed, \
+ name, \
+ owner, \
+ changeset_revision, \
+ prior_installation_required, \
+ only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( repository_dependencies_tup )
+ if name == rm_repository_name and owner == rm_repository_owner:
+ # We've discovered a repository revision that depends upon the repository associated
+ # with the received repository_metadata record, but we need to make sure it depends
+ # upon the revision.
+ if changeset_revision == rm_changeset_revision:
+ dependent_downloadable_revisions.append( downloadable_revision )
+ else:
+ # Make sure the defined changeset_revision is current.
+ defined_repository_metadata = \
+ sa_session.query( self.app.model.RepositoryMetadata ) \
+ .filter( self.app.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) \
+ .first()
+ if defined_repository_metadata is None:
+ # The defined changeset_revision is not associated with a repository_metadata
+ # record, so updates must be necessary.
+ defined_repository = suc.get_repository_by_name_and_owner( self.app, name, owner )
+ defined_repo = hg_util.get_repo_for_repository( self.app,
+ repository=defined_repository,
+ repo_path=None,
+ create=False )
+ updated_changeset_revision = \
+ suc.get_next_downloadable_changeset_revision( defined_repository,
+ defined_repo,
+ changeset_revision )
+ if updated_changeset_revision == rm_changeset_revision:
+ dependent_downloadable_revisions.append( downloadable_revision )
+ return dependent_downloadable_revisions
+
def get_export_info_dict( self, export_info_file_path ):
"""
Parse the export_info.xml file contained within the capsule and return a dictionary
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/galaxy_install/installed_repository_manager.py
--- a/lib/tool_shed/galaxy_install/installed_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py
@@ -342,8 +342,8 @@
installed_rd_tups = []
missing_rd_tups = []
for tsr in repository.repository_dependencies:
- prior_installation_required = suc.set_prior_installation_required( self.app, repository, tsr )
- only_if_compiling_contained_td = suc.set_only_if_compiling_contained_td( repository, tsr )
+ prior_installation_required = self.set_prior_installation_required( repository, tsr )
+ only_if_compiling_contained_td = self.set_only_if_compiling_contained_td( repository, tsr )
rd_tup = [ tsr.tool_shed,
tsr.name,
tsr.owner,
@@ -957,6 +957,47 @@
return True
return False
+ def set_only_if_compiling_contained_td( self, repository, required_repository ):
+ """
+ Return True if the received required_repository is only needed to compile a tool
+ dependency defined for the received repository.
+ """
+ # This method is called only from Galaxy when rendering repository dependencies
+ # for an installed tool shed repository.
+ # TODO: Do we need to check more than changeset_revision here?
+ required_repository_tup = [ required_repository.tool_shed, \
+ required_repository.name, \
+ required_repository.owner, \
+ required_repository.changeset_revision ]
+ for tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td:
+ partial_tup = tup[ 0:4 ]
+ if partial_tup == required_repository_tup:
+ return 'True'
+ return 'False'
+
+ def set_prior_installation_required( self, repository, required_repository ):
+ """
+ Return True if the received required_repository must be installed before the
+ received repository.
+ """
+ tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app,
+ str( required_repository.tool_shed ) )
+ required_repository_tup = [ tool_shed_url,
+ str( required_repository.name ),
+ str( required_repository.owner ),
+ str( required_repository.changeset_revision ) ]
+ # Get the list of repository dependency tuples associated with the received repository
+ # where prior_installation_required is True.
+ required_rd_tups_that_must_be_installed = repository.requires_prior_installation_of
+ for required_rd_tup in required_rd_tups_that_must_be_installed:
+ # Repository dependency tuples in metadata include a prior_installation_required value,
+ # so strip it for comparision.
+ partial_required_rd_tup = required_rd_tup[ 0:4 ]
+ if partial_required_rd_tup == required_repository_tup:
+ # Return the string value of prior_installation_required, which defaults to 'False'.
+ return str( required_rd_tup[ 4 ] )
+ return 'False'
+
def update_existing_tool_dependency( self, repository, original_dependency_dict, new_dependencies_dict ):
"""
Update an exsiting tool dependency whose definition was updated in a change set
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
--- a/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
+++ b/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
@@ -2,9 +2,9 @@
Class encapsulating the management of repository dependencies installed or being installed
into Galaxy from the Tool Shed.
"""
-
import json
import logging
+import os
import urllib
import urllib2
@@ -63,11 +63,10 @@
break
if d_repository is None:
# The dependent repository is not in the received list so look in the database.
- d_repository = suc.get_or_create_tool_shed_repository( self.app,
- d_toolshed,
- d_name,
- d_owner,
- d_changeset_revision )
+ d_repository = self.get_or_create_tool_shed_repository( d_toolshed,
+ d_name,
+ d_owner,
+ d_changeset_revision )
# Process each repository_dependency defined for the current dependent repository.
for repository_dependency_components_list in val:
required_repository = None
@@ -87,11 +86,10 @@
break
if required_repository is None:
# The required repository is not in the received list so look in the database.
- required_repository = suc.get_or_create_tool_shed_repository( self.app,
- rd_toolshed,
- rd_name,
- rd_owner,
- rd_changeset_revision )
+ required_repository = self.get_or_create_tool_shed_repository( rd_toolshed,
+ rd_name,
+ rd_owner,
+ rd_changeset_revision )
# Ensure there is a repository_dependency relationship between d_repository and required_repository.
rrda = None
for rd in d_repository.repository_dependencies:
@@ -257,6 +255,35 @@
self.build_repository_dependency_relationships( all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts
+ def get_or_create_tool_shed_repository( self, tool_shed, name, owner, changeset_revision ):
+ """
+ Return a tool shed repository database record defined by the combination of
+ tool shed, repository name, repository owner and changeset_revision or
+ installed_changeset_revision. A new tool shed repository record will be
+ created if one is not located.
+ """
+ install_model = self.app.install_model
+ # We store the port in the database.
+ tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
+ # This method is used only in Galaxy, not the tool shed.
+ repository = suc.get_repository_for_dependency_relationship( self.app, tool_shed, name, owner, changeset_revision )
+ if not repository:
+ tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed )
+ repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
+ ctx_rev = suc.get_ctx_rev( self.app, tool_shed_url, name, owner, changeset_revision )
+ repository = suc.create_or_update_tool_shed_repository( app=self.app,
+ name=name,
+ description=None,
+ installed_changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_clone_url=repository_clone_url,
+ metadata_dict={},
+ status=install_model.ToolShedRepository.installation_status.NEW,
+ current_changeset_revision=None,
+ owner=owner,
+ dist_to_shed=False )
+ return repository
+
def get_repository_dependencies_for_installed_tool_shed_repository( self, app, repository ):
"""
Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/metadata/repository_metadata_manager.py
--- a/lib/tool_shed/metadata/repository_metadata_manager.py
+++ b/lib/tool_shed/metadata/repository_metadata_manager.py
@@ -410,7 +410,7 @@
repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False )
for changeset in repo.changelog:
changeset_hash = str( repo.changectx( changeset ) )
- skip_tool_test = suc.get_skip_tool_test_by_changeset_revision( self.app, changeset_hash )
+ skip_tool_test = self.get_skip_tool_test_by_changeset_revision( changeset_hash )
if skip_tool_test:
# We found a skip_tool_test record associated with the changeset_revision,
# so see if it has a valid repository_revision.
@@ -521,6 +521,16 @@
return self.sa_session.query( self.app.model.Repository ) \
.filter( self.app.model.Repository.table.c.deleted == False )
+ def get_skip_tool_test_by_changeset_revision( self, changeset_revision ):
+ """
+ Return a skip_tool_test record whose initial_changeset_revision is the received
+ changeset_revision.
+ """
+ # There should only be one, but we'll use first() so callers won't have to handle exceptions.
+ return self.sa_session.query( self.app.model.SkipToolTest ) \
+ .filter( self.app.model.SkipToolTest.table.c.initial_changeset_revision == changeset_revision ) \
+ .first()
+
def new_datatypes_metadata_required( self, repository_metadata, metadata_dict ):
"""
Compare the last saved metadata for each datatype in the repository with the new metadata
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/tools/data_table_manager.py
--- a/lib/tool_shed/tools/data_table_manager.py
+++ b/lib/tool_shed/tools/data_table_manager.py
@@ -2,8 +2,9 @@
import os
import shutil
+from xml.etree import ElementTree as XmlET
+
from tool_shed.util import hg_util
-from tool_shed.util import shed_util_common as suc
from tool_shed.util import xml_util
log = logging.getLogger( __name__ )
@@ -14,6 +15,36 @@
def __init__( self, app ):
self.app = app
+ def generate_repository_info_elem( self, tool_shed, repository_name, changeset_revision, owner,
+ parent_elem=None, **kwd ):
+ """Create and return an ElementTree repository info Element."""
+ if parent_elem is None:
+ elem = XmlET.Element( 'tool_shed_repository' )
+ else:
+ elem = XmlET.SubElement( parent_elem, 'tool_shed_repository' )
+ tool_shed_elem = XmlET.SubElement( elem, 'tool_shed' )
+ tool_shed_elem.text = tool_shed
+ repository_name_elem = XmlET.SubElement( elem, 'repository_name' )
+ repository_name_elem.text = repository_name
+ repository_owner_elem = XmlET.SubElement( elem, 'repository_owner' )
+ repository_owner_elem.text = owner
+ changeset_revision_elem = XmlET.SubElement( elem, 'installed_changeset_revision' )
+ changeset_revision_elem.text = changeset_revision
+ #add additional values
+ #TODO: enhance additional values to allow e.g. use of dict values that will recurse
+ for key, value in kwd.iteritems():
+ new_elem = XmlET.SubElement( elem, key )
+ new_elem.text = value
+ return elem
+
+ def generate_repository_info_elem_from_repository( self, tool_shed_repository, parent_elem=None, **kwd ):
+ return self.generate_repository_info_elem( tool_shed_repository.tool_shed,
+ tool_shed_repository.name,
+ tool_shed_repository.installed_changeset_revision,
+ tool_shed_repository.owner,
+ parent_elem=parent_elem,
+ **kwd )
+
def get_tool_index_sample_files( self, sample_files ):
"""
Try to return the list of all appropriate tool data sample files included
@@ -129,7 +160,7 @@
if path:
file_elem.set( 'path', os.path.normpath( os.path.join( target_dir, os.path.split( path )[1] ) ) )
# Store repository info in the table tag set for trace-ability.
- repo_elem = suc.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=elem )
+ repo_elem = self.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=elem )
if elems:
# Remove old data_table
os.unlink( tool_data_table_conf_filename )
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -7,6 +7,18 @@
log = logging.getLogger( __name__ )
+def get_latest_changeset_revision( app, repository, repo ):
+ repository_tip = repository.tip( app )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( app,
+ app.security.encode_id( repository.id ),
+ repository_tip )
+ if repository_metadata and repository_metadata.downloadable:
+ return repository_tip
+ changeset_revisions = suc.get_ordered_metadata_changeset_revisions( repository, repo, downloadable=False )
+ if changeset_revisions:
+ return changeset_revisions[ -1 ]
+ return hg_util.INITIAL_CHANGELOG_HASH
+
def get_latest_repository_metadata( app, decoded_repository_id, downloadable=False ):
"""Get last metadata defined for a specified repository from the database."""
sa_session = app.model.context.current
@@ -15,7 +27,7 @@
if downloadable:
changeset_revision = suc.get_latest_downloadable_changeset_revision( app, repository, repo )
else:
- changeset_revision = suc.get_latest_changeset_revision( app, repository, repo )
+ changeset_revision = get_latest_changeset_revision( app, repository, repo )
return suc.get_repository_metadata_by_changeset_revision( app,
app.security.encode_id( repository.id ),
changeset_revision )
diff -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a -r 0c62ccbc86246c9aebf8ff78c550234dfa6d5bd5 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -17,7 +17,6 @@
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
-from xml.etree import ElementTree as XmlET
from urllib2 import HTTPError
log = logging.getLogger( __name__ )
@@ -165,35 +164,6 @@
components_list = [ toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ]
return components_list
-def generate_repository_info_elem( tool_shed, repository_name, changeset_revision, owner, parent_elem=None, **kwd ):
- """Create and return an ElementTree repository info Element."""
- if parent_elem is None:
- elem = XmlET.Element( 'tool_shed_repository' )
- else:
- elem = XmlET.SubElement( parent_elem, 'tool_shed_repository' )
- tool_shed_elem = XmlET.SubElement( elem, 'tool_shed' )
- tool_shed_elem.text = tool_shed
- repository_name_elem = XmlET.SubElement( elem, 'repository_name' )
- repository_name_elem.text = repository_name
- repository_owner_elem = XmlET.SubElement( elem, 'repository_owner' )
- repository_owner_elem.text = owner
- changeset_revision_elem = XmlET.SubElement( elem, 'installed_changeset_revision' )
- changeset_revision_elem.text = changeset_revision
- #add additional values
- #TODO: enhance additional values to allow e.g. use of dict values that will recurse
- for key, value in kwd.iteritems():
- new_elem = XmlET.SubElement( elem, key )
- new_elem.text = value
- return elem
-
-def generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=None, **kwd ):
- return generate_repository_info_elem( tool_shed_repository.tool_shed,
- tool_shed_repository.name,
- tool_shed_repository.installed_changeset_revision,
- tool_shed_repository.owner,
- parent_elem=parent_elem,
- **kwd )
-
def generate_sharable_link_for_repository_in_tool_shed( repository, changeset_revision=None ):
"""Generate the URL for sharing a repository that is in the tool shed."""
base_url = url_for( '/', qualified=True ).rstrip( '/' )
@@ -291,61 +261,6 @@
return repository_metadata
return None
-def get_dependent_downloadable_revisions( app, repository_metadata ):
- """
- Return all repository_metadata records that are downloadable and that depend upon the received
- repository_metadata record.
- """
- # This method is called only from the tool shed.
- sa_session = app.model.context.current
- rm_changeset_revision = repository_metadata.changeset_revision
- rm_repository = repository_metadata.repository
- rm_repository_name = str( rm_repository.name )
- rm_repository_owner = str( rm_repository.user.username )
- dependent_downloadable_revisions = []
- for repository in sa_session.query( app.model.Repository ) \
- .filter( and_( app.model.Repository.table.c.id != rm_repository.id,
- app.model.Repository.table.c.deleted == False,
- app.model.Repository.table.c.deprecated == False ) ):
- downloadable_revisions = repository.downloadable_revisions
- if downloadable_revisions:
- for downloadable_revision in downloadable_revisions:
- if downloadable_revision.has_repository_dependencies:
- metadata = downloadable_revision.metadata
- if metadata:
- repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
- repository_dependencies_tups = repository_dependencies_dict.get( 'repository_dependencies', [] )
- for repository_dependencies_tup in repository_dependencies_tups:
- tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( repository_dependencies_tup )
- if name == rm_repository_name and owner == rm_repository_owner:
- # We've discovered a repository revision that depends upon the repository associated
- # with the received repository_metadata record, but we need to make sure it depends
- # upon the revision.
- if changeset_revision == rm_changeset_revision:
- dependent_downloadable_revisions.append( downloadable_revision )
- else:
- # Make sure the defined changeset_revision is current.
- defined_repository_metadata = \
- sa_session.query( app.model.RepositoryMetadata ) \
- .filter( app.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) \
- .first()
- if defined_repository_metadata is None:
- # The defined changeset_revision is not associated with a repository_metadata
- # record, so updates must be necessary.
- defined_repository = get_repository_by_name_and_owner( app, name, owner )
- defined_repo = hg_util.get_repo_for_repository( app,
- repository=defined_repository,
- repo_path=None,
- create=False )
- updated_changeset_revision = \
- get_next_downloadable_changeset_revision( defined_repository,
- defined_repo,
- changeset_revision )
- if updated_changeset_revision == rm_changeset_revision:
- dependent_downloadable_revisions.append( downloadable_revision )
- return dependent_downloadable_revisions
-
def get_ids_of_tool_shed_repositories_being_installed( app, as_string=False ):
installing_repository_ids = []
new_status = app.install_model.ToolShedRepository.installation_status.NEW
@@ -365,18 +280,6 @@
return ','.join( installing_repository_ids )
return installing_repository_ids
-def get_latest_changeset_revision( app, repository, repo ):
- repository_tip = repository.tip( app )
- repository_metadata = get_repository_metadata_by_changeset_revision( app,
- app.security.encode_id( repository.id ),
- repository_tip )
- if repository_metadata and repository_metadata.downloadable:
- return repository_tip
- changeset_revisions = get_ordered_metadata_changeset_revisions( repository, repo, downloadable=False )
- if changeset_revisions:
- return changeset_revisions[ -1 ]
- return hg_util.INITIAL_CHANGELOG_HASH
-
def get_latest_downloadable_changeset_revision( app, repository, repo ):
repository_tip = repository.tip( app )
repository_metadata = get_repository_metadata_by_changeset_revision( app, app.security.encode_id( repository.id ), repository_tip )
@@ -441,37 +344,11 @@
continue
return key
-def get_or_create_tool_shed_repository( app, tool_shed, name, owner, changeset_revision ):
+def get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True ):
"""
- Return a tool shed repository database record defined by the combination of
- tool shed, repository name, repository owner and changeset_revision or
- installed_changeset_revision. A new tool shed repository record will be
- created if one is not located.
+ Return an ordered list of changeset_revisions that are associated with metadata
+ where order is defined by the repository changelog.
"""
- install_model = app.install_model
- # We store the port in the database.
- tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
- # This method is used only in Galaxy, not the tool shed.
- repository = get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision )
- if not repository:
- tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed )
- repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
- ctx_rev = get_ctx_rev( app, tool_shed_url, name, owner, changeset_revision )
- repository = create_or_update_tool_shed_repository( app=app,
- name=name,
- description=None,
- installed_changeset_revision=changeset_revision,
- ctx_rev=ctx_rev,
- repository_clone_url=repository_clone_url,
- metadata_dict={},
- status=install_model.ToolShedRepository.installation_status.NEW,
- current_changeset_revision=None,
- owner=owner,
- dist_to_shed=False )
- return repository
-
-def get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True ):
- """Return an ordered list of changeset_revisions that are associated with metadata where order is defined by the repository changelog."""
if downloadable:
metadata_revisions = repository.downloadable_revisions
else:
@@ -491,9 +368,11 @@
def get_prior_import_or_install_required_dict( app, tsr_ids, repo_info_dicts ):
"""
- This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies
- are being installed. Return a dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids, each of which is contained
- in the received list of tsr_ids and whose associated repository must be imported or installed prior to the repository associated with the tsr_id key.
+ This method is used in the Tool Shed when exporting a repository and its dependencies,
+ and in Galaxy when a repository and its dependencies are being installed. Return a
+ dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids,
+ each of which is contained in the received list of tsr_ids and whose associated repository
+ must be imported or installed prior to the repository associated with the tsr_id key.
"""
# Initialize the dictionary.
prior_import_or_install_required_dict = {}
@@ -698,11 +577,14 @@
def get_repository_ids_requiring_prior_import_or_install( app, tsr_ids, repository_dependencies ):
"""
- This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies
- are being installed. Inspect the received repository_dependencies and determine if the encoded id of each required repository is in the received
- tsr_ids. If so, then determine whether that required repository should be imported / installed prior to its dependent repository. Return a list
- of encoded repository ids, each of which is contained in the received list of tsr_ids, and whose associated repositories must be imported / installed
- prior to the dependent repository associated with the received repository_dependencies.
+ This method is used in the Tool Shed when exporting a repository and its dependencies,
+ and in Galaxy when a repository and its dependencies are being installed. Inspect the
+ received repository_dependencies and determine if the encoded id of each required
+ repository is in the received tsr_ids. If so, then determine whether that required
+ repository should be imported / installed prior to its dependent repository. Return a
+ list of encoded repository ids, each of which is contained in the received list of tsr_ids,
+ and whose associated repositories must be imported / installed prior to the dependent
+ repository associated with the received repository_dependencies.
"""
prior_tsr_ids = []
if repository_dependencies:
@@ -710,20 +592,32 @@
if key in [ 'description', 'root_key' ]:
continue
for rd_tup in rd_tups:
- tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ tool_shed, \
+ name, \
+ owner, \
+ changeset_revision, \
+ prior_installation_required, \
+ only_if_compiling_contained_td = \
common_util.parse_repository_dependency_tuple( rd_tup )
- # If only_if_compiling_contained_td is False, then the repository dependency is not required to be installed prior to the dependent
- # repository even if prior_installation_required is True. This is because the only meaningful content of the repository dependency
- # is its contained tool dependency, which is required in order to compile the dependent repository's tool dependency. In the scenario
- # where the repository dependency is not installed prior to the dependent repository's tool dependency compilation process, the tool
- # dependency compilation framework will install the repository dependency prior to compilation of the dependent repository's tool
- # dependency.
+ # If only_if_compiling_contained_td is False, then the repository dependency
+ # is not required to be installed prior to the dependent repository even if
+ # prior_installation_required is True. This is because the only meaningful
+ # content of the repository dependency is its contained tool dependency, which
+ # is required in order to compile the dependent repository's tool dependency.
+ # In the scenario where the repository dependency is not installed prior to the
+ # dependent repository's tool dependency compilation process, the tool dependency
+ # compilation framework will install the repository dependency prior to compilation
+ # of the dependent repository's tool dependency.
if not util.asbool( only_if_compiling_contained_td ):
if util.asbool( prior_installation_required ):
if is_tool_shed_client( app ):
# We store the port, if one exists, in the database.
tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
- repository = get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision )
+ repository = get_repository_for_dependency_relationship( app,
+ tool_shed,
+ name,
+ owner,
+ changeset_revision )
else:
repository = get_repository_by_name_and_owner( app, name, owner )
if repository:
@@ -779,14 +673,6 @@
query = app.model.context.query( app.model.Repository )
return query
-def get_skip_tool_test_by_changeset_revision( app, changeset_revision ):
- """Return a skip_tool_test record whose initial_changeset_revision is the received changeset_revision."""
- # There should only be one, but we'll use first() so callers won't have to handle exceptions.
- sa_session = app.model.context.current
- return sa_session.query( app.model.SkipToolTest ) \
- .filter( app.model.SkipToolTest.table.c.initial_changeset_revision == changeset_revision ) \
- .first()
-
def get_tool_panel_config_tool_path_install_dir( app, repository ):
"""
Return shed-related tool panel config, the tool_path configured in it, and the relative path to
@@ -1220,42 +1106,6 @@
text = re.sub( r'\.\. image:: (?!https?://)/?(.+)', r'.. image:: %s/\1' % route_to_images, text )
return text
-def set_only_if_compiling_contained_td( repository, required_repository ):
- """
- Return True if the received required_repository is only needed to compile a tool
- dependency defined for the received repository.
- """
- # This method is called only from Galaxy when rendering repository dependencies
- # for an installed tool shed repository.
- # TODO: Do we need to check more than changeset_revision here?
- required_repository_tup = [ required_repository.tool_shed, \
- required_repository.name, \
- required_repository.owner, \
- required_repository.changeset_revision ]
- for tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td:
- partial_tup = tup[ 0:4 ]
- if partial_tup == required_repository_tup:
- return 'True'
- return 'False'
-
-def set_prior_installation_required( app, repository, required_repository ):
- """Return True if the received required_repository must be installed before the received repository."""
- # This method is called only from Galaxy when rendering repository dependencies for an installed Tool Shed repository.
- tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, str( required_repository.tool_shed ) )
- required_repository_tup = [ tool_shed_url,
- str( required_repository.name ),
- str( required_repository.owner ),
- str( required_repository.changeset_revision ) ]
- # Get the list of repository dependency tuples associated with the received repository where prior_installation_required is True.
- required_rd_tups_that_must_be_installed = repository.requires_prior_installation_of
- for required_rd_tup in required_rd_tups_that_must_be_installed:
- # Repository dependency tuples in metadata include a prior_installation_required value, so strip it for comparision.
- partial_required_rd_tup = required_rd_tup[ 0:4 ]
- if partial_required_rd_tup == required_repository_tup:
- # Return the string value of prior_installation_required, which defaults to 'False'.
- return str( required_rd_tup[ 4 ] )
- return 'False'
-
def set_repository_attributes( app, repository, status, error_message, deleted, uninstalled, remove_from_disk=False ):
if remove_from_disk:
relative_install_dir = repository.repo_path( app )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Rename the Tool Shed's repository_maintenance_util module to be repository_util.
by commits-noreply@bitbucket.org 22 Jul '14
by commits-noreply@bitbucket.org 22 Jul '14
22 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4ee9e584dbc0/
Changeset: 4ee9e584dbc0
User: greg
Date: 2014-07-22 21:38:59
Summary: Rename the Tool Shed's repository_maintenance_util module to be repository_util.
Affected #: 9 files
diff -r 9cce4ead10795278446af4ab84eecfc4e2b26dce -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -16,7 +16,7 @@
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
from tool_shed.util import readme_util
-from tool_shed.util import repository_maintenance_util
+from tool_shed.util import repository_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
@@ -48,7 +48,7 @@
def activate_repository( self, trans, **kwd ):
"""Activate a repository that was deactivated but not uninstalled."""
repository_id = kwd[ 'id' ]
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
try:
trans.app.installed_repository_manager.activate_repository( repository )
except Exception, e:
@@ -74,7 +74,7 @@
def browse_repository( self, trans, **kwd ):
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
return trans.fill_template( '/admin/tool_shed_repository/browse_repository.mako',
repository=repository,
message=message,
@@ -107,7 +107,7 @@
action='purge_repository',
**kwd ) )
if operation == "activate or reinstall":
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
if repository.uninstalled:
# Since we're reinstalling the repository we need to find the latest changeset revision to which it can
# be updated so that we can reset the metadata if necessary. This will ensure that information about
@@ -208,7 +208,7 @@
def check_for_updates( self, trans, **kwd ):
"""Send a request to the relevant tool shed to see if there are any updates."""
repository_id = kwd.get( 'id', None )
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
params = '?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
( web.url_for( '/', qualified=True ),
@@ -234,7 +234,7 @@
status = kwd.get( 'status', 'done' )
remove_from_disk = kwd.get( 'remove_from_disk', '' )
remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk )
- tool_shed_repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+ tool_shed_repository = repository_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
shed_tool_conf, tool_path, relative_install_dir = \
suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
if relative_install_dir:
@@ -397,7 +397,7 @@
of the installed tool shed repository in Galaxy. We need it so that we can derive the tool shed from which
it was installed.
"""
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
if tool_shed_url is None or repository_name is None or repository_owner is None or changeset_revision is None:
message = "Unable to retrieve tool dependencies from the Tool Shed because one or more of the following required "
@@ -422,7 +422,7 @@
Send a request to the appropriate tool shed to retrieve the dictionary of information required to reinstall
an updated revision of an uninstalled tool shed repository.
"""
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
if tool_shed_url is None or repository_name is None or repository_owner is None or changeset_revision is None:
message = "Unable to retrieve updated repository information from the Tool Shed because one or more of the following "
@@ -516,7 +516,7 @@
status = kwd.get( 'status', 'done' )
repository_id = kwd.get( 'id', None )
if repository_id is not None:
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
if repository is not None:
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
name = str( repository.name )
@@ -574,7 +574,7 @@
dependencies are included in the updated revision.
"""
updating_repository_id = kwd.get( 'updating_repository_id', None )
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, updating_repository_id )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, updating_repository_id )
# All received dependencies need to be installed - confirmed by the caller.
encoded_tool_dependencies_dict = kwd.get( 'encoded_tool_dependencies_dict', None )
if encoded_tool_dependencies_dict is not None:
@@ -750,7 +750,7 @@
if repository_id is None:
return trans.show_error_message( 'Missing required encoded repository id.' )
operation = kwd.get( 'operation', None )
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
if repository is None:
return trans.show_error_message( 'Invalid repository specified.' )
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
@@ -1279,7 +1279,7 @@
repository_id = kwd.get( 'id', None )
new_kwd = {}
if repository_id is not None:
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
if repository:
if repository.is_new:
if kwd.get( 'purge_repository_button', False ):
@@ -1317,7 +1317,7 @@
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_id = kwd[ 'id' ]
- tool_shed_repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
+ tool_shed_repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
no_changes = kwd.get( 'no_changes', '' )
no_changes_checked = CheckboxField.is_checked( no_changes )
install_repository_dependencies = CheckboxField.is_checked( kwd.get( 'install_repository_dependencies', '' ) )
@@ -1387,14 +1387,14 @@
tool_dependencies = metadata.get( 'tool_dependencies', None )
else:
tool_dependencies = None
- repo_info_dict = repository_maintenance_util.create_repo_info_dict( trans.app,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.changeset_revision,
- ctx_rev=ctx_rev,
- repository_owner=tool_shed_repository.owner,
- repository_name=tool_shed_repository.name,
- tool_dependencies=tool_dependencies,
- repository_dependencies=repository_dependencies )
+ repo_info_dict = repository_util.create_repo_info_dict( trans.app,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
+ tool_dependencies=tool_dependencies,
+ repository_dependencies=repository_dependencies )
if repo_info_dict not in repo_info_dicts:
repo_info_dicts.append( repo_info_dict )
# Make sure all tool_shed_repository records exist.
@@ -1460,7 +1460,7 @@
action='browse_repositories',
message=message,
status=status ) )
- tool_shed_repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
+ tool_shed_repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
rrm = RepairRepositoryManager( trans.app )
if kwd.get( 'repair_repository_button', False ):
encoded_repair_dict = kwd.get( 'repair_dict', None )
@@ -1478,7 +1478,7 @@
repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
repositories_for_repair.append( repository )
return self.repair_tool_shed_repositories( trans, rrm, repositories_for_repair, ordered_repo_info_dicts )
- tool_shed_repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
+ tool_shed_repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
repair_dict = rrm.get_repair_dict( tool_shed_repository )
encoded_repair_dict = encoding_util.tool_shed_encode( repair_dict )
ordered_tsr_ids = repair_dict.get( 'ordered_tsr_ids', [] )
@@ -1540,7 +1540,7 @@
repository_id = kwd.get( 'id', None )
latest_changeset_revision = kwd.get( 'latest_changeset_revision', None )
latest_ctx_rev = kwd.get( 'latest_ctx_rev', None )
- tool_shed_repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
+ tool_shed_repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
metadata = tool_shed_repository.metadata
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( tool_shed_repository.tool_shed ) )
@@ -1600,14 +1600,14 @@
repository_dependencies = \
rdim.get_repository_dependencies_for_installed_tool_shed_repository( trans.app,
tool_shed_repository )
- repo_info_dict = repository_maintenance_util.create_repo_info_dict( trans.app,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.installed_changeset_revision,
- ctx_rev=tool_shed_repository.ctx_rev,
- repository_owner=tool_shed_repository.owner,
- repository_name=tool_shed_repository.name,
- tool_dependencies=tool_dependencies,
- repository_dependencies=repository_dependencies )
+ repo_info_dict = repository_util.create_repo_info_dict( trans.app,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
+ ctx_rev=tool_shed_repository.ctx_rev,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
+ tool_dependencies=tool_dependencies,
+ repository_dependencies=repository_dependencies )
irm = trans.app.installed_repository_manager
dependencies_for_repository_dict = irm.get_dependencies_for_repository( tool_shed_url,
repo_info_dict,
@@ -1727,7 +1727,7 @@
@web.require_admin
def reset_repository_metadata( self, trans, id ):
"""Reset all metadata on a single installed tool shed repository."""
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, id )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, id )
repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
@@ -1766,7 +1766,7 @@
@web.require_admin
def reset_to_install( self, trans, **kwd ):
"""An error occurred while cloning the repository, so reset everything necessary to enable another attempt."""
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
if kwd.get( 'reset_repository', False ):
suc.set_repository_attributes( trans.app,
repository,
@@ -1792,7 +1792,7 @@
Get the tool_versions from the tool shed for each tool in the installed revision of a selected tool shed
repository and update the metadata for the repository's revision in the Galaxy database.
"""
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( repository.name ),
str( repository.owner ),
@@ -2076,8 +2076,7 @@
for repository in trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
.filter( trans.install_model.ToolShedRepository.table.c.deleted == False ):
ok, updated = \
- repository_maintenance_util.check_or_update_tool_shed_status_for_installed_repository( trans.app,
- repository )
+ repository_util.check_or_update_tool_shed_status_for_installed_repository( trans.app, repository )
if ok:
success_count += 1
else:
@@ -2093,8 +2092,7 @@
repository_id = kwd.get( 'id', None )
repository = suc.get_tool_shed_repository_by_id( trans.app, repository_id )
ok, updated = \
- repository_maintenance_util.check_or_update_tool_shed_status_for_installed_repository( trans.app,
- repository )
+ repository_util.check_or_update_tool_shed_status_for_installed_repository( trans.app, repository )
if ok:
if updated:
message = "The tool shed status for repository <b>%s</b> has been updated." % str( repository.name )
@@ -2113,7 +2111,7 @@
def view_tool_metadata( self, trans, repository_id, tool_id, **kwd ):
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
repository_metadata = repository.metadata
shed_config_dict = repository.get_shed_config_dict( trans.app )
tool_metadata = {}
diff -r 9cce4ead10795278446af4ab84eecfc4e2b26dce -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -19,7 +19,7 @@
from tool_shed.util import basic_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
-from tool_shed.util import repository_maintenance_util
+from tool_shed.util import repository_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_util
@@ -194,10 +194,10 @@
includes_tools_for_display_in_tool_panel, \
has_repository_dependencies, \
has_repository_dependencies_only_if_compiling_contained_td = \
- repository_maintenance_util.get_repo_info_dict( trans.app,
- trans.user,
- encoded_repository_id,
- changeset_revision )
+ repository_util.get_repo_info_dict( trans.app,
+ trans.user,
+ encoded_repository_id,
+ changeset_revision )
return repository_dict, repository_metadata_dict, repo_info_dict
else:
log.debug( "Unable to locate repository_metadata record for repository id %s and changeset_revision %s" % \
diff -r 9cce4ead10795278446af4ab84eecfc4e2b26dce -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a lib/galaxy/webapps/tool_shed/controllers/admin.py
--- a/lib/galaxy/webapps/tool_shed/controllers/admin.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/admin.py
@@ -11,7 +11,7 @@
from tool_shed.metadata import repository_metadata_manager
from tool_shed.util import metadata_util
-from tool_shed.util import repository_maintenance_util
+from tool_shed.util import repository_util
from tool_shed.util import shed_util_common as suc
log = logging.getLogger( __name__ )
@@ -320,14 +320,14 @@
def manage_role_associations( self, trans, **kwd ):
"""Manage users, groups and repositories associated with a role."""
role_id = kwd.get( 'id', None )
- role = repository_maintenance_util.get_role_by_id( trans.app, role_id )
+ role = repository_util.get_role_by_id( trans.app, role_id )
# We currently only have a single role associated with a repository, the repository admin role.
repository_role_association = role.repositories[ 0 ]
repository = repository_role_association.repository
- associations_dict = repository_maintenance_util.handle_role_associations( trans.app,
- role,
- repository,
- **kwd )
+ associations_dict = repository_util.handle_role_associations( trans.app,
+ role,
+ repository,
+ **kwd )
in_users = associations_dict.get( 'in_users', [] )
out_users = associations_dict.get( 'out_users', [] )
in_groups = associations_dict.get( 'in_groups', [] )
diff -r 9cce4ead10795278446af4ab84eecfc4e2b26dce -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -31,7 +31,7 @@
from tool_shed.util import hg_util
from tool_shed.util import metadata_util
from tool_shed.util import readme_util
-from tool_shed.util import repository_maintenance_util
+from tool_shed.util import repository_util
from tool_shed.util import search_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_util
@@ -1070,7 +1070,7 @@
repository_type = kwd.get( 'repository_type', rt_util.UNRESTRICTED )
if kwd.get( 'create_repository_button', False ):
error = False
- message = repository_maintenance_util.validate_repository_name( trans.app, name, trans.user )
+ message = repository_util.validate_repository_name( trans.app, name, trans.user )
if message:
error = True
if not description:
@@ -1079,13 +1079,13 @@
if error:
status = 'error'
else:
- repository, message = repository_maintenance_util.create_repository( trans.app,
- name,
- repository_type,
- description,
- long_description,
- user_id=trans.user.id,
- category_ids=category_ids )
+ repository, message = repository_util.create_repository( trans.app,
+ name,
+ repository_type,
+ description,
+ long_description,
+ user_id=trans.user.id,
+ category_ids=category_ids )
trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
message=message,
@@ -1820,7 +1820,7 @@
repository_id, changeset_revision = tup
repo_info_dict, cur_includes_tools, cur_includes_tool_dependencies, cur_includes_tools_for_display_in_tool_panel, \
cur_has_repository_dependencies, cur_has_repository_dependencies_only_if_compiling_contained_td = \
- repository_maintenance_util.get_repo_info_dict( trans.app, trans.user, repository_id, changeset_revision )
+ repository_util.get_repo_info_dict( trans.app, trans.user, repository_id, changeset_revision )
if cur_has_repository_dependencies and not has_repository_dependencies:
has_repository_dependencies = True
if cur_has_repository_dependencies_only_if_compiling_contained_td and not has_repository_dependencies_only_if_compiling_contained_td:
@@ -1951,16 +1951,16 @@
after_changeset_revision=changeset_revision )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, repository_id, changeset_revision )
ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
- repo_info_dict = repository_maintenance_util.create_repo_info_dict( app=trans.app,
- repository_clone_url=repository_clone_url,
- changeset_revision=changeset_revision,
- ctx_rev=str( ctx.rev() ),
- repository_owner=repository.user.username,
- repository_name=repository.name,
- repository=repository,
- repository_metadata=repository_metadata,
- tool_dependencies=None,
- repository_dependencies=None )
+ repo_info_dict = repository_util.create_repo_info_dict( app=trans.app,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=changeset_revision,
+ ctx_rev=str( ctx.rev() ),
+ repository_owner=repository.user.username,
+ repository_name=repository.name,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ tool_dependencies=None,
+ repository_dependencies=None )
includes_data_managers = False
includes_datatypes = False
includes_tools = False
@@ -2274,7 +2274,7 @@
repository.long_description = long_description
flush_needed = True
if repository.times_downloaded == 0 and repo_name != repository.name:
- message = repository_maintenance_util.validate_repository_name( trans.app, repo_name, user )
+ message = repository_util.validate_repository_name( trans.app, repo_name, user )
if message:
error = True
else:
@@ -2284,12 +2284,12 @@
trans.app.hgweb_config_manager.change_entry( old_lhs, new_lhs, repo_dir )
# Change the entry in the repository's hgrc file.
hgrc_file = os.path.join( repo_dir, '.hg', 'hgrc' )
- repository_maintenance_util.change_repository_name_in_hgrc_file( hgrc_file, repo_name )
+ repository_util.change_repository_name_in_hgrc_file( hgrc_file, repo_name )
# Rename the repository's admin role to match the new repository name.
repository_admin_role = repository.admin_role
repository_admin_role.name = \
- repository_maintenance_util.get_repository_admin_role_name( str( repo_name ),
- str( repository.user.username ) )
+ repository_util.get_repository_admin_role_name( str( repo_name ),
+ str( repository.user.username ) )
trans.sa_session.add( repository_admin_role )
repository.name = repo_name
flush_needed = True
@@ -2379,7 +2379,7 @@
current_allow_push_list = current_allow_push.split( ',' )
else:
current_allow_push_list = []
- allow_push_select_field = repository_maintenance_util.build_allow_push_select_field( trans, current_allow_push_list )
+ allow_push_select_field = repository_util.build_allow_push_select_field( trans, current_allow_push_list )
checked = alerts_checked or user.email in email_alerts
alerts_check_box = CheckboxField( 'alerts', checked=checked )
changeset_revision_select_field = grids_util.build_changeset_revision_select_field( trans,
@@ -2521,10 +2521,10 @@
if repository_metadata:
metadata = repository_metadata.metadata
role = repository.admin_role
- associations_dict = repository_maintenance_util.handle_role_associations( trans.app,
- role,
- repository,
- **kwd )
+ associations_dict = repository_util.handle_role_associations( trans.app,
+ role,
+ repository,
+ **kwd )
in_users = associations_dict.get( 'in_users', [] )
out_users = associations_dict.get( 'out_users', [] )
in_groups = associations_dict.get( 'in_groups', [] )
diff -r 9cce4ead10795278446af4ab84eecfc4e2b26dce -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a lib/tool_shed/capsule/capsule_manager.py
--- a/lib/tool_shed/capsule/capsule_manager.py
+++ b/lib/tool_shed/capsule/capsule_manager.py
@@ -24,7 +24,7 @@
from tool_shed.util import common_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
-from tool_shed.util import repository_maintenance_util
+from tool_shed.util import repository_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import xml_util
@@ -462,13 +462,13 @@
else:
category_ids.append( self.app.security.encode_id( category.id ) )
# Create the repository record in the database.
- repository, create_message = repository_maintenance_util.create_repository( self.app,
- name,
- type,
- description,
- long_description,
- user_id=user_id,
- category_ids=category_ids )
+ repository, create_message = repository_util.create_repository( self.app,
+ name,
+ type,
+ description,
+ long_description,
+ user_id=user_id,
+ category_ids=category_ids )
if create_message:
results_message += create_message
# Populate the new repository with the contents of exported repository archive.
diff -r 9cce4ead10795278446af4ab84eecfc4e2b26dce -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
--- a/lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
+++ b/lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
@@ -9,7 +9,7 @@
from tool_shed.metadata import metadata_generator
from tool_shed.util import common_util
-from tool_shed.util import repository_maintenance_util
+from tool_shed.util import repository_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_util
from tool_shed.util import xml_util
@@ -78,7 +78,7 @@
"""Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
invalid_file_tups = []
metadata_dict = {}
- repository = repository_maintenance_util.get_installed_tool_shed_repository( self.app, id )
+ repository = repository_util.get_installed_tool_shed_repository( self.app, id )
repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( self.app )
if relative_install_dir:
@@ -118,7 +118,7 @@
unsuccessful_count = 0
for repository_id in repository_ids:
try:
- repository = repository_maintenance_util.get_installed_tool_shed_repository( self.app, repository_id )
+ repository = repository_util.get_installed_tool_shed_repository( self.app, repository_id )
owner = str( repository.owner )
invalid_file_tups, metadata_dict = \
self.reset_all_metadata_on_installed_repository( repository_id )
diff -r 9cce4ead10795278446af4ab84eecfc4e2b26dce -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a lib/tool_shed/galaxy_install/repair_repository_manager.py
--- a/lib/tool_shed/galaxy_install/repair_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/repair_repository_manager.py
@@ -10,7 +10,7 @@
from tool_shed.util import common_util
from tool_shed.util import container_util
from tool_shed.util import shed_util_common as suc
-from tool_shed.util import repository_maintenance_util
+from tool_shed.util import repository_util
from tool_shed.util import tool_dependency_util
@@ -127,16 +127,16 @@
new_tool_panel_section_label=tool_panel_section_name )
else:
tool_dependencies = None
- repo_info_dict = repository_maintenance_util.create_repo_info_dict( app=self.app,
- repository_clone_url=repository_clone_url,
- changeset_revision=repository.changeset_revision,
- ctx_rev=repository.ctx_rev,
- repository_owner=repository.owner,
- repository_name=repository.name,
- repository=None,
- repository_metadata=None,
- tool_dependencies=tool_dependencies,
- repository_dependencies=repository_dependencies )
+ repo_info_dict = repository_util.create_repo_info_dict( app=self.app,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=repository.changeset_revision,
+ ctx_rev=repository.ctx_rev,
+ repository_owner=repository.owner,
+ repository_name=repository.name,
+ repository=None,
+ repository_metadata=None,
+ tool_dependencies=tool_dependencies,
+ repository_dependencies=repository_dependencies )
return repo_info_dict, tool_panel_section_key
def repair_tool_shed_repository( self, repository, repo_info_dict ):
diff -r 9cce4ead10795278446af4ab84eecfc4e2b26dce -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a lib/tool_shed/util/repository_maintenance_util.py
--- a/lib/tool_shed/util/repository_maintenance_util.py
+++ /dev/null
@@ -1,330 +0,0 @@
-import ConfigParser
-import logging
-import os
-import re
-
-from galaxy import util
-from galaxy import web
-from galaxy.web.form_builder import build_select_field
-from galaxy.webapps.tool_shed.model import directory_hash_id
-
-from tool_shed.dependencies.repository import relation_builder
-
-from tool_shed.util import common_util
-from tool_shed.util import hg_util
-from tool_shed.util import shed_util_common as suc
-
-log = logging.getLogger( __name__ )
-
-VALID_REPOSITORYNAME_RE = re.compile( "^[a-z0-9\_]+$" )
-
-def build_allow_push_select_field( trans, current_push_list, selected_value='none' ):
- options = []
- for user in trans.sa_session.query( trans.model.User ):
- if user.username not in current_push_list:
- options.append( user )
- return build_select_field( trans,
- objs=options,
- label_attr='username',
- select_field_name='allow_push',
- selected_value=selected_value,
- refresh_on_change=False,
- multiple=True )
-
-def change_repository_name_in_hgrc_file( hgrc_file, new_name ):
- config = ConfigParser.ConfigParser()
- config.read( hgrc_file )
- config.read( hgrc_file )
- config.set( 'web', 'name', new_name )
- new_file = open( hgrc_file, 'wb' )
- config.write( new_file )
- new_file.close()
-
-def check_or_update_tool_shed_status_for_installed_repository( app, repository ):
- updated = False
- tool_shed_status_dict = suc.get_tool_shed_status_for_installed_repository( app, repository )
- if tool_shed_status_dict:
- ok = True
- if tool_shed_status_dict != repository.tool_shed_status:
- repository.tool_shed_status = tool_shed_status_dict
- app.install_model.context.add( repository )
- app.install_model.context.flush()
- updated = True
- else:
- ok = False
- return ok, updated
-
-def create_repo_info_dict( app, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None,
- repository=None, repository_metadata=None, tool_dependencies=None, repository_dependencies=None ):
- """
- Return a dictionary that includes all of the information needed to install a repository into a local
- Galaxy instance. The dictionary will also contain the recursive list of repository dependencies defined
- for the repository, as well as the defined tool dependencies.
-
- This method is called from Galaxy under four scenarios:
- 1. During the tool shed repository installation process via the tool shed's get_repository_information()
- method. In this case both the received repository and repository_metadata will be objects, but
- tool_dependencies and repository_dependencies will be None.
- 2. When getting updates for an installed repository where the updates include newly defined repository
- dependency definitions. This scenario is similar to 1. above. The tool shed's get_repository_information()
- method is the caller, and both the received repository and repository_metadata will be objects, but
- tool_dependencies and repository_dependencies will be None.
- 3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no
- updates available. In this case, both repository and repository_metadata will be None, but tool_dependencies
- and repository_dependencies will be objects previously retrieved from the tool shed if the repository includes
- definitions for them.
- 4. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates
- available. In this case, this method is reached via the tool shed's get_updated_repository_information()
- method, and both repository and repository_metadata will be objects but tool_dependencies and
- repository_dependencies will be None.
- """
- repo_info_dict = {}
- repository = suc.get_repository_by_name_and_owner( app, repository_name, repository_owner )
- if app.name == 'tool_shed':
- # We're in the tool shed.
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( app,
- app.security.encode_id( repository.id ),
- changeset_revision )
- if repository_metadata:
- metadata = repository_metadata.metadata
- if metadata:
- tool_shed_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
- rb = relation_builder.RelationBuilder( app, repository, repository_metadata, tool_shed_url )
- # Get a dictionary of all repositories upon which the contents of the received repository depends.
- repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
- tool_dependencies = metadata.get( 'tool_dependencies', {} )
- if tool_dependencies:
- new_tool_dependencies = {}
- for dependency_key, requirements_dict in tool_dependencies.items():
- if dependency_key in [ 'set_environment' ]:
- new_set_environment_dict_list = []
- for set_environment_dict in requirements_dict:
- set_environment_dict[ 'repository_name' ] = repository_name
- set_environment_dict[ 'repository_owner' ] = repository_owner
- set_environment_dict[ 'changeset_revision' ] = changeset_revision
- new_set_environment_dict_list.append( set_environment_dict )
- new_tool_dependencies[ dependency_key ] = new_set_environment_dict_list
- else:
- requirements_dict[ 'repository_name' ] = repository_name
- requirements_dict[ 'repository_owner' ] = repository_owner
- requirements_dict[ 'changeset_revision' ] = changeset_revision
- new_tool_dependencies[ dependency_key ] = requirements_dict
- tool_dependencies = new_tool_dependencies
- # Cast unicode to string, with the exception of description, since it is free text and can contain special characters.
- repo_info_dict[ str( repository.name ) ] = ( repository.description,
- str( repository_clone_url ),
- str( changeset_revision ),
- str( ctx_rev ),
- str( repository_owner ),
- repository_dependencies,
- tool_dependencies )
- return repo_info_dict
-
-def create_repository( app, name, type, description, long_description, user_id, category_ids=[] ):
- sa_session = app.model.context.current
- # Add the repository record to the database.
- repository = app.model.Repository( name=name,
- type=type,
- description=description,
- long_description=long_description,
- user_id=user_id )
- # Flush to get the id.
- sa_session.add( repository )
- sa_session.flush()
- # Create an admin role for the repository.
- repository_admin_role = create_repository_admin_role( app, repository )
- # Determine the repository's repo_path on disk.
- dir = os.path.join( app.config.file_path, *directory_hash_id( repository.id ) )
- # Create directory if it does not exist.
- if not os.path.exists( dir ):
- os.makedirs( dir )
- # Define repo name inside hashed directory.
- repository_path = os.path.join( dir, "repo_%d" % repository.id )
- # Create local repository directory.
- if not os.path.exists( repository_path ):
- os.makedirs( repository_path )
- # Create the local repository.
- repo = hg_util.get_repo_for_repository( app, repository=None, repo_path=repository_path, create=True )
- # Add an entry in the hgweb.config file for the local repository.
- lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
- app.hgweb_config_manager.add_entry( lhs, repository_path )
- # Create a .hg/hgrc file for the local repository.
- hg_util.create_hgrc_file( app, repository )
- flush_needed = False
- if category_ids:
- # Create category associations
- for category_id in category_ids:
- category = sa_session.query( app.model.Category ) \
- .get( app.security.decode_id( category_id ) )
- rca = app.model.RepositoryCategoryAssociation( repository, category )
- sa_session.add( rca )
- flush_needed = True
- if flush_needed:
- sa_session.flush()
- # Update the repository registry.
- app.repository_registry.add_entry( repository )
- message = "Repository <b>%s</b> has been created." % str( repository.name )
- return repository, message
-
-def create_repository_admin_role( app, repository ):
- """
- Create a new role with name-spaced name based on the repository name and its owner's public user
- name. This will ensure that the role name is unique.
- """
- sa_session = app.model.context.current
- name = get_repository_admin_role_name( str( repository.name ), str( repository.user.username ) )
- description = 'A user or group member with this role can administer this repository.'
- role = app.model.Role( name=name, description=description, type=app.model.Role.types.SYSTEM )
- sa_session.add( role )
- sa_session.flush()
- # Associate the role with the repository owner.
- ura = app.model.UserRoleAssociation( repository.user, role )
- # Associate the role with the repository.
- rra = app.model.RepositoryRoleAssociation( repository, role )
- sa_session.add( rra )
- sa_session.flush()
- return role
-
-def get_installed_tool_shed_repository( app, id ):
- """Get a tool shed repository record from the Galaxy database defined by the id."""
- return app.install_model.context.query( app.install_model.ToolShedRepository ) \
- .get( app.security.decode_id( id ) )
-
-def get_repo_info_dict( app, user, repository_id, changeset_revision ):
- repository = suc.get_repository_in_tool_shed( app, repository_id )
- repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
- repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( user, repository )
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( app,
- repository_id,
- changeset_revision )
- if not repository_metadata:
- # The received changeset_revision is no longer installable, so get the next changeset_revision
- # in the repository's changelog. This generally occurs only with repositories of type
- # repository_suite_definition or tool_dependency_definition.
- next_downloadable_changeset_revision = \
- suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
- if next_downloadable_changeset_revision:
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( app,
- repository_id,
- next_downloadable_changeset_revision )
- if repository_metadata:
- # For now, we'll always assume that we'll get repository_metadata, but if we discover our assumption
- # is not valid we'll have to enhance the callers to handle repository_metadata values of None in the
- # returned repo_info_dict.
- metadata = repository_metadata.metadata
- if 'tools' in metadata:
- includes_tools = True
- else:
- includes_tools = False
- includes_tools_for_display_in_tool_panel = repository_metadata.includes_tools_for_display_in_tool_panel
- repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
- repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
- has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
- suc.get_repository_dependency_types( repository_dependencies )
- if 'tool_dependencies' in metadata:
- includes_tool_dependencies = True
- else:
- includes_tool_dependencies = False
- else:
- # Here's where we may have to handle enhancements to the callers. See above comment.
- includes_tools = False
- has_repository_dependencies = False
- has_repository_dependencies_only_if_compiling_contained_td = False
- includes_tool_dependencies = False
- includes_tools_for_display_in_tool_panel = False
- ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
- repo_info_dict = create_repo_info_dict( app=app,
- repository_clone_url=repository_clone_url,
- changeset_revision=changeset_revision,
- ctx_rev=str( ctx.rev() ),
- repository_owner=repository.user.username,
- repository_name=repository.name,
- repository=repository,
- repository_metadata=repository_metadata,
- tool_dependencies=None,
- repository_dependencies=None )
- return repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, \
- has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td
-
-def get_repository_admin_role_name( repository_name, repository_owner ):
- return '%s_%s_admin' % ( str( repository_name ), str( repository_owner ) )
-
-def get_role_by_id( app, role_id ):
- """Get a Role from the database by id."""
- sa_session = app.model.context.current
- return sa_session.query( app.model.Role ).get( app.security.decode_id( role_id ) )
-
-def handle_role_associations( app, role, repository, **kwd ):
- sa_session = app.model.context.current
- message = kwd.get( 'message', '' )
- status = kwd.get( 'status', 'done' )
- repository_owner = repository.user
- if kwd.get( 'manage_role_associations_button', False ):
- in_users_list = util.listify( kwd.get( 'in_users', [] ) )
- in_users = [ sa_session.query( app.model.User ).get( x ) for x in in_users_list ]
- # Make sure the repository owner is always associated with the repository's admin role.
- owner_associated = False
- for user in in_users:
- if user.id == repository_owner.id:
- owner_associated = True
- break
- if not owner_associated:
- in_users.append( repository_owner )
- message += "The repository owner must always be associated with the repository's administrator role. "
- status = 'error'
- in_groups_list = util.listify( kwd.get( 'in_groups', [] ) )
- in_groups = [ sa_session.query( app.model.Group ).get( x ) for x in in_groups_list ]
- in_repositories = [ repository ]
- app.security_agent.set_entity_role_associations( roles=[ role ],
- users=in_users,
- groups=in_groups,
- repositories=in_repositories )
- sa_session.refresh( role )
- message += "Role <b>%s</b> has been associated with %d users, %d groups and %d repositories. " % \
- ( str( role.name ), len( in_users ), len( in_groups ), len( in_repositories ) )
- in_users = []
- out_users = []
- in_groups = []
- out_groups = []
- for user in sa_session.query( app.model.User ) \
- .filter( app.model.User.table.c.deleted==False ) \
- .order_by( app.model.User.table.c.email ):
- if user in [ x.user for x in role.users ]:
- in_users.append( ( user.id, user.email ) )
- else:
- out_users.append( ( user.id, user.email ) )
- for group in sa_session.query( app.model.Group ) \
- .filter( app.model.Group.table.c.deleted==False ) \
- .order_by( app.model.Group.table.c.name ):
- if group in [ x.group for x in role.groups ]:
- in_groups.append( ( group.id, group.name ) )
- else:
- out_groups.append( ( group.id, group.name ) )
- associations_dict = dict( in_users=in_users,
- out_users=out_users,
- in_groups=in_groups,
- out_groups=out_groups,
- message=message,
- status=status )
- return associations_dict
-
-def validate_repository_name( app, name, user ):
- # Repository names must be unique for each user, must be at least four characters
- # in length and must contain only lower-case letters, numbers, and the '_' character.
- if name in [ 'None', None, '' ]:
- return 'Enter the required repository name.'
- if name in [ 'repos' ]:
- return "The term <b>%s</b> is a reserved word in the tool shed, so it cannot be used as a repository name." % name
- check_existing = suc.get_repository_by_name_and_owner( app, name, user.username )
- if check_existing is not None:
- if check_existing.deleted:
- return 'You have a deleted repository named <b>%s</b>, so choose a different name.' % name
- else:
- return "You already have a repository named <b>%s</b>, so choose a different name." % name
- if len( name ) < 4:
- return "Repository names must be at least 4 characters in length."
- if len( name ) > 80:
- return "Repository names cannot be more than 80 characters in length."
- if not( VALID_REPOSITORYNAME_RE.match( name ) ):
- return "Repository names must contain only lower-case letters, numbers and underscore <b>_</b>."
- return ''
diff -r 9cce4ead10795278446af4ab84eecfc4e2b26dce -r 4ee9e584dbc016727e4c90f73bbf53081f775c5a lib/tool_shed/util/repository_util.py
--- /dev/null
+++ b/lib/tool_shed/util/repository_util.py
@@ -0,0 +1,330 @@
+import ConfigParser
+import logging
+import os
+import re
+
+from galaxy import util
+from galaxy import web
+from galaxy.web.form_builder import build_select_field
+from galaxy.webapps.tool_shed.model import directory_hash_id
+
+from tool_shed.dependencies.repository import relation_builder
+
+from tool_shed.util import common_util
+from tool_shed.util import hg_util
+from tool_shed.util import shed_util_common as suc
+
+log = logging.getLogger( __name__ )
+
+VALID_REPOSITORYNAME_RE = re.compile( "^[a-z0-9\_]+$" )
+
+def build_allow_push_select_field( trans, current_push_list, selected_value='none' ):
+ options = []
+ for user in trans.sa_session.query( trans.model.User ):
+ if user.username not in current_push_list:
+ options.append( user )
+ return build_select_field( trans,
+ objs=options,
+ label_attr='username',
+ select_field_name='allow_push',
+ selected_value=selected_value,
+ refresh_on_change=False,
+ multiple=True )
+
+def change_repository_name_in_hgrc_file( hgrc_file, new_name ):
+ config = ConfigParser.ConfigParser()
+ config.read( hgrc_file )
+ config.read( hgrc_file )
+ config.set( 'web', 'name', new_name )
+ new_file = open( hgrc_file, 'wb' )
+ config.write( new_file )
+ new_file.close()
+
+def check_or_update_tool_shed_status_for_installed_repository( app, repository ):
+ updated = False
+ tool_shed_status_dict = suc.get_tool_shed_status_for_installed_repository( app, repository )
+ if tool_shed_status_dict:
+ ok = True
+ if tool_shed_status_dict != repository.tool_shed_status:
+ repository.tool_shed_status = tool_shed_status_dict
+ app.install_model.context.add( repository )
+ app.install_model.context.flush()
+ updated = True
+ else:
+ ok = False
+ return ok, updated
+
+def create_repo_info_dict( app, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None,
+ repository=None, repository_metadata=None, tool_dependencies=None, repository_dependencies=None ):
+ """
+ Return a dictionary that includes all of the information needed to install a repository into a local
+ Galaxy instance. The dictionary will also contain the recursive list of repository dependencies defined
+ for the repository, as well as the defined tool dependencies.
+
+ This method is called from Galaxy under four scenarios:
+ 1. During the tool shed repository installation process via the tool shed's get_repository_information()
+ method. In this case both the received repository and repository_metadata will be objects, but
+ tool_dependencies and repository_dependencies will be None.
+ 2. When getting updates for an installed repository where the updates include newly defined repository
+ dependency definitions. This scenario is similar to 1. above. The tool shed's get_repository_information()
+ method is the caller, and both the received repository and repository_metadata will be objects, but
+ tool_dependencies and repository_dependencies will be None.
+ 3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no
+ updates available. In this case, both repository and repository_metadata will be None, but tool_dependencies
+ and repository_dependencies will be objects previously retrieved from the tool shed if the repository includes
+ definitions for them.
+ 4. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates
+ available. In this case, this method is reached via the tool shed's get_updated_repository_information()
+ method, and both repository and repository_metadata will be objects but tool_dependencies and
+ repository_dependencies will be None.
+ """
+ repo_info_dict = {}
+ repository = suc.get_repository_by_name_and_owner( app, repository_name, repository_owner )
+ if app.name == 'tool_shed':
+ # We're in the tool shed.
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( app,
+ app.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ tool_shed_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
+ rb = relation_builder.RelationBuilder( app, repository, repository_metadata, tool_shed_url )
+ # Get a dictionary of all repositories upon which the contents of the received repository depends.
+ repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
+ tool_dependencies = metadata.get( 'tool_dependencies', {} )
+ if tool_dependencies:
+ new_tool_dependencies = {}
+ for dependency_key, requirements_dict in tool_dependencies.items():
+ if dependency_key in [ 'set_environment' ]:
+ new_set_environment_dict_list = []
+ for set_environment_dict in requirements_dict:
+ set_environment_dict[ 'repository_name' ] = repository_name
+ set_environment_dict[ 'repository_owner' ] = repository_owner
+ set_environment_dict[ 'changeset_revision' ] = changeset_revision
+ new_set_environment_dict_list.append( set_environment_dict )
+ new_tool_dependencies[ dependency_key ] = new_set_environment_dict_list
+ else:
+ requirements_dict[ 'repository_name' ] = repository_name
+ requirements_dict[ 'repository_owner' ] = repository_owner
+ requirements_dict[ 'changeset_revision' ] = changeset_revision
+ new_tool_dependencies[ dependency_key ] = requirements_dict
+ tool_dependencies = new_tool_dependencies
+ # Cast unicode to string, with the exception of description, since it is free text and can contain special characters.
+ repo_info_dict[ str( repository.name ) ] = ( repository.description,
+ str( repository_clone_url ),
+ str( changeset_revision ),
+ str( ctx_rev ),
+ str( repository_owner ),
+ repository_dependencies,
+ tool_dependencies )
+ return repo_info_dict
+
+def create_repository( app, name, type, description, long_description, user_id, category_ids=[] ):
+ sa_session = app.model.context.current
+ # Add the repository record to the database.
+ repository = app.model.Repository( name=name,
+ type=type,
+ description=description,
+ long_description=long_description,
+ user_id=user_id )
+ # Flush to get the id.
+ sa_session.add( repository )
+ sa_session.flush()
+ # Create an admin role for the repository.
+ repository_admin_role = create_repository_admin_role( app, repository )
+ # Determine the repository's repo_path on disk.
+ dir = os.path.join( app.config.file_path, *directory_hash_id( repository.id ) )
+ # Create directory if it does not exist.
+ if not os.path.exists( dir ):
+ os.makedirs( dir )
+ # Define repo name inside hashed directory.
+ repository_path = os.path.join( dir, "repo_%d" % repository.id )
+ # Create local repository directory.
+ if not os.path.exists( repository_path ):
+ os.makedirs( repository_path )
+ # Create the local repository.
+ repo = hg_util.get_repo_for_repository( app, repository=None, repo_path=repository_path, create=True )
+ # Add an entry in the hgweb.config file for the local repository.
+ lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
+ app.hgweb_config_manager.add_entry( lhs, repository_path )
+ # Create a .hg/hgrc file for the local repository.
+ hg_util.create_hgrc_file( app, repository )
+ flush_needed = False
+ if category_ids:
+ # Create category associations
+ for category_id in category_ids:
+ category = sa_session.query( app.model.Category ) \
+ .get( app.security.decode_id( category_id ) )
+ rca = app.model.RepositoryCategoryAssociation( repository, category )
+ sa_session.add( rca )
+ flush_needed = True
+ if flush_needed:
+ sa_session.flush()
+ # Update the repository registry.
+ app.repository_registry.add_entry( repository )
+ message = "Repository <b>%s</b> has been created." % str( repository.name )
+ return repository, message
+
+def create_repository_admin_role( app, repository ):
+ """
+ Create a new role with name-spaced name based on the repository name and its owner's public user
+ name. This will ensure that the role name is unique.
+ """
+ sa_session = app.model.context.current
+ name = get_repository_admin_role_name( str( repository.name ), str( repository.user.username ) )
+ description = 'A user or group member with this role can administer this repository.'
+ role = app.model.Role( name=name, description=description, type=app.model.Role.types.SYSTEM )
+ sa_session.add( role )
+ sa_session.flush()
+ # Associate the role with the repository owner.
+ ura = app.model.UserRoleAssociation( repository.user, role )
+ # Associate the role with the repository.
+ rra = app.model.RepositoryRoleAssociation( repository, role )
+ sa_session.add( rra )
+ sa_session.flush()
+ return role
+
+def get_installed_tool_shed_repository( app, id ):
+ """Get a tool shed repository record from the Galaxy database defined by the id."""
+ return app.install_model.context.query( app.install_model.ToolShedRepository ) \
+ .get( app.security.decode_id( id ) )
+
+def get_repo_info_dict( app, user, repository_id, changeset_revision ):
+ repository = suc.get_repository_in_tool_shed( app, repository_id )
+ repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+ repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( user, repository )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( app,
+ repository_id,
+ changeset_revision )
+ if not repository_metadata:
+ # The received changeset_revision is no longer installable, so get the next changeset_revision
+ # in the repository's changelog. This generally occurs only with repositories of type
+ # repository_suite_definition or tool_dependency_definition.
+ next_downloadable_changeset_revision = \
+ suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+ if next_downloadable_changeset_revision:
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( app,
+ repository_id,
+ next_downloadable_changeset_revision )
+ if repository_metadata:
+ # For now, we'll always assume that we'll get repository_metadata, but if we discover our assumption
+ # is not valid we'll have to enhance the callers to handle repository_metadata values of None in the
+ # returned repo_info_dict.
+ metadata = repository_metadata.metadata
+ if 'tools' in metadata:
+ includes_tools = True
+ else:
+ includes_tools = False
+ includes_tools_for_display_in_tool_panel = repository_metadata.includes_tools_for_display_in_tool_panel
+ repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
+ repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
+ suc.get_repository_dependency_types( repository_dependencies )
+ if 'tool_dependencies' in metadata:
+ includes_tool_dependencies = True
+ else:
+ includes_tool_dependencies = False
+ else:
+ # Here's where we may have to handle enhancements to the callers. See above comment.
+ includes_tools = False
+ has_repository_dependencies = False
+ has_repository_dependencies_only_if_compiling_contained_td = False
+ includes_tool_dependencies = False
+ includes_tools_for_display_in_tool_panel = False
+ ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
+ repo_info_dict = create_repo_info_dict( app=app,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=changeset_revision,
+ ctx_rev=str( ctx.rev() ),
+ repository_owner=repository.user.username,
+ repository_name=repository.name,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ tool_dependencies=None,
+ repository_dependencies=None )
+ return repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, \
+ has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td
+
+def get_repository_admin_role_name( repository_name, repository_owner ):
+ return '%s_%s_admin' % ( str( repository_name ), str( repository_owner ) )
+
+def get_role_by_id( app, role_id ):
+ """Get a Role from the database by id."""
+ sa_session = app.model.context.current
+ return sa_session.query( app.model.Role ).get( app.security.decode_id( role_id ) )
+
+def handle_role_associations( app, role, repository, **kwd ):
+ sa_session = app.model.context.current
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ repository_owner = repository.user
+ if kwd.get( 'manage_role_associations_button', False ):
+ in_users_list = util.listify( kwd.get( 'in_users', [] ) )
+ in_users = [ sa_session.query( app.model.User ).get( x ) for x in in_users_list ]
+ # Make sure the repository owner is always associated with the repository's admin role.
+ owner_associated = False
+ for user in in_users:
+ if user.id == repository_owner.id:
+ owner_associated = True
+ break
+ if not owner_associated:
+ in_users.append( repository_owner )
+ message += "The repository owner must always be associated with the repository's administrator role. "
+ status = 'error'
+ in_groups_list = util.listify( kwd.get( 'in_groups', [] ) )
+ in_groups = [ sa_session.query( app.model.Group ).get( x ) for x in in_groups_list ]
+ in_repositories = [ repository ]
+ app.security_agent.set_entity_role_associations( roles=[ role ],
+ users=in_users,
+ groups=in_groups,
+ repositories=in_repositories )
+ sa_session.refresh( role )
+ message += "Role <b>%s</b> has been associated with %d users, %d groups and %d repositories. " % \
+ ( str( role.name ), len( in_users ), len( in_groups ), len( in_repositories ) )
+ in_users = []
+ out_users = []
+ in_groups = []
+ out_groups = []
+ for user in sa_session.query( app.model.User ) \
+ .filter( app.model.User.table.c.deleted==False ) \
+ .order_by( app.model.User.table.c.email ):
+ if user in [ x.user for x in role.users ]:
+ in_users.append( ( user.id, user.email ) )
+ else:
+ out_users.append( ( user.id, user.email ) )
+ for group in sa_session.query( app.model.Group ) \
+ .filter( app.model.Group.table.c.deleted==False ) \
+ .order_by( app.model.Group.table.c.name ):
+ if group in [ x.group for x in role.groups ]:
+ in_groups.append( ( group.id, group.name ) )
+ else:
+ out_groups.append( ( group.id, group.name ) )
+ associations_dict = dict( in_users=in_users,
+ out_users=out_users,
+ in_groups=in_groups,
+ out_groups=out_groups,
+ message=message,
+ status=status )
+ return associations_dict
+
+def validate_repository_name( app, name, user ):
+ # Repository names must be unique for each user, must be at least four characters
+ # in length and must contain only lower-case letters, numbers, and the '_' character.
+ if name in [ 'None', None, '' ]:
+ return 'Enter the required repository name.'
+ if name in [ 'repos' ]:
+ return "The term <b>%s</b> is a reserved word in the tool shed, so it cannot be used as a repository name." % name
+ check_existing = suc.get_repository_by_name_and_owner( app, name, user.username )
+ if check_existing is not None:
+ if check_existing.deleted:
+ return 'You have a deleted repository named <b>%s</b>, so choose a different name.' % name
+ else:
+ return "You already have a repository named <b>%s</b>, so choose a different name." % name
+ if len( name ) < 4:
+ return "Repository names must be at least 4 characters in length."
+ if len( name ) > 80:
+ return "Repository names cannot be more than 80 characters in length."
+ if not( VALID_REPOSITORYNAME_RE.match( name ) ):
+ return "Repository names must contain only lower-case letters, numbers and underscore <b>_</b>."
+ return ''
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Add a ToolDataTableManager for managing tool data tables in the Tool Shed.
by commits-noreply@bitbucket.org 22 Jul '14
by commits-noreply@bitbucket.org 22 Jul '14
22 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9cce4ead1079/
Changeset: 9cce4ead1079
User: greg
Date: 2014-07-22 21:12:40
Summary: Add a ToolDataTableManager for managing tool data tables in the Tool Shed.
Affected #: 9 files
diff -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 -r 9cce4ead10795278446af4ab84eecfc4e2b26dce lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -13,13 +13,13 @@
from tool_shed.dependencies import attribute_handlers
from tool_shed.galaxy_install import dependency_display
from tool_shed.metadata import repository_metadata_manager
-import tool_shed.repository_types.util as rt_util
+from tool_shed.repository_types import util as rt_util
+from tool_shed.tools import data_table_manager
from tool_shed.util import basic_util
from tool_shed.util import commit_util
from tool_shed.util import hg_util
from tool_shed.util import shed_util_common as suc
-from tool_shed.util import tool_util
from tool_shed.util import xml_util
from galaxy import eggs
@@ -100,6 +100,7 @@
if uploaded_file or uploaded_directory:
rdah = attribute_handlers.RepositoryDependencyAttributeHandler( trans.app, unpopulate=False )
tdah = attribute_handlers.ToolDependencyAttributeHandler( trans.app, unpopulate=False )
+ tdtm = data_table_manager.ToolDataTableManager( trans.app )
ok = True
isgzip = False
isbz2 = False
@@ -221,7 +222,7 @@
# Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
# by parsing the file and adding new entries to the in-memory trans.app.tool_data_tables
# dictionary.
- error, error_message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, full_path )
+ error, error_message = tdtm.handle_sample_tool_data_table_conf_file( full_path, persist=False )
if error:
message = '%s<br/>%s' % ( message, error_message )
# See if the content of the change set was valid.
@@ -309,7 +310,7 @@
message += invalid_repository_dependencies_message
status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- tool_util.reset_tool_data_tables( trans.app )
+ tdtm.reset_tool_data_tables()
if uploaded_directory:
basic_util.remove_dir( uploaded_directory )
trans.response.send_redirect( web.url_for( controller='repository',
@@ -323,7 +324,7 @@
basic_util.remove_dir( uploaded_directory )
status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- tool_util.reset_tool_data_tables( trans.app )
+ tdtm.reset_tool_data_tables()
selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
return trans.fill_template( '/webapps/tool_shed/repository/upload.mako',
repository=repository,
diff -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 -r 9cce4ead10795278446af4ab84eecfc4e2b26dce lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -35,6 +35,7 @@
from tool_shed.galaxy_install.tools import data_manager
from tool_shed.galaxy_install.tools import tool_panel_manager
+from tool_shed.tools import data_table_manager
from tool_shed.tools import tool_version_manager
log = logging.getLogger( __name__ )
@@ -508,6 +509,7 @@
reinstalling an uninstalled repository.
"""
shed_config_dict = self.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
+ tdtm = data_table_manager.ToolDataTableManager( self.app )
irmm = InstalledRepositoryMetadataManager( self.app, self.tpm )
metadata_dict, invalid_file_tups = \
irmm.generate_metadata_for_changeset_revision( repository=tool_shed_repository,
@@ -533,9 +535,9 @@
set_status=True )
if 'sample_files' in metadata_dict:
sample_files = metadata_dict.get( 'sample_files', [] )
- tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files )
+ tool_index_sample_files = tdtm.get_tool_index_sample_files( sample_files )
tool_data_table_conf_filename, tool_data_table_elems = \
- tool_util.install_tool_data_tables( self.app, tool_shed_repository, tool_index_sample_files )
+ tdtm.install_tool_data_tables( tool_shed_repository, tool_index_sample_files )
if tool_data_table_elems:
self.app.tool_data_tables.add_new_entries_from_config_file( tool_data_table_conf_filename,
None,
@@ -544,16 +546,15 @@
if 'tools' in metadata_dict:
tool_panel_dict = self.tpm.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
sample_files = metadata_dict.get( 'sample_files', [] )
- tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files )
+ tool_index_sample_files = tdtm.get_tool_index_sample_files( sample_files )
tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
sample_files_copied = [ str( s ) for s in tool_index_sample_files ]
repository_tools_tups = irmm.get_repository_tools_tups( metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = tool_util.handle_missing_data_table_entry( self.app,
- relative_install_dir,
- tool_path,
- repository_tools_tups )
+ repository_tools_tups = tdtm.handle_missing_data_table_entry( relative_install_dir,
+ tool_path,
+ repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file( self.app,
tool_path,
diff -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 -r 9cce4ead10795278446af4ab84eecfc4e2b26dce lib/tool_shed/galaxy_install/tool_migration_manager.py
--- a/lib/tool_shed/galaxy_install/tool_migration_manager.py
+++ b/lib/tool_shed/galaxy_install/tool_migration_manager.py
@@ -17,6 +17,7 @@
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.tools import tool_panel_manager
+from tool_shed.tools import data_table_manager
from tool_shed.tools import tool_version_manager
from tool_shed.util import basic_util
@@ -440,25 +441,27 @@
else:
tool_dependencies = None
if 'tools' in metadata_dict:
+ tdtm = data_table_manager.ToolDataTableManager( self.app )
sample_files = metadata_dict.get( 'sample_files', [] )
sample_files = [ str( s ) for s in sample_files ]
- tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files )
+ tool_index_sample_files = tdtm.get_tool_index_sample_files( sample_files )
tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
repository_tools_tups = irmm.get_repository_tools_tups( metadata_dict )
if repository_tools_tups:
- # Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = tool_util.handle_missing_data_table_entry( self.app,
- relative_install_dir,
- self.tool_path,
- repository_tools_tups )
+ # Handle missing data table entries for tool parameters that are dynamically
+ # generated select lists.
+ repository_tools_tups = tdtm.handle_missing_data_table_entry( relative_install_dir,
+ self.tool_path,
+ repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file( self.app,
self.tool_path,
sample_files,
repository_tools_tups,
sample_files_copied )
- # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
+ # Copy remaining sample files included in the repository to the ~/tool-data
+ # directory of the local Galaxy instance.
tool_util.copy_sample_files( self.app,
sample_files,
tool_path=self.tool_path,
diff -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 -r 9cce4ead10795278446af4ab84eecfc4e2b26dce lib/tool_shed/metadata/repository_metadata_manager.py
--- a/lib/tool_shed/metadata/repository_metadata_manager.py
+++ b/lib/tool_shed/metadata/repository_metadata_manager.py
@@ -897,7 +897,7 @@
# revisions from the changelog.
self.reset_all_tool_versions( id, repo )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- tool_util.reset_tool_data_tables( self.app )
+ self.app.tool_data_tables.data_tables = {}
return invalid_file_tups, metadata_dict
def reset_all_tool_versions( self, id, repo ):
@@ -1085,7 +1085,7 @@
metadata_dict )
status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- tool_util.reset_tool_data_tables( self.app )
+ self.app.tool_data_tables.data_tables = {}
return message, status
def set_repository_metadata_due_to_new_tip( self, host, repository, content_alert_str=None, **kwd ):
diff -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 -r 9cce4ead10795278446af4ab84eecfc4e2b26dce lib/tool_shed/tools/data_table_manager.py
--- /dev/null
+++ b/lib/tool_shed/tools/data_table_manager.py
@@ -0,0 +1,142 @@
+import logging
+import os
+import shutil
+
+from tool_shed.util import hg_util
+from tool_shed.util import shed_util_common as suc
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class ToolDataTableManager( object ):
+
+ def __init__( self, app ):
+ self.app = app
+
+ def get_tool_index_sample_files( self, sample_files ):
+ """
+ Try to return the list of all appropriate tool data sample files included
+ in the repository.
+ """
+ tool_index_sample_files = []
+ for s in sample_files:
+ # The problem with this is that Galaxy does not follow a standard naming
+ # convention for file names.
+ if s.endswith( '.loc.sample' ) or s.endswith( '.xml.sample' ) or s.endswith( '.txt.sample' ):
+ tool_index_sample_files.append( str( s ) )
+ return tool_index_sample_files
+
+ def handle_missing_data_table_entry( self, relative_install_dir, tool_path, repository_tools_tups ):
+ """
+ Inspect each tool to see if any have input parameters that are dynamically
+ generated select lists that require entries in the tool_data_table_conf.xml
+ file. This method is called only from Galaxy (not the tool shed) when a
+ repository is being installed or reinstalled.
+ """
+ missing_data_table_entry = False
+ for index, repository_tools_tup in enumerate( repository_tools_tups ):
+ tup_path, guid, repository_tool = repository_tools_tup
+ if repository_tool.params_with_missing_data_table_entry:
+ missing_data_table_entry = True
+ break
+ if missing_data_table_entry:
+ # The repository must contain a tool_data_table_conf.xml.sample file that includes
+ # all required entries for all tools in the repository.
+ sample_tool_data_table_conf = hg_util.get_config_from_disk( 'tool_data_table_conf.xml.sample',
+ relative_install_dir )
+ if sample_tool_data_table_conf:
+ # Add entries to the ToolDataTableManager's in-memory data_tables dictionary.
+ error, message = self.handle_sample_tool_data_table_conf_file( sample_tool_data_table_conf,
+ persist=True )
+ if error:
+ # TODO: Do more here than logging an exception.
+ log.debug( message )
+ # Reload the tool into the local list of repository_tools_tups.
+ repository_tool = self.app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
+ repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ self.reset_tool_data_tables()
+ return repository_tools_tups
+
+ def handle_sample_tool_data_table_conf_file( self, filename, persist=False ):
+ """
+ Parse the incoming filename and add new entries to the in-memory
+ self.app.tool_data_tables dictionary. If persist is True (should
+ only occur if call is from the Galaxy side, not the tool shed), the
+ new entries will be appended to Galaxy's shed_tool_data_table_conf.xml
+ file on disk.
+ """
+ error = False
+ message = ''
+ try:
+ new_table_elems, message = self.app.tool_data_tables \
+ .add_new_entries_from_config_file( config_filename=filename,
+ tool_data_path=self.app.config.tool_data_path,
+ shed_tool_data_table_config=self.app.config.shed_tool_data_table_config,
+ persist=persist )
+ if message:
+ error = True
+ except Exception, e:
+ message = str( e )
+ error = True
+ return error, message
+
+ def install_tool_data_tables( self, tool_shed_repository, tool_index_sample_files ):
+ TOOL_DATA_TABLE_FILE_NAME = 'tool_data_table_conf.xml'
+ TOOL_DATA_TABLE_FILE_SAMPLE_NAME = '%s.sample' % ( TOOL_DATA_TABLE_FILE_NAME )
+ SAMPLE_SUFFIX = '.sample'
+ SAMPLE_SUFFIX_OFFSET = -len( SAMPLE_SUFFIX )
+ tool_path, relative_target_dir = tool_shed_repository.get_tool_relative_path( self.app )
+ # This is where index files will reside on a per repo/installed version basis.
+ target_dir = os.path.join( self.app.config.shed_tool_data_path, relative_target_dir )
+ if not os.path.exists( target_dir ):
+ os.makedirs( target_dir )
+ for sample_file in tool_index_sample_files:
+ path, filename = os.path.split ( sample_file )
+ target_filename = filename
+ if target_filename.endswith( SAMPLE_SUFFIX ):
+ target_filename = target_filename[ : SAMPLE_SUFFIX_OFFSET ]
+ source_file = os.path.join( tool_path, sample_file )
+ # We're not currently uninstalling index files, do not overwrite existing files.
+ target_path_filename = os.path.join( target_dir, target_filename )
+ if not os.path.exists( target_path_filename ) or target_filename == TOOL_DATA_TABLE_FILE_NAME:
+ shutil.copy2( source_file, target_path_filename )
+ else:
+ log.debug( "Did not copy sample file '%s' to install directory '%s' because file already exists.", filename, target_dir )
+ # For provenance and to simplify introspection, let's keep the original data table sample file around.
+ if filename == TOOL_DATA_TABLE_FILE_SAMPLE_NAME:
+ shutil.copy2( source_file, os.path.join( target_dir, filename ) )
+ tool_data_table_conf_filename = os.path.join( target_dir, TOOL_DATA_TABLE_FILE_NAME )
+ elems = []
+ if os.path.exists( tool_data_table_conf_filename ):
+ tree, error_message = xml_util.parse_xml( tool_data_table_conf_filename )
+ if tree:
+ for elem in tree.getroot():
+ # Append individual table elems or other elems, but not a tables elem.
+ if elem.tag == 'tables':
+ for table_elem in elems:
+ elems.append( elem )
+ else:
+ elems.append( elem )
+ else:
+ log.debug( "The '%s' data table file was not found, but was expected to be copied from '%s' during repository installation.",
+ tool_data_table_conf_filename, TOOL_DATA_TABLE_FILE_SAMPLE_NAME )
+ for elem in elems:
+ if elem.tag == 'table':
+ for file_elem in elem.findall( 'file' ):
+ path = file_elem.get( 'path', None )
+ if path:
+ file_elem.set( 'path', os.path.normpath( os.path.join( target_dir, os.path.split( path )[1] ) ) )
+ # Store repository info in the table tag set for trace-ability.
+ repo_elem = suc.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=elem )
+ if elems:
+ # Remove old data_table
+ os.unlink( tool_data_table_conf_filename )
+ # Persist new data_table content.
+ self.app.tool_data_tables.to_xml_file( tool_data_table_conf_filename, elems )
+ return tool_data_table_conf_filename, elems
+
+ def reset_tool_data_tables( self ):
+ # Reset the tool_data_tables to an empty dictionary.
+ self.app.tool_data_tables.data_tables = {}
diff -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 -r 9cce4ead10795278446af4ab84eecfc4e2b26dce lib/tool_shed/tools/tool_validator.py
--- a/lib/tool_shed/tools/tool_validator.py
+++ b/lib/tool_shed/tools/tool_validator.py
@@ -6,6 +6,8 @@
from galaxy.tools import parameters
from galaxy.tools.parameters import dynamic_options
+from tool_shed.tools import data_table_manager
+
from tool_shed.util import basic_util
from tool_shed.util import hg_util
from tool_shed.util import shed_util_common as suc
@@ -18,6 +20,7 @@
def __init__( self, app ):
self.app = app
+ self.tdtm = data_table_manager.ToolDataTableManager( self.app )
def can_use_tool_config_disk_file( self, repository, repo, file_path, changeset_revision ):
"""
@@ -60,7 +63,8 @@
sample_tool_data_table_conf = hg_util.get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
if sample_tool_data_table_conf:
error, correction_msg = \
- tool_util.handle_sample_tool_data_table_conf_file( self.app, sample_tool_data_table_conf )
+ self.tdtm.handle_sample_tool_data_table_conf_file( sample_tool_data_table_conf,
+ persist=False )
if error:
invalid_files_and_errors_tups.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
else:
@@ -199,7 +203,8 @@
if 'tool_data_table_conf.xml.sample' in sample_files:
# Load entries into the tool_data_tables if the tool requires them.
tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
- error, message = tool_util.handle_sample_tool_data_table_conf_file( self.app, tool_data_table_config )
+ error, message = self.tdtm.handle_sample_tool_data_table_conf_file( tool_data_table_config,
+ persist=False )
tool, valid, message2 = self.load_tool_from_config( repository_id, tool_config_filepath )
message = self.concat_messages( message, message2 )
return tool, valid, message, sample_files
@@ -218,7 +223,8 @@
# Load entries into the tool_data_tables if the tool requires them.
tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
if tool_data_table_config:
- error, message = tool_util.handle_sample_tool_data_table_conf_file( self.app, tool_data_table_config )
+ error, message = self.tdtm.handle_sample_tool_data_table_conf_file( tool_data_table_config,
+ persist=False )
if error:
log.debug( message )
manifest_ctx, ctx_file = hg_util.get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision )
@@ -279,7 +285,7 @@
basic_util.remove_dir( work_dir )
self.app.config.tool_data_path = original_tool_data_path
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- tool_util.reset_tool_data_tables( self.app )
+ self.tdtm.reset_tool_data_tables()
return repository, tool, message
def load_tool_from_config( self, repository_id, full_path ):
diff -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 -r 9cce4ead10795278446af4ab84eecfc4e2b26dce lib/tool_shed/util/commit_util.py
--- a/lib/tool_shed/util/commit_util.py
+++ b/lib/tool_shed/util/commit_util.py
@@ -4,11 +4,15 @@
import os
import shutil
import tempfile
+
from galaxy.datatypes import checkers
+
+from tool_shed.tools import data_table_manager
+
from tool_shed.util import basic_util
from tool_shed.util import hg_util
from tool_shed.util import shed_util_common as suc
-from tool_shed.util import tool_util
+
import tool_shed.repository_types.util as rt_util
log = logging.getLogger( __name__ )
@@ -184,7 +188,8 @@
# Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
# by parsing the file and adding new entries to the in-memory app.tool_data_tables
# dictionary.
- error, message = tool_util.handle_sample_tool_data_table_conf_file( app, filename_in_archive )
+ tdtm = data_table_manager.ToolDataTableManager( app )
+ error, message = tdtm.handle_sample_tool_data_table_conf_file( filename_in_archive, persist=False )
if error:
return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
hg_util.commit_changeset( repo.ui,
diff -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 -r 9cce4ead10795278446af4ab84eecfc4e2b26dce lib/tool_shed/util/tool_util.py
--- a/lib/tool_shed/util/tool_util.py
+++ b/lib/tool_shed/util/tool_util.py
@@ -11,9 +11,6 @@
from galaxy.tools.actions.upload import UploadToolAction
from tool_shed.util import basic_util
-from tool_shed.util import hg_util
-from tool_shed.util import xml_util
-import tool_shed.util.shed_util_common as suc
log = logging.getLogger( __name__ )
@@ -156,45 +153,6 @@
return tool_path, relative_install_dir
return None, None
-def get_tool_index_sample_files( sample_files ):
- """Try to return the list of all appropriate tool data sample files included in the repository."""
- tool_index_sample_files = []
- for s in sample_files:
- # The problem with this is that Galaxy does not follow a standard naming convention for file names.
- if s.endswith( '.loc.sample' ) or s.endswith( '.xml.sample' ) or s.endswith( '.txt.sample' ):
- tool_index_sample_files.append( str( s ) )
- return tool_index_sample_files
-
-def handle_missing_data_table_entry( app, relative_install_dir, tool_path, repository_tools_tups ):
- """
- Inspect each tool to see if any have input parameters that are dynamically
- generated select lists that require entries in the tool_data_table_conf.xml
- file. This method is called only from Galaxy (not the tool shed) when a
- repository is being installed or reinstalled.
- """
- missing_data_table_entry = False
- for index, repository_tools_tup in enumerate( repository_tools_tups ):
- tup_path, guid, repository_tool = repository_tools_tup
- if repository_tool.params_with_missing_data_table_entry:
- missing_data_table_entry = True
- break
- if missing_data_table_entry:
- # The repository must contain a tool_data_table_conf.xml.sample file that includes
- # all required entries for all tools in the repository.
- sample_tool_data_table_conf = hg_util.get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir )
- if sample_tool_data_table_conf:
- # Add entries to the ToolDataTableManager's in-memory data_tables dictionary.
- error, message = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True )
- if error:
- # TODO: Do more here than logging an exception.
- log.debug( message )
- # Reload the tool into the local list of repository_tools_tups.
- repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
- repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( app )
- return repository_tools_tups
-
def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups, sample_files_copied ):
"""
Inspect each tool to see if it has any input parameters that are dynamically
@@ -222,84 +180,6 @@
repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
return repository_tools_tups, sample_files_copied
-def handle_sample_tool_data_table_conf_file( app, filename, persist=False ):
- """
- Parse the incoming filename and add new entries to the in-memory
- app.tool_data_tables dictionary. If persist is True (should only occur
- if call is from the Galaxy side, not the tool shed), the new entries will
- be appended to Galaxy's shed_tool_data_table_conf.xml file on disk.
- """
- error = False
- message = ''
- try:
- new_table_elems, message = \
- app.tool_data_tables.add_new_entries_from_config_file( config_filename=filename,
- tool_data_path=app.config.tool_data_path,
- shed_tool_data_table_config=app.config.shed_tool_data_table_config,
- persist=persist )
- if message:
- error = True
- except Exception, e:
- message = str( e )
- error = True
- return error, message
-
-def install_tool_data_tables( app, tool_shed_repository, tool_index_sample_files ):
- """Only ever called from Galaxy end when installing"""
- TOOL_DATA_TABLE_FILE_NAME = 'tool_data_table_conf.xml'
- TOOL_DATA_TABLE_FILE_SAMPLE_NAME = '%s.sample' % ( TOOL_DATA_TABLE_FILE_NAME )
- SAMPLE_SUFFIX = '.sample'
- SAMPLE_SUFFIX_OFFSET = -len( SAMPLE_SUFFIX )
- tool_path, relative_target_dir = tool_shed_repository.get_tool_relative_path( app )
- target_dir = os.path.join( app.config.shed_tool_data_path, relative_target_dir ) #this is where index files will reside on a per repo/installed version
- if not os.path.exists( target_dir ):
- os.makedirs( target_dir )
- for sample_file in tool_index_sample_files:
- path, filename = os.path.split ( sample_file )
- target_filename = filename
- if target_filename.endswith( SAMPLE_SUFFIX ):
- target_filename = target_filename[ : SAMPLE_SUFFIX_OFFSET ]
- source_file = os.path.join( tool_path, sample_file )
- #we're not currently uninstalling index files, do not overwrite existing files
- target_path_filename = os.path.join( target_dir, target_filename )
- if not os.path.exists( target_path_filename ) or target_filename == TOOL_DATA_TABLE_FILE_NAME:
- shutil.copy2( source_file, target_path_filename )
- else:
- log.debug( "Did not copy sample file '%s' to install directory '%s' because file already exists.", filename, target_dir )
- #for provenance and to simplify introspection, lets keep the original data table sample file around
- if filename == TOOL_DATA_TABLE_FILE_SAMPLE_NAME:
- shutil.copy2( source_file, os.path.join( target_dir, filename ) )
- tool_data_table_conf_filename = os.path.join( target_dir, TOOL_DATA_TABLE_FILE_NAME )
- elems = []
- if os.path.exists( tool_data_table_conf_filename ):
- tree, error_message = xml_util.parse_xml( tool_data_table_conf_filename )
- if tree:
- for elem in tree.getroot():
- #append individual table elems or other elemes, but not tables elems
- if elem.tag == 'tables':
- for table_elem in elems:
- elems.append( elem )
- else:
- elems.append( elem )
- else:
- log.debug( "The '%s' data table file was not found, but was expected to be copied from '%s' during repository installation.",
- tool_data_table_conf_filename, TOOL_DATA_TABLE_FILE_SAMPLE_NAME )
- for elem in elems:
- if elem.tag == 'table':
- for file_elem in elem.findall( 'file' ):
- path = file_elem.get( 'path', None )
- if path:
- file_elem.set( 'path', os.path.normpath( os.path.join( target_dir, os.path.split( path )[1] ) ) )
- #store repository info in the table tagset for traceability
- repo_elem = suc.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=elem )
- if elems:
- # Remove old data_table
- os.unlink( tool_data_table_conf_filename )
- # Persist new data_table content.
- app.tool_data_tables.to_xml_file( tool_data_table_conf_filename, elems )
- return tool_data_table_conf_filename, elems
-
-
def is_column_based( fname, sep='\t', skip=0, is_multi_byte=False ):
"""See if the file is column based with respect to a separator."""
headers = get_headers( fname, sep, is_multi_byte=is_multi_byte )
@@ -384,7 +264,3 @@
tool = app.toolbox.tools_by_id[ tool_id ]
if isinstance( tool.tool_action, UploadToolAction ):
app.toolbox.reload_tool_by_id( tool_id )
-
-def reset_tool_data_tables( app ):
- # Reset the tool_data_tables to an empty dictionary.
- app.tool_data_tables.data_tables = {}
diff -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 -r 9cce4ead10795278446af4ab84eecfc4e2b26dce lib/tool_shed/util/workflow_util.py
--- a/lib/tool_shed/util/workflow_util.py
+++ b/lib/tool_shed/util/workflow_util.py
@@ -16,7 +16,6 @@
from tool_shed.util import encoding_util
from tool_shed.util import metadata_util
from tool_shed.util import shed_util_common as suc
-from tool_shed.util import tool_util
eggs.require( "SVGFig" )
import svgfig
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Add a DataManagerHandler for Galaxy installs from the Tool Shed, eliminating the use of the data_manager_util.py module.
by commits-noreply@bitbucket.org 22 Jul '14
by commits-noreply@bitbucket.org 22 Jul '14
22 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b2cdaba8715e/
Changeset: b2cdaba8715e
User: greg
Date: 2014-07-22 19:46:40
Summary: Add a DataManagerHandler for Galaxy installs from the Tool Shed, eliminating the use of the data_manager_util.py module.
Affected #: 5 files
diff -r 09e86deab7932a72af7717bef9ec67ea70d694c9 -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -12,7 +12,6 @@
import tool_shed.repository_types.util as rt_util
from tool_shed.util import common_util
-from tool_shed.util import data_manager_util
from tool_shed.util import datatype_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
@@ -30,6 +29,7 @@
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.repair_repository_manager import RepairRepositoryManager
from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
+from tool_shed.galaxy_install.tools import data_manager
from tool_shed.galaxy_install.tools import tool_panel_manager
from tool_shed.tools import tool_version_manager
@@ -252,7 +252,8 @@
shed_tool_conf,
uninstall=remove_from_disk_checked )
if tool_shed_repository.includes_data_managers:
- data_manager_util.remove_from_data_manager( trans.app, tool_shed_repository )
+ dmh = data_manager.DataManagerHandler( trans.app )
+ dmh.remove_from_data_manager( tool_shed_repository )
if tool_shed_repository.includes_datatypes:
# Deactivate proprietary datatypes.
installed_repository_dict = datatype_util.load_installed_datatypes( trans.app,
@@ -1933,7 +1934,8 @@
hg_util.update_repository( repo, latest_ctx_rev )
# Remove old Data Manager entries
if repository.includes_data_managers:
- data_manager_util.remove_from_data_manager( trans.app, repository )
+ dmh = data_manager.DataManagerHandler( trans.app )
+ dmh.remove_from_data_manager( repository )
# Update the repository metadata.
tpm = tool_panel_manager.ToolPanelManager( trans.app )
irmm = InstalledRepositoryMetadataManager( trans.app, tpm )
@@ -1962,13 +1964,13 @@
new_install=False )
# Add new Data Manager entries
if 'data_manager' in metadata_dict:
- new_data_managers = data_manager_util.install_data_managers( trans.app,
- trans.app.config.shed_data_manager_config_file,
- metadata_dict,
- repository.get_shed_config_dict( trans.app ),
- os.path.join( relative_install_dir, name ),
- repository,
- repository_tools_tups )
+ dmh = data_manager.DataManagerHandler( trans.app )
+ new_data_managers = dmh.install_data_managers( trans.app.config.shed_data_manager_config_file,
+ metadata_dict,
+ repository.get_shed_config_dict( trans.app ),
+ os.path.join( relative_install_dir, name ),
+ repository,
+ repository_tools_tups )
if 'repository_dependencies' in metadata_dict or 'tool_dependencies' in metadata_dict:
new_repository_dependencies_dict = metadata_dict.get( 'repository_dependencies', {} )
new_repository_dependencies = new_repository_dependencies_dict.get( 'repository_dependencies', [] )
diff -r 09e86deab7932a72af7717bef9ec67ea70d694c9 -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -18,7 +18,6 @@
from tool_shed.util import basic_util
from tool_shed.util import common_util
-from tool_shed.util import data_manager_util
from tool_shed.util import datatype_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
@@ -33,6 +32,7 @@
from tool_shed.galaxy_install.tool_dependencies.recipe.install_environment import InstallEnvironment
from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import StepManager
from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import TagManager
+from tool_shed.galaxy_install.tools import data_manager
from tool_shed.galaxy_install.tools import tool_panel_manager
from tool_shed.tools import tool_version_manager
@@ -575,13 +575,13 @@
tool_panel_dict=tool_panel_dict,
new_install=True )
if 'data_manager' in metadata_dict:
- new_data_managers = data_manager_util.install_data_managers( self.app,
- self.app.config.shed_data_manager_config_file,
- metadata_dict,
- shed_config_dict,
- relative_install_dir,
- tool_shed_repository,
- repository_tools_tups )
+ dmh = data_manager.DataManagerHandler( self.app )
+ new_data_managers = dmh.install_data_managers( self.app.config.shed_data_manager_config_file,
+ metadata_dict,
+ shed_config_dict,
+ relative_install_dir,
+ tool_shed_repository,
+ repository_tools_tups )
if 'datatypes' in metadata_dict:
tool_shed_repository.status = self.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
diff -r 09e86deab7932a72af7717bef9ec67ea70d694c9 -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 lib/tool_shed/galaxy_install/installed_repository_manager.py
--- a/lib/tool_shed/galaxy_install/installed_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py
@@ -7,7 +7,6 @@
from galaxy import util
from tool_shed.util import common_util
from tool_shed.util import container_util
-from tool_shed.util import data_manager_util
from tool_shed.util import datatype_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_dependency_util
@@ -16,6 +15,7 @@
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
+from tool_shed.galaxy_install.tools import data_manager
from tool_shed.galaxy_install.tools import tool_panel_manager
log = logging.getLogger( __name__ )
@@ -99,13 +99,13 @@
tp, data_manager_relative_install_dir = repository.get_tool_relative_path( self.app )
# Hack to add repository.name here, which is actually the root of the installed repository
data_manager_relative_install_dir = os.path.join( data_manager_relative_install_dir, repository.name )
- new_data_managers = data_manager_util.install_data_managers( self.app,
- self.app.config.shed_data_manager_config_file,
- metadata,
- repository.get_shed_config_dict( self.app ),
- data_manager_relative_install_dir,
- repository,
- repository_tools_tups )
+ dmh = data_manager.DataManagerHandler( self.app )
+ new_data_managers = dmh.install_data_managers( self.app.config.shed_data_manager_config_file,
+ metadata,
+ repository.get_shed_config_dict( self.app ),
+ data_manager_relative_install_dir,
+ repository,
+ repository_tools_tups )
self.install_model.context.add( repository )
self.install_model.context.flush()
if repository.includes_datatypes:
diff -r 09e86deab7932a72af7717bef9ec67ea70d694c9 -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 lib/tool_shed/galaxy_install/tools/data_manager.py
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tools/data_manager.py
@@ -0,0 +1,150 @@
+import logging
+import os
+
+from tool_shed.galaxy_install.tools import tool_panel_manager
+
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class DataManagerHandler( object ):
+
+ def __init__( self, app ):
+ self.app = app
+
+ def data_manager_config_elems_to_xml_file( self, config_elems, config_filename ):
+ """
+ Persist the current in-memory list of config_elems to a file named by the value
+ of config_filename.
+ """
+ fh = open( config_filename, 'wb' )
+ fh.write( '<?xml version="1.0"?>\n<data_managers>\n' )
+ for elem in config_elems:
+ fh.write( xml_util.xml_to_string( elem ) )
+ fh.write( '</data_managers>\n' )
+ fh.close()
+
+ def install_data_managers( self, shed_data_manager_conf_filename, metadata_dict, shed_config_dict,
+ relative_install_dir, repository, repository_tools_tups ):
+ rval = []
+ if 'data_manager' in metadata_dict:
+ tpm = tool_panel_manager.ToolPanelManager( self.app )
+ repository_tools_by_guid = {}
+ for tool_tup in repository_tools_tups:
+ repository_tools_by_guid[ tool_tup[ 1 ] ] = dict( tool_config_filename=tool_tup[ 0 ], tool=tool_tup[ 2 ] )
+ # Load existing data managers.
+ tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
+ if tree is None:
+ return rval
+ config_elems = [ elem for elem in tree.getroot() ]
+ repo_data_manager_conf_filename = metadata_dict['data_manager'].get( 'config_filename', None )
+ if repo_data_manager_conf_filename is None:
+ log.debug( "No data_manager_conf.xml file has been defined." )
+ return rval
+ data_manager_config_has_changes = False
+ relative_repo_data_manager_dir = os.path.join( shed_config_dict.get( 'tool_path', '' ), relative_install_dir )
+ repo_data_manager_conf_filename = os.path.join( relative_repo_data_manager_dir, repo_data_manager_conf_filename )
+ tree, error_message = xml_util.parse_xml( repo_data_manager_conf_filename )
+ if tree is None:
+ return rval
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'data_manager':
+ data_manager_id = elem.get( 'id', None )
+ if data_manager_id is None:
+ log.error( "A data manager was defined that does not have an id and will not be installed:\n%s" % \
+ xml_util.xml_to_string( elem ) )
+ continue
+ data_manager_dict = metadata_dict['data_manager'].get( 'data_managers', {} ).get( data_manager_id, None )
+ if data_manager_dict is None:
+ log.error( "Data manager metadata is not defined properly for '%s'." % ( data_manager_id ) )
+ continue
+ guid = data_manager_dict.get( 'guid', None )
+ if guid is None:
+ log.error( "Data manager guid '%s' is not set in metadata for '%s'." % ( guid, data_manager_id ) )
+ continue
+ elem.set( 'guid', guid )
+ tool_guid = data_manager_dict.get( 'tool_guid', None )
+ if tool_guid is None:
+ log.error( "Data manager tool guid '%s' is not set in metadata for '%s'." % ( tool_guid, data_manager_id ) )
+ continue
+ tool_dict = repository_tools_by_guid.get( tool_guid, None )
+ if tool_dict is None:
+ log.error( "Data manager tool guid '%s' could not be found for '%s'. Perhaps the tool is invalid?" % \
+ ( tool_guid, data_manager_id ) )
+ continue
+ tool = tool_dict.get( 'tool', None )
+ if tool is None:
+ log.error( "Data manager tool with guid '%s' could not be found for '%s'. Perhaps the tool is invalid?" % \
+ ( tool_guid, data_manager_id ) )
+ continue
+ tool_config_filename = tool_dict.get( 'tool_config_filename', None )
+ if tool_config_filename is None:
+ log.error( "Data manager metadata is missing 'tool_config_file' for '%s'." % ( data_manager_id ) )
+ continue
+ elem.set( 'shed_conf_file', shed_config_dict['config_filename'] )
+ if elem.get( 'tool_file', None ) is not None:
+ del elem.attrib[ 'tool_file' ] #remove old tool_file info
+ tool_elem = tpm.generate_tool_elem( repository.tool_shed,
+ repository.name,
+ repository.installed_changeset_revision,
+ repository.owner,
+ tool_config_filename,
+ tool,
+ None )
+ elem.insert( 0, tool_elem )
+ data_manager = \
+ self.app.data_managers.load_manager_from_elem( elem,
+ tool_path=shed_config_dict.get( 'tool_path', '' ),
+ replace_existing=True )
+ if data_manager:
+ rval.append( data_manager )
+ else:
+ log.warning( "Encountered unexpected element '%s':\n%s" % ( elem.tag, xml_util.xml_to_string( elem ) ) )
+ config_elems.append( elem )
+ data_manager_config_has_changes = True
+ # Persist the altered shed_data_manager_config file.
+ if data_manager_config_has_changes:
+ self.data_manager_config_elems_to_xml_file( config_elems, shed_data_manager_conf_filename )
+ return rval
+
+ def remove_from_data_manager( self, repository ):
+ metadata_dict = repository.metadata
+ if metadata_dict and 'data_manager' in metadata_dict:
+ shed_data_manager_conf_filename = self.app.config.shed_data_manager_config_file
+ tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
+ if tree:
+ root = tree.getroot()
+ assert root.tag == 'data_managers', 'The file provided (%s) for removing data managers from is not a valid data manager xml file.' % ( shed_data_manager_conf_filename )
+ guids = [ data_manager_dict.get( 'guid' ) for data_manager_dict in metadata_dict.get( 'data_manager', {} ).get( 'data_managers', {} ).itervalues() if 'guid' in data_manager_dict ]
+ load_old_data_managers_by_guid = {}
+ data_manager_config_has_changes = False
+ config_elems = []
+ for elem in root:
+ # Match Data Manager elements by guid and installed_changeset_revision
+ elem_matches_removed_data_manager = False
+ if elem.tag == 'data_manager':
+ guid = elem.get( 'guid', None )
+ if guid in guids:
+ tool_elem = elem.find( 'tool' )
+ if tool_elem is not None:
+ installed_changeset_revision_elem = tool_elem.find( 'installed_changeset_revision' )
+ if installed_changeset_revision_elem is not None:
+ if installed_changeset_revision_elem.text == repository.installed_changeset_revision:
+ elem_matches_removed_data_manager = True
+ else:
+ # This is a different version, which had been previously overridden
+ load_old_data_managers_by_guid[ guid ] = elem
+ if elem_matches_removed_data_manager:
+ data_manager_config_has_changes = True
+ else:
+ config_elems.append( elem )
+ # Remove data managers from in memory
+ self.app.data_managers.remove_manager( guids )
+ # Load other versions of any now uninstalled data managers, if any
+ for elem in load_old_data_managers_by_guid.itervalues():
+ self.app.data_managers.load_manager_from_elem( elem )
+ # Persist the altered shed_data_manager_config file.
+ if data_manager_config_has_changes:
+ self.data_manager_config_elems_to_xml_file( config_elems, shed_data_manager_conf_filename )
diff -r 09e86deab7932a72af7717bef9ec67ea70d694c9 -r b2cdaba8715e1e2c18dab5f365fac28b6c5365a4 lib/tool_shed/util/data_manager_util.py
--- a/lib/tool_shed/util/data_manager_util.py
+++ /dev/null
@@ -1,135 +0,0 @@
-import logging
-import os
-
-from tool_shed.galaxy_install.tools import tool_panel_manager
-
-from tool_shed.util import shed_util_common as suc
-from tool_shed.util import xml_util
-
-log = logging.getLogger( __name__ )
-
-def data_manager_config_elems_to_xml_file( app, config_elems, config_filename ):#, shed_tool_conf_filename ):
- # Persist the current in-memory list of config_elems to a file named by the value of config_filename.
- fh = open( config_filename, 'wb' )
- fh.write( '<?xml version="1.0"?>\n<data_managers>\n' )#% ( shed_tool_conf_filename ))
- for elem in config_elems:
- fh.write( xml_util.xml_to_string( elem ) )
- fh.write( '</data_managers>\n' )
- fh.close()
-
-def install_data_managers( app, shed_data_manager_conf_filename, metadata_dict, shed_config_dict, relative_install_dir, repository, repository_tools_tups ):
- rval = []
- if 'data_manager' in metadata_dict:
- tpm = tool_panel_manager.ToolPanelManager( app )
- repository_tools_by_guid = {}
- for tool_tup in repository_tools_tups:
- repository_tools_by_guid[ tool_tup[ 1 ] ] = dict( tool_config_filename=tool_tup[ 0 ], tool=tool_tup[ 2 ] )
- # Load existing data managers.
- tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
- if tree is None:
- return rval
- config_elems = [ elem for elem in tree.getroot() ]
- repo_data_manager_conf_filename = metadata_dict['data_manager'].get( 'config_filename', None )
- if repo_data_manager_conf_filename is None:
- log.debug( "No data_manager_conf.xml file has been defined." )
- return rval
- data_manager_config_has_changes = False
- relative_repo_data_manager_dir = os.path.join( shed_config_dict.get( 'tool_path', '' ), relative_install_dir )
- repo_data_manager_conf_filename = os.path.join( relative_repo_data_manager_dir, repo_data_manager_conf_filename )
- tree, error_message = xml_util.parse_xml( repo_data_manager_conf_filename )
- if tree is None:
- return rval
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'data_manager':
- data_manager_id = elem.get( 'id', None )
- if data_manager_id is None:
- log.error( "A data manager was defined that does not have an id and will not be installed:\n%s" % ( xml_util.xml_to_string( elem ) ) )
- continue
- data_manager_dict = metadata_dict['data_manager'].get( 'data_managers', {} ).get( data_manager_id, None )
- if data_manager_dict is None:
- log.error( "Data manager metadata is not defined properly for '%s'." % ( data_manager_id ) )
- continue
- guid = data_manager_dict.get( 'guid', None )
- if guid is None:
- log.error( "Data manager guid '%s' is not set in metadata for '%s'." % ( guid, data_manager_id ) )
- continue
- elem.set( 'guid', guid )
- tool_guid = data_manager_dict.get( 'tool_guid', None )
- if tool_guid is None:
- log.error( "Data manager tool guid '%s' is not set in metadata for '%s'." % ( tool_guid, data_manager_id ) )
- continue
- tool_dict = repository_tools_by_guid.get( tool_guid, None )
- if tool_dict is None:
- log.error( "Data manager tool guid '%s' could not be found for '%s'. Perhaps the tool is invalid?" % ( tool_guid, data_manager_id ) )
- continue
- tool = tool_dict.get( 'tool', None )
- if tool is None:
- log.error( "Data manager tool with guid '%s' could not be found for '%s'. Perhaps the tool is invalid?" % ( tool_guid, data_manager_id ) )
- continue
- tool_config_filename = tool_dict.get( 'tool_config_filename', None )
- if tool_config_filename is None:
- log.error( "Data manager metadata is missing 'tool_config_file' for '%s'." % ( data_manager_id ) )
- continue
- elem.set( 'shed_conf_file', shed_config_dict['config_filename'] )
- if elem.get( 'tool_file', None ) is not None:
- del elem.attrib[ 'tool_file' ] #remove old tool_file info
- tool_elem = tpm.generate_tool_elem( repository.tool_shed,
- repository.name,
- repository.installed_changeset_revision,
- repository.owner,
- tool_config_filename,
- tool,
- None )
- elem.insert( 0, tool_elem )
- data_manager = app.data_managers.load_manager_from_elem( elem, tool_path=shed_config_dict.get( 'tool_path', '' ), replace_existing=True )
- if data_manager:
- rval.append( data_manager )
- else:
- log.warning( "Encountered unexpected element '%s':\n%s" % ( elem.tag, xml_util.xml_to_string( elem ) ) )
- config_elems.append( elem )
- data_manager_config_has_changes = True
- # Persist the altered shed_data_manager_config file.
- if data_manager_config_has_changes:
- data_manager_config_elems_to_xml_file( app, config_elems, shed_data_manager_conf_filename )
- return rval
-
-def remove_from_data_manager( app, repository ):
- metadata_dict = repository.metadata
- if metadata_dict and 'data_manager' in metadata_dict:
- shed_data_manager_conf_filename = app.config.shed_data_manager_config_file
- tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
- if tree:
- root = tree.getroot()
- assert root.tag == 'data_managers', 'The file provided (%s) for removing data managers from is not a valid data manager xml file.' % ( shed_data_manager_conf_filename )
- guids = [ data_manager_dict.get( 'guid' ) for data_manager_dict in metadata_dict.get( 'data_manager', {} ).get( 'data_managers', {} ).itervalues() if 'guid' in data_manager_dict ]
- load_old_data_managers_by_guid = {}
- data_manager_config_has_changes = False
- config_elems = []
- for elem in root:
- # Match Data Manager elements by guid and installed_changeset_revision
- elem_matches_removed_data_manager = False
- if elem.tag == 'data_manager':
- guid = elem.get( 'guid', None )
- if guid in guids:
- tool_elem = elem.find( 'tool' )
- if tool_elem is not None:
- installed_changeset_revision_elem = tool_elem.find( 'installed_changeset_revision' )
- if installed_changeset_revision_elem is not None:
- if installed_changeset_revision_elem.text == repository.installed_changeset_revision:
- elem_matches_removed_data_manager = True
- else:
- # This is a different version, which had been previously overridden
- load_old_data_managers_by_guid[ guid ] = elem
- if elem_matches_removed_data_manager:
- data_manager_config_has_changes = True
- else:
- config_elems.append( elem )
- # Remove data managers from in memory
- app.data_managers.remove_manager( guids )
- # Load other versions of any now uninstalled data managers, if any
- for elem in load_old_data_managers_by_guid.itervalues():
- app.data_managers.load_manager_from_elem( elem )
- # Persist the altered shed_data_manager_config file.
- if data_manager_config_has_changes:
- data_manager_config_elems_to_xml_file( app, config_elems, shed_data_manager_conf_filename )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Add a ToolVersionManager to manage tool versions in the Tool Shed and Galaxy installs.
by commits-noreply@bitbucket.org 22 Jul '14
by commits-noreply@bitbucket.org 22 Jul '14
22 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/09e86deab793/
Changeset: 09e86deab793
User: greg
Date: 2014-07-22 17:33:35
Summary: Add a ToolVersionManager to manage tool versions in the Tool Shed and Galaxy installs.
Affected #: 10 files
diff -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 -r 09e86deab7932a72af7717bef9ec67ea70d694c9 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -32,6 +32,8 @@
from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
from tool_shed.galaxy_install.tools import tool_panel_manager
+from tool_shed.tools import tool_version_manager
+
log = logging.getLogger( __name__ )
@@ -1799,7 +1801,8 @@
text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
if text:
tool_version_dicts = json.from_json_string( text )
- tool_util.handle_tool_versions( trans.app, tool_version_dicts, repository )
+ tvm = tool_version_manager.ToolVersionManager( trans.app )
+ tvm.handle_tool_versions( tool_version_dicts, repository )
message = "Tool versions have been set for all included tools."
status = 'done'
else:
@@ -1932,7 +1935,8 @@
if repository.includes_data_managers:
data_manager_util.remove_from_data_manager( trans.app, repository )
# Update the repository metadata.
- irmm = InstalledRepositoryMetadataManager( trans.app )
+ tpm = tool_panel_manager.ToolPanelManager( trans.app )
+ irmm = InstalledRepositoryMetadataManager( trans.app, tpm )
metadata_dict, invalid_file_tups = \
irmm.generate_metadata_for_changeset_revision( repository=repository,
changeset_revision=latest_changeset_revision,
@@ -1944,7 +1948,6 @@
updating_installed_repository=True,
persist=True )
if 'tools' in metadata_dict:
- tpm = tool_panel_manager.ToolPanelManager( trans.app )
tool_panel_dict = metadata_dict.get( 'tool_panel_section', None )
if tool_panel_dict is None:
tool_panel_dict = tpm.generate_tool_panel_dict_from_shed_tool_conf_entries( repository )
@@ -2123,7 +2126,8 @@
tool_config = os.path.join( shed_config_dict.get( 'tool_path' ), tool_config )
tool = trans.app.toolbox.load_tool( os.path.abspath( tool_config ), guid=tool_metadata[ 'guid' ] )
if tool:
- tool_version = tool_util.get_tool_version( trans.app, str( tool.id ) )
+ tvm = tool_version_manager.ToolVersionManager( trans.app )
+ tool_version = tvm.get_tool_version( str( tool.id ) )
tool_lineage = tool_version.get_version_ids( trans.app, reverse=True )
break
return trans.fill_template( "/admin/tool_shed_repository/view_tool_metadata.mako",
diff -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 -r 09e86deab7932a72af7717bef9ec67ea70d694c9 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -20,7 +20,9 @@
from tool_shed.galaxy_install import dependency_display
from tool_shed.metadata import repository_metadata_manager
+
from tool_shed.tools import tool_validator
+from tool_shed.tools import tool_version_manager
from tool_shed.util import basic_util
from tool_shed.util import common_util
@@ -3432,10 +3434,10 @@
basic_util.remove_dir( work_dir )
break
if guid:
- tool_lineage = tool_util.get_version_lineage_for_tool( trans.app,
- repository_id,
- repository_metadata,
- guid )
+ tvm = tool_version_manager.ToolVersionManager( trans.app )
+ tool_lineage = tvm.get_version_lineage_for_tool( repository_id,
+ repository_metadata,
+ guid )
else:
repository_metadata_id = None
metadata = None
diff -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 -r 09e86deab7932a72af7717bef9ec67ea70d694c9 lib/tool_shed/galaxy_install/dependency_display.py
--- a/lib/tool_shed/galaxy_install/dependency_display.py
+++ b/lib/tool_shed/galaxy_install/dependency_display.py
@@ -1,3 +1,4 @@
+import json
import logging
import os
import threading
@@ -461,7 +462,7 @@
url = common_util.url_join( tool_shed_url,
'repository/get_readme_files%s' % params )
raw_text = common_util.tool_shed_get( self.app, tool_shed_url, url )
- readme_files_dict = json.from_json_string( raw_text )
+ readme_files_dict = json.loads( raw_text )
else:
readme_files_dict = readme_util.build_readme_files_dict( self.app,
repository,
diff -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 -r 09e86deab7932a72af7717bef9ec67ea70d694c9 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -35,6 +35,8 @@
from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import TagManager
from tool_shed.galaxy_install.tools import tool_panel_manager
+from tool_shed.tools import tool_version_manager
+
log = logging.getLogger( __name__ )
@@ -438,10 +440,13 @@
class InstallRepositoryManager( object ):
- def __init__( self, app ):
+ def __init__( self, app, tpm=None ):
self.app = app
self.install_model = self.app.install_model
- self.tpm = tool_panel_manager.ToolPanelManager( self.app )
+ if tpm is None:
+ self.tpm = tool_panel_manager.ToolPanelManager( self.app )
+ else:
+ self.tpm = tpm
def get_repository_components_for_installation( self, encoded_tsr_id, encoded_tsr_ids, repo_info_dicts,
tool_panel_section_keys ):
@@ -503,7 +508,7 @@
reinstalling an uninstalled repository.
"""
shed_config_dict = self.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
- irmm = InstalledRepositoryMetadataManager( self.app )
+ irmm = InstalledRepositoryMetadataManager( self.app, self.tpm )
metadata_dict, invalid_file_tups = \
irmm.generate_metadata_for_changeset_revision( repository=tool_shed_repository,
changeset_revision=tool_shed_repository.changeset_revision,
@@ -869,7 +874,8 @@
text = common_util.tool_shed_get( self.app, tool_shed_url, url )
if text:
tool_version_dicts = json.loads( text )
- tool_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
+ tvm = tool_version_manager.ToolVersionManager( self.app )
+ tvm.handle_tool_versions( tool_version_dicts, tool_shed_repository )
else:
if not error_message:
error_message = ""
diff -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 -r 09e86deab7932a72af7717bef9ec67ea70d694c9 lib/tool_shed/galaxy_install/installed_repository_manager.py
--- a/lib/tool_shed/galaxy_install/installed_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py
@@ -81,8 +81,8 @@
repository.deleted = False
repository.status = self.install_model.ToolShedRepository.installation_status.INSTALLED
if repository.includes_tools_for_display_in_tool_panel:
- irmm = InstalledRepositoryMetadataManager( self.app )
tpm = tool_panel_manager.ToolPanelManager( self.app )
+ irmm = InstalledRepositoryMetadataManager( self.app, tpm )
metadata = repository.metadata
repository_tools_tups = irmm.get_repository_tools_tups( metadata )
# Reload tools into the appropriate tool panel section.
diff -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 -r 09e86deab7932a72af7717bef9ec67ea70d694c9 lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
--- a/lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
+++ b/lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
@@ -19,10 +19,13 @@
class InstalledRepositoryMetadataManager( metadata_generator.MetadataGenerator ):
- def __init__( self, app ):
+ def __init__( self, app, tpm=None ):
super( InstalledRepositoryMetadataManager, self ).__init__( app )
self.app = app
- self.tpm = tool_panel_manager.ToolPanelManager( self.app )
+ if tpm is None:
+ self.tpm = tool_panel_manager.ToolPanelManager( self.app )
+ else:
+ self.tpm = tpm
def build_repository_ids_select_field( self, name='repository_ids', multiple=True, display='checkboxes' ):
"""Generate the current list of repositories for resetting metadata."""
diff -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 -r 09e86deab7932a72af7717bef9ec67ea70d694c9 lib/tool_shed/galaxy_install/repair_repository_manager.py
--- a/lib/tool_shed/galaxy_install/repair_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/repair_repository_manager.py
@@ -150,6 +150,7 @@
metadata = repository.metadata
repair_dict = {}
+ tpm = tool_panel_manager.ToolPanelManager( self.app )
if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
try:
self.app.installed_repository_manager.activate_repository( repository )
@@ -162,7 +163,6 @@
suc.get_tool_panel_config_tool_path_install_dir( self.app, repository )
# Reset the repository attributes to the New state for installation.
if metadata:
- tpm = tool_panel_manager.ToolPanelManager( self.app )
tool_section, tool_panel_section_key = \
tpm.handle_tool_panel_selection( self.app.toolbox,
metadata,
@@ -179,7 +179,7 @@
deleted=False,
uninstalled=False,
remove_from_disk=True )
- irm = install_manager.InstallRepositoryManager( self.app )
+ irm = install_manager.InstallRepositoryManager( self.app, tpm )
irm.install_tool_shed_repository( repository,
repo_info_dict,
tool_panel_section_key,
@@ -190,7 +190,7 @@
if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.ERROR ]:
repair_dict = add_repair_dict_entry( repository.name, repository.error_message )
else:
- irm = install_manager.InstallRepositoryManager( self.app )
+ irm = install_manager.InstallRepositoryManager( self.app, tpm )
# We have an installed tool shed repository, so handle tool dependencies if necessary.
if repository.missing_tool_dependencies and metadata and 'tool_dependencies' in metadata:
work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-itdep" )
diff -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 -r 09e86deab7932a72af7717bef9ec67ea70d694c9 lib/tool_shed/galaxy_install/tool_migration_manager.py
--- a/lib/tool_shed/galaxy_install/tool_migration_manager.py
+++ b/lib/tool_shed/galaxy_install/tool_migration_manager.py
@@ -11,11 +11,14 @@
from galaxy import util
from galaxy.tools import ToolSection
+from galaxy.util.odict import odict
from tool_shed.galaxy_install import install_manager
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.tools import tool_panel_manager
+from tool_shed.tools import tool_version_manager
+
from tool_shed.util import basic_util
from tool_shed.util import common_util
from tool_shed.util import datatype_util
@@ -24,7 +27,6 @@
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
from tool_shed.util import xml_util
-from galaxy.util.odict import odict
log = logging.getLogger( __name__ )
@@ -41,6 +43,7 @@
self.app = app
self.toolbox = self.app.toolbox
self.migrated_tools_config = migrated_tools_config
+ # Initialize the ToolPanelManager.
self.tpm = tool_panel_manager.ToolPanelManager( self.app )
# If install_dependencies is True but tool_dependency_dir is not set, do not attempt
# to install but print informative error message.
@@ -412,7 +415,7 @@
log.exception( "Exception attempting to filter and persist non-shed-related tool panel configs:\n%s" % str( e ) )
finally:
lock.release()
- irmm = InstalledRepositoryMetadataManager( self.app )
+ irmm = InstalledRepositoryMetadataManager( self.app, self.tpm )
metadata_dict, invalid_file_tups = \
irmm.generate_metadata_for_changeset_revision( repository=tool_shed_repository,
changeset_revision=tool_shed_repository.changeset_revision,
@@ -471,7 +474,7 @@
new_install=True )
if install_dependencies and tool_dependencies and has_tool_dependencies:
# Install tool dependencies.
- irm = install_manager.InstallRepositoryManager( self.app )
+ irm = install_manager.InstallRepositoryManager( self.app, self.tpm )
itdm = install_manager.InstallToolDependencyManager( self.app )
irm.update_tool_shed_repository_status( tool_shed_repository,
self.app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
@@ -533,7 +536,7 @@
if cloned_ok and is_installed:
print "Skipping automatic install of repository '", tool_shed_repository.name, "' because it has already been installed in location ", clone_dir
else:
- irm = install_manager.InstallRepositoryManager( self.app )
+ irm = install_manager.InstallRepositoryManager( self.app, self.tpm )
repository_clone_url = os.path.join( self.tool_shed_url, 'repos', tool_shed_repository.owner, tool_shed_repository.name )
relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
install_dir = os.path.join( clone_dir, tool_shed_repository.name )
@@ -556,6 +559,8 @@
self.app.install_model.context.refresh( tool_shed_repository )
metadata_dict = tool_shed_repository.metadata
if 'tools' in metadata_dict:
+ # Initialize the ToolVersionManager.
+ tvm = tool_version_manager.ToolVersionManager( self.app )
irm.update_tool_shed_repository_status( tool_shed_repository,
self.app.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
# Get the tool_versions from the tool shed for each tool in the installed change set.
@@ -564,7 +569,7 @@
text = common_util.tool_shed_get( self.app, self.tool_shed_url, url )
if text:
tool_version_dicts = json.loads( text )
- tool_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
+ tvm.handle_tool_versions( tool_version_dicts, tool_shed_repository )
else:
# Set the tool versions since they seem to be missing for this repository in the tool shed.
# CRITICAL NOTE: These default settings may not properly handle all parent/child associations.
@@ -573,8 +578,8 @@
tool_id = tool_dict[ 'guid' ]
old_tool_id = tool_dict[ 'id' ]
tool_version = tool_dict[ 'version' ]
- tool_version_using_old_id = tool_util.get_tool_version( self.app, old_tool_id )
- tool_version_using_guid = tool_util.get_tool_version( self.app, tool_id )
+ tool_version_using_old_id = tvm.get_tool_version( old_tool_id )
+ tool_version_using_guid = tvm.get_tool_version( tool_id )
if not tool_version_using_old_id:
tool_version_using_old_id = self.app.install_model.ToolVersion( tool_id=old_tool_id,
tool_shed_repository=tool_shed_repository )
@@ -586,12 +591,12 @@
self.app.install_model.context.add( tool_version_using_guid )
self.app.install_model.context.flush()
# Associate the two versions as parent / child.
- tool_version_association = tool_util.get_tool_version_association( self.app,
- tool_version_using_old_id,
- tool_version_using_guid )
+ tool_version_association = tvm.get_tool_version_association( tool_version_using_old_id,
+ tool_version_using_guid )
if not tool_version_association:
- tool_version_association = self.app.install_model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
- parent_id=tool_version_using_old_id.id )
+ tool_version_association = \
+ self.app.install_model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
+ parent_id=tool_version_using_old_id.id )
self.app.install_model.context.add( tool_version_association )
self.app.install_model.context.flush()
irm.update_tool_shed_repository_status( tool_shed_repository,
diff -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 -r 09e86deab7932a72af7717bef9ec67ea70d694c9 lib/tool_shed/tools/tool_version_manager.py
--- /dev/null
+++ b/lib/tool_shed/tools/tool_version_manager.py
@@ -0,0 +1,103 @@
+import logging
+import os
+
+from galaxy.model.orm import and_
+
+from tool_shed.util import hg_util
+from tool_shed.util import shed_util_common as suc
+
+log = logging.getLogger( __name__ )
+
+
+class ToolVersionManager( object ):
+
+ def __init__( self, app ):
+ self.app = app
+
+ def get_tool_version( self, tool_id ):
+ context = self.app.install_model.context
+ return context.query( self.app.install_model.ToolVersion ) \
+ .filter( self.app.install_model.ToolVersion.table.c.tool_id == tool_id ) \
+ .first()
+
+ def get_tool_version_association( self, parent_tool_version, tool_version ):
+ """
+ Return a ToolVersionAssociation if one exists that associates the two received
+ tool_versions. This function is called only from Galaxy.
+ """
+ context = self.app.install_model.context
+ return context.query( self.app.install_model.ToolVersionAssociation ) \
+ .filter( and_( self.app.install_model.ToolVersionAssociation.table.c.parent_id == parent_tool_version.id,
+ self.app.install_model.ToolVersionAssociation.table.c.tool_id == tool_version.id ) ) \
+ .first()
+
+ def get_version_lineage_for_tool( self, repository_id, repository_metadata, guid ):
+ """
+ Return the tool version lineage chain in descendant order for the received
+ guid contained in the received repository_metadata.tool_versions. This function
+ is called only from the Tool Shed.
+ """
+ repository = suc.get_repository_by_id( self.app, repository_id )
+ repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False )
+ # Initialize the tool lineage
+ version_lineage = [ guid ]
+ # Get all ancestor guids of the received guid.
+ current_child_guid = guid
+ for changeset in hg_util.reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ):
+ ctx = repo.changectx( changeset )
+ rm = suc.get_repository_metadata_by_changeset_revision( self.app, repository_id, str( ctx ) )
+ if rm:
+ parent_guid = rm.tool_versions.get( current_child_guid, None )
+ if parent_guid:
+ version_lineage.append( parent_guid )
+ current_child_guid = parent_guid
+ # Get all descendant guids of the received guid.
+ current_parent_guid = guid
+ for changeset in hg_util.reversed_lower_upper_bounded_changelog( repo,
+ repository_metadata.changeset_revision,
+ repository.tip( self.app ) ):
+ ctx = repo.changectx( changeset )
+ rm = suc.get_repository_metadata_by_changeset_revision( self.app, repository_id, str( ctx ) )
+ if rm:
+ tool_versions = rm.tool_versions
+ for child_guid, parent_guid in tool_versions.items():
+ if parent_guid == current_parent_guid:
+ version_lineage.insert( 0, child_guid )
+ current_parent_guid = child_guid
+ break
+ return version_lineage
+
+ def handle_tool_versions( self, tool_version_dicts, tool_shed_repository ):
+ """
+ Using the list of tool_version_dicts retrieved from the Tool Shed (one per changeset
+ revision up to the currently installed changeset revision), create the parent / child
+ pairs of tool versions. Each dictionary contains { tool id : parent tool id } pairs.
+ This function is called only from Galaxy.
+ """
+ context = self.app.install_model.context
+ for tool_version_dict in tool_version_dicts:
+ for tool_guid, parent_id in tool_version_dict.items():
+ tool_version_using_tool_guid = self.get_tool_version( tool_guid )
+ tool_version_using_parent_id = self.get_tool_version( parent_id )
+ if not tool_version_using_tool_guid:
+ tool_version_using_tool_guid = \
+ self.app.install_model.ToolVersion( tool_id=tool_guid,
+ tool_shed_repository=tool_shed_repository )
+ context.add( tool_version_using_tool_guid )
+ context.flush()
+ if not tool_version_using_parent_id:
+ tool_version_using_parent_id = \
+ self.app.install_model.ToolVersion( tool_id=parent_id,
+ tool_shed_repository=tool_shed_repository )
+ context.add( tool_version_using_parent_id )
+ context.flush()
+ tool_version_association = \
+ self.get_tool_version_association( tool_version_using_parent_id,
+ tool_version_using_tool_guid )
+ if not tool_version_association:
+ # Associate the two versions as parent / child.
+ tool_version_association = \
+ self.app.install_model.ToolVersionAssociation( tool_id=tool_version_using_tool_guid.id,
+ parent_id=tool_version_using_parent_id.id )
+ context.add( tool_version_association )
+ context.flush()
diff -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 -r 09e86deab7932a72af7717bef9ec67ea70d694c9 lib/tool_shed/util/tool_util.py
--- a/lib/tool_shed/util/tool_util.py
+++ b/lib/tool_shed/util/tool_util.py
@@ -1,21 +1,19 @@
import logging
import os
import shutil
+
import galaxy.tools
from galaxy import util
from galaxy.datatypes import checkers
-from galaxy.model.orm import and_
from galaxy.tools import parameters
-from galaxy.tools.search import ToolBoxSearch
from galaxy.util.expressions import ExpressionContext
from galaxy.web.form_builder import SelectField
from galaxy.tools.actions.upload import UploadToolAction
+
from tool_shed.util import basic_util
-from tool_shed.util import common_util
from tool_shed.util import hg_util
from tool_shed.util import xml_util
import tool_shed.util.shed_util_common as suc
-from xml.etree import ElementTree as XmlET
log = logging.getLogger( __name__ )
@@ -167,55 +165,6 @@
tool_index_sample_files.append( str( s ) )
return tool_index_sample_files
-def get_tool_version( app, tool_id ):
- context = app.install_model.context
- return context.query( app.install_model.ToolVersion ) \
- .filter( app.install_model.ToolVersion.table.c.tool_id == tool_id ) \
- .first()
-
-def get_tool_version_association( app, parent_tool_version, tool_version ):
- """Return a ToolVersionAssociation if one exists that associates the two received tool_versions"""
- context = app.install_model.context
- return context.query( app.install_model.ToolVersionAssociation ) \
- .filter( and_( app.install_model.ToolVersionAssociation.table.c.parent_id == parent_tool_version.id,
- app.install_model.ToolVersionAssociation.table.c.tool_id == tool_version.id ) ) \
- .first()
-
-def get_version_lineage_for_tool( app, repository_id, repository_metadata, guid ):
- """
- Return the tool version lineage chain in descendant order for the received
- guid contained in the received repsitory_metadata.tool_versions.
- """
- repository = suc.get_repository_by_id( app, repository_id )
- repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
- # Initialize the tool lineage
- version_lineage = [ guid ]
- # Get all ancestor guids of the received guid.
- current_child_guid = guid
- for changeset in hg_util.reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ):
- ctx = repo.changectx( changeset )
- rm = suc.get_repository_metadata_by_changeset_revision( app, repository_id, str( ctx ) )
- if rm:
- parent_guid = rm.tool_versions.get( current_child_guid, None )
- if parent_guid:
- version_lineage.append( parent_guid )
- current_child_guid = parent_guid
- # Get all descendant guids of the received guid.
- current_parent_guid = guid
- for changeset in hg_util.reversed_lower_upper_bounded_changelog( repo,
- repository_metadata.changeset_revision,
- repository.tip( app ) ):
- ctx = repo.changectx( changeset )
- rm = suc.get_repository_metadata_by_changeset_revision( app, repository_id, str( ctx ) )
- if rm:
- tool_versions = rm.tool_versions
- for child_guid, parent_guid in tool_versions.items():
- if parent_guid == current_parent_guid:
- version_lineage.insert( 0, child_guid )
- current_parent_guid = child_guid
- break
- return version_lineage
-
def handle_missing_data_table_entry( app, relative_install_dir, tool_path, repository_tools_tups ):
"""
Inspect each tool to see if any have input parameters that are dynamically
@@ -295,35 +244,6 @@
error = True
return error, message
-def handle_tool_versions( app, tool_version_dicts, tool_shed_repository ):
- """
- Using the list of tool_version_dicts retrieved from the tool shed (one per changeset
- revison up to the currently installed changeset revision), create the parent / child
- pairs of tool versions. Each dictionary contains { tool id : parent tool id } pairs.
- """
- context = app.install_model.context
- for tool_version_dict in tool_version_dicts:
- for tool_guid, parent_id in tool_version_dict.items():
- tool_version_using_tool_guid = get_tool_version( app, tool_guid )
- tool_version_using_parent_id = get_tool_version( app, parent_id )
- if not tool_version_using_tool_guid:
- tool_version_using_tool_guid = app.install_model.ToolVersion( tool_id=tool_guid, tool_shed_repository=tool_shed_repository )
- context.add( tool_version_using_tool_guid )
- context.flush()
- if not tool_version_using_parent_id:
- tool_version_using_parent_id = app.install_model.ToolVersion( tool_id=parent_id, tool_shed_repository=tool_shed_repository )
- context.add( tool_version_using_parent_id )
- context.flush()
- tool_version_association = get_tool_version_association( app,
- tool_version_using_parent_id,
- tool_version_using_tool_guid )
- if not tool_version_association:
- # Associate the two versions as parent / child.
- tool_version_association = app.install_model.ToolVersionAssociation( tool_id=tool_version_using_tool_guid.id,
- parent_id=tool_version_using_parent_id.id )
- context.add( tool_version_association )
- context.flush()
-
def install_tool_data_tables( app, tool_shed_repository, tool_index_sample_files ):
"""Only ever called from Galaxy end when installing"""
TOOL_DATA_TABLE_FILE_NAME = 'tool_data_table_conf.xml'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: davebgx: Delete images for tools that are no longer in the distribution.
by commits-noreply@bitbucket.org 22 Jul '14
by commits-noreply@bitbucket.org 22 Jul '14
22 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/246ad6f8e6c4/
Changeset: 246ad6f8e6c4
User: davebgx
Date: 2014-07-22 17:01:23
Summary: Delete images for tools that are no longer in the distribution.
Affected #: 97 files
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/emboss_icons/isochore.png
Binary file static/emboss_icons/isochore.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/barcode_splitter_output_example.png
Binary file static/fastx_icons/barcode_splitter_output_example.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fasta_clipping_histogram_1.png
Binary file static/fastx_icons/fasta_clipping_histogram_1.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fasta_clipping_histogram_2.png
Binary file static/fastx_icons/fasta_clipping_histogram_2.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fasta_clipping_histogram_3.png
Binary file static/fastx_icons/fasta_clipping_histogram_3.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fasta_clipping_histogram_4.png
Binary file static/fastx_icons/fasta_clipping_histogram_4.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fastq_nucleotides_distribution_1.png
Binary file static/fastx_icons/fastq_nucleotides_distribution_1.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fastq_nucleotides_distribution_2.png
Binary file static/fastx_icons/fastq_nucleotides_distribution_2.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fastq_nucleotides_distribution_3.png
Binary file static/fastx_icons/fastq_nucleotides_distribution_3.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fastq_nucleotides_distribution_4.png
Binary file static/fastx_icons/fastq_nucleotides_distribution_4.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fastq_quality_boxplot_1.png
Binary file static/fastx_icons/fastq_quality_boxplot_1.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fastq_quality_boxplot_2.png
Binary file static/fastx_icons/fastq_quality_boxplot_2.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fastq_quality_boxplot_3.png
Binary file static/fastx_icons/fastq_quality_boxplot_3.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fastx_clipper_example.png
Binary file static/fastx_icons/fastx_clipper_example.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/fastx_icons/fastx_clipper_illustration.png
Binary file static/fastx_icons/fastx_clipper_illustration.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/gsummary_out.gif
Binary file static/images/gsummary_out.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/gsummary_out_groups.gif
Binary file static/images/gsummary_out_groups.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/hilbertvis1.png
Binary file static/images/hilbertvis1.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/hilbertvis2.png
Binary file static/images/hilbertvis2.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/hilbertvisDiagram.png
Binary file static/images/hilbertvisDiagram.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/pileup_parser_help1.png
Binary file static/images/pileup_parser_help1.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/pileup_parser_help2.png
Binary file static/images/pileup_parser_help2.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/pileup_parser_help3.png
Binary file static/images/pileup_parser_help3.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/pileup_parser_help4.png
Binary file static/images/pileup_parser_help4.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/poisson2test_eqn1.png
Binary file static/images/poisson2test_eqn1.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/poisson2test_eqn2.png
Binary file static/images/poisson2test_eqn2.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/t2ps_autoscale.png
Binary file static/images/t2ps_autoscale.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/t2ps_autoscale_tree.png
Binary file static/images/t2ps_autoscale_tree.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/t2ps_heatmap.png
Binary file static/images/t2ps_heatmap.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/t2ps_ideal.png
Binary file static/images/t2ps_ideal.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/t2ps_ideal_ssp.png
Binary file static/images/t2ps_ideal_ssp.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/t2ps_missing_nodes.png
Binary file static/images/t2ps_missing_nodes.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/t2ps_node_label.png
Binary file static/images/t2ps_node_label.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/t2t_dropdown.png
Binary file static/images/t2t_dropdown.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/t2t_exclude.png
Binary file static/images/t2t_exclude.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/t2t_include.png
Binary file static/images/t2t_include.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/t2t_tree_exclude.png
Binary file static/images/t2t_tree_exclude.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/t2t_tree_include.png
Binary file static/images/t2t_tree_include.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/images/xy_example.jpg
Binary file static/images/xy_example.jpg has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/Q_plots.png
Binary file static/operation_icons/Q_plots.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/cluster.png
Binary file static/operation_icons/cluster.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/clusterMerge.png
Binary file static/operation_icons/clusterMerge.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/complement.png
Binary file static/operation_icons/complement.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/difference.png
Binary file static/operation_icons/difference.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_IvC_1.png
Binary file static/operation_icons/dwt_IvC_1.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_IvC_2.png
Binary file static/operation_icons/dwt_IvC_2.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_IvC_3.png
Binary file static/operation_icons/dwt_IvC_3.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_IvC_4.png
Binary file static/operation_icons/dwt_IvC_4.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_IvC_5.png
Binary file static/operation_icons/dwt_IvC_5.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVa_1.png
Binary file static/operation_icons/dwt_cor_aVa_1.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVa_2.png
Binary file static/operation_icons/dwt_cor_aVa_2.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVa_3.png
Binary file static/operation_icons/dwt_cor_aVa_3.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVa_4.png
Binary file static/operation_icons/dwt_cor_aVa_4.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVa_5.png
Binary file static/operation_icons/dwt_cor_aVa_5.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVb_all_1.png
Binary file static/operation_icons/dwt_cor_aVb_all_1.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVb_all_10.png
Binary file static/operation_icons/dwt_cor_aVb_all_10.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVb_all_2.png
Binary file static/operation_icons/dwt_cor_aVb_all_2.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVb_all_3.png
Binary file static/operation_icons/dwt_cor_aVb_all_3.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVb_all_4.png
Binary file static/operation_icons/dwt_cor_aVb_all_4.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVb_all_5.png
Binary file static/operation_icons/dwt_cor_aVb_all_5.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVb_all_6.png
Binary file static/operation_icons/dwt_cor_aVb_all_6.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVb_all_7.png
Binary file static/operation_icons/dwt_cor_aVb_all_7.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVb_all_8.png
Binary file static/operation_icons/dwt_cor_aVb_all_8.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_cor_aVb_all_9.png
Binary file static/operation_icons/dwt_cor_aVb_all_9.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/dwt_var_perClass.png
Binary file static/operation_icons/dwt_var_perClass.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/flanks_ex1.gif
Binary file static/operation_icons/flanks_ex1.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/flanks_ex2.gif
Binary file static/operation_icons/flanks_ex2.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_baseCoverage.gif
Binary file static/operation_icons/gops_baseCoverage.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_cluster.gif
Binary file static/operation_icons/gops_cluster.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_clusterFind.gif
Binary file static/operation_icons/gops_clusterFind.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_clusterMerge.gif
Binary file static/operation_icons/gops_clusterMerge.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_complement.gif
Binary file static/operation_icons/gops_complement.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_concatenate.gif
Binary file static/operation_icons/gops_concatenate.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_intersect.gif
Binary file static/operation_icons/gops_intersect.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_intersectOverlappingIntervals.gif
Binary file static/operation_icons/gops_intersectOverlappingIntervals.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_intersectOverlappingPieces.gif
Binary file static/operation_icons/gops_intersectOverlappingPieces.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_joinFullOuter.gif
Binary file static/operation_icons/gops_joinFullOuter.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_joinInner.gif
Binary file static/operation_icons/gops_joinInner.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_joinLeftOuter.gif
Binary file static/operation_icons/gops_joinLeftOuter.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_joinRecordsList.gif
Binary file static/operation_icons/gops_joinRecordsList.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_joinRightOuter.gif
Binary file static/operation_icons/gops_joinRightOuter.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_merge.gif
Binary file static/operation_icons/gops_merge.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_subtract.gif
Binary file static/operation_icons/gops_subtract.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_subtractOverlappingIntervals.gif
Binary file static/operation_icons/gops_subtractOverlappingIntervals.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/gops_subtractOverlappingPieces.gif
Binary file static/operation_icons/gops_subtractOverlappingPieces.gif has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/intersect.png
Binary file static/operation_icons/intersect.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/merge.png
Binary file static/operation_icons/merge.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/overlap.png
Binary file static/operation_icons/overlap.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/p_hist.png
Binary file static/operation_icons/p_hist.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/proximityEx.png
Binary file static/operation_icons/proximityEx.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/proximityInc.png
Binary file static/operation_icons/proximityInc.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/q_hist.png
Binary file static/operation_icons/q_hist.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/stacked_bars_plot.png
Binary file static/operation_icons/stacked_bars_plot.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/subtract.png
Binary file static/operation_icons/subtract.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/union.png
Binary file static/operation_icons/union.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/vicinityEnd.png
Binary file static/operation_icons/vicinityEnd.png has changed
diff -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac -r 246ad6f8e6c43753a8d04ffda2f5f5f019e86fc9 static/operation_icons/vicinityStart.png
Binary file static/operation_icons/vicinityStart.png has changed
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Move some functions out of the tool shed's shed_util_common module into appropriate classes.
by commits-noreply@bitbucket.org 21 Jul '14
by commits-noreply@bitbucket.org 21 Jul '14
21 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/55b4556e0b7e/
Changeset: 55b4556e0b7e
User: greg
Date: 2014-07-21 22:44:32
Summary: Move some functions out of the tool shed's shed_util_common module into appropriate classes.
Affected #: 16 files
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -13,6 +13,7 @@
from tool_shed.galaxy_install.install_manager import InstallRepositoryManager
from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.repair_repository_manager import RepairRepositoryManager
+
from tool_shed.util import common_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -46,7 +46,7 @@
def activate_repository( self, trans, **kwd ):
"""Activate a repository that was deactivated but not uninstalled."""
repository_id = kwd[ 'id' ]
- repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
try:
trans.app.installed_repository_manager.activate_repository( repository )
except Exception, e:
@@ -72,7 +72,7 @@
def browse_repository( self, trans, **kwd ):
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- repository = suc.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
return trans.fill_template( '/admin/tool_shed_repository/browse_repository.mako',
repository=repository,
message=message,
@@ -105,7 +105,7 @@
action='purge_repository',
**kwd ) )
if operation == "activate or reinstall":
- repository = suc.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
if repository.uninstalled:
# Since we're reinstalling the repository we need to find the latest changeset revision to which it can
# be updated so that we can reset the metadata if necessary. This will ensure that information about
@@ -206,7 +206,7 @@
def check_for_updates( self, trans, **kwd ):
"""Send a request to the relevant tool shed to see if there are any updates."""
repository_id = kwd.get( 'id', None )
- repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
params = '?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
( web.url_for( '/', qualified=True ),
@@ -232,7 +232,7 @@
status = kwd.get( 'status', 'done' )
remove_from_disk = kwd.get( 'remove_from_disk', '' )
remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk )
- tool_shed_repository = suc.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+ tool_shed_repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
shed_tool_conf, tool_path, relative_install_dir = \
suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
if relative_install_dir:
@@ -394,7 +394,7 @@
of the installed tool shed repository in Galaxy. We need it so that we can derive the tool shed from which
it was installed.
"""
- repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
if tool_shed_url is None or repository_name is None or repository_owner is None or changeset_revision is None:
message = "Unable to retrieve tool dependencies from the Tool Shed because one or more of the following required "
@@ -419,7 +419,7 @@
Send a request to the appropriate tool shed to retrieve the dictionary of information required to reinstall
an updated revision of an uninstalled tool shed repository.
"""
- repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
if tool_shed_url is None or repository_name is None or repository_owner is None or changeset_revision is None:
message = "Unable to retrieve updated repository information from the Tool Shed because one or more of the following "
@@ -513,7 +513,7 @@
status = kwd.get( 'status', 'done' )
repository_id = kwd.get( 'id', None )
if repository_id is not None:
- repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
if repository is not None:
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
name = str( repository.name )
@@ -571,7 +571,7 @@
dependencies are included in the updated revision.
"""
updating_repository_id = kwd.get( 'updating_repository_id', None )
- repository = suc.get_installed_tool_shed_repository( trans.app, updating_repository_id )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, updating_repository_id )
# All received dependencies need to be installed - confirmed by the caller.
encoded_tool_dependencies_dict = kwd.get( 'encoded_tool_dependencies_dict', None )
if encoded_tool_dependencies_dict is not None:
@@ -747,7 +747,7 @@
if repository_id is None:
return trans.show_error_message( 'Missing required encoded repository id.' )
operation = kwd.get( 'operation', None )
- repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
if repository is None:
return trans.show_error_message( 'Invalid repository specified.' )
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
@@ -866,15 +866,15 @@
message += ' and restart your Galaxy server to install tool dependencies.'
status = 'error'
installed_tool_dependencies_select_field = \
- suc.build_tool_dependencies_select_field( trans.app,
- tool_shed_repository=tool_shed_repository,
- name='inst_td_ids',
- uninstalled_only=False )
+ tool_dependency_util.build_tool_dependencies_select_field( trans.app,
+ tool_shed_repository=tool_shed_repository,
+ name='inst_td_ids',
+ uninstalled_only=False )
uninstalled_tool_dependencies_select_field = \
- suc.build_tool_dependencies_select_field( trans.app,
- tool_shed_repository=tool_shed_repository,
- name='uninstalled_tool_dependency_ids',
- uninstalled_only=True )
+ tool_dependency_util.build_tool_dependencies_select_field( trans.app,
+ tool_shed_repository=tool_shed_repository,
+ name='uninstalled_tool_dependency_ids',
+ uninstalled_only=True )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository_tool_dependencies.mako',
repository=tool_shed_repository,
installed_tool_dependencies_select_field=installed_tool_dependencies_select_field,
@@ -1276,7 +1276,7 @@
repository_id = kwd.get( 'id', None )
new_kwd = {}
if repository_id is not None:
- repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
if repository:
if repository.is_new:
if kwd.get( 'purge_repository_button', False ):
@@ -1314,7 +1314,7 @@
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_id = kwd[ 'id' ]
- tool_shed_repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ tool_shed_repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
no_changes = kwd.get( 'no_changes', '' )
no_changes_checked = CheckboxField.is_checked( no_changes )
install_repository_dependencies = CheckboxField.is_checked( kwd.get( 'install_repository_dependencies', '' ) )
@@ -1457,7 +1457,7 @@
action='browse_repositories',
message=message,
status=status ) )
- tool_shed_repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ tool_shed_repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
rrm = RepairRepositoryManager( trans.app )
if kwd.get( 'repair_repository_button', False ):
encoded_repair_dict = kwd.get( 'repair_dict', None )
@@ -1475,7 +1475,7 @@
repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
repositories_for_repair.append( repository )
return self.repair_tool_shed_repositories( trans, rrm, repositories_for_repair, ordered_repo_info_dicts )
- tool_shed_repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ tool_shed_repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
repair_dict = rrm.get_repair_dict( tool_shed_repository )
encoded_repair_dict = encoding_util.tool_shed_encode( repair_dict )
ordered_tsr_ids = repair_dict.get( 'ordered_tsr_ids', [] )
@@ -1537,7 +1537,7 @@
repository_id = kwd.get( 'id', None )
latest_changeset_revision = kwd.get( 'latest_changeset_revision', None )
latest_ctx_rev = kwd.get( 'latest_ctx_rev', None )
- tool_shed_repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ tool_shed_repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
metadata = tool_shed_repository.metadata
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( tool_shed_repository.tool_shed ) )
@@ -1724,7 +1724,7 @@
@web.require_admin
def reset_repository_metadata( self, trans, id ):
"""Reset all metadata on a single installed tool shed repository."""
- repository = suc.get_installed_tool_shed_repository( trans.app, id )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, id )
repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
@@ -1742,7 +1742,7 @@
persist=False )
repository.metadata = metadata_dict
if metadata_dict != original_metadata_dict:
- suc.update_in_shed_tool_config( trans.app, repository )
+ irmm.update_in_shed_tool_config( repository )
trans.install_model.context.add( repository )
trans.install_model.context.flush()
message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name
@@ -1763,7 +1763,7 @@
@web.require_admin
def reset_to_install( self, trans, **kwd ):
"""An error occurred while cloning the repository, so reset everything necessary to enable another attempt."""
- repository = suc.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
if kwd.get( 'reset_repository', False ):
suc.set_repository_attributes( trans.app,
repository,
@@ -1789,7 +1789,7 @@
Get the tool_versions from the tool shed for each tool in the installed revision of a selected tool shed
repository and update the metadata for the repository's revision in the Galaxy database.
"""
- repository = suc.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( repository.name ),
str( repository.owner ),
@@ -1947,8 +1947,8 @@
tpm = tool_panel_manager.ToolPanelManager( trans.app )
tool_panel_dict = metadata_dict.get( 'tool_panel_section', None )
if tool_panel_dict is None:
- tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( trans.app, repository )
- repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict )
+ tool_panel_dict = tpm.generate_tool_panel_dict_from_shed_tool_conf_entries( repository )
+ repository_tools_tups = irmm.get_repository_tools_tups( metadata_dict )
tpm.add_to_tool_panel( repository_name=str( repository.name ),
repository_clone_url=repository_clone_url,
changeset_revision=str( repository.installed_changeset_revision ),
@@ -2069,8 +2069,10 @@
repository_names_not_updated = []
updated_count = 0
for repository in trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
- .filter( trans.install_model.ToolShedRepository.table.c.deleted == False ):
- ok, updated = suc.check_or_update_tool_shed_status_for_installed_repository( trans.app, repository )
+ .filter( trans.install_model.ToolShedRepository.table.c.deleted == False ):
+ ok, updated = \
+ repository_maintenance_util.check_or_update_tool_shed_status_for_installed_repository( trans.app,
+ repository )
if ok:
success_count += 1
else:
@@ -2085,7 +2087,9 @@
else:
repository_id = kwd.get( 'id', None )
repository = suc.get_tool_shed_repository_by_id( trans.app, repository_id )
- ok, updated = suc.check_or_update_tool_shed_status_for_installed_repository( trans.app, repository )
+ ok, updated = \
+ repository_maintenance_util.check_or_update_tool_shed_status_for_installed_repository( trans.app,
+ repository )
if ok:
if updated:
message = "The tool shed status for repository <b>%s</b> has been updated." % str( repository.name )
@@ -2104,7 +2108,7 @@
def view_tool_metadata( self, trans, repository_id, tool_id, **kwd ):
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( trans.app, repository_id )
repository_metadata = repository.metadata
shed_config_dict = repository.get_shed_config_dict( trans.app )
tool_metadata = {}
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -2755,7 +2755,7 @@
# This method is called only from the ~/templates/webapps/tool_shed/repository/manage_repository.mako template.
rmm = repository_metadata_manager.RepositoryMetadataManager( trans.app, trans.user )
invalid_file_tups, metadata_dict = \
- rmm.reset_all_metadata_on_repository_in_tool_shed( id, **kwd )
+ rmm.reset_all_metadata_on_repository_in_tool_shed( id )
if invalid_file_tups:
repository = suc.get_repository_in_tool_shed( trans.app, id )
message = tool_util.generate_message_for_invalid_tools( trans.app,
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -42,6 +42,7 @@
def __init__( self, app ):
self.app = app
+ self.install_model = self.app.install_model
self.INSTALL_ACTIONS = [ 'download_binary', 'download_by_url', 'download_file',
'setup_perl_environment', 'setup_python_environment',
'setup_r_environment', 'setup_ruby_environment', 'shell_command' ]
@@ -118,7 +119,7 @@
work_dir=work_dir,
current_dir=current_dir,
initial_download=False )
- if tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR ]:
+ if tool_dependency.status in [ self.install_model.ToolDependency.installation_status.ERROR ]:
# If the tool_dependency status is in an error state, return it with no additional
# processing.
return tool_dependency
@@ -130,7 +131,6 @@
return tool_dependency
def install_and_build_package_via_fabric( self, tool_shed_repository, tool_dependency, actions_dict ):
- sa_session = self.app.install_model.context
try:
# There is currently only one fabric method.
tool_dependency = self.install_and_build_package( tool_shed_repository, tool_dependency, actions_dict )
@@ -221,8 +221,8 @@
tool_dependency,
error_message,
remove_installation_path=False )
- if tool_dependency and tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.INSTALLED,
- self.app.install_model.ToolDependency.installation_status.ERROR ]:
+ if tool_dependency and tool_dependency.status in [ self.install_model.ToolDependency.installation_status.INSTALLED,
+ self.install_model.ToolDependency.installation_status.ERROR ]:
installed_packages.append( tool_dependency )
if self.app.config.manage_dependency_relationships:
# Add the tool_dependency to the in-memory dictionaries in the installed_repository_manager.
@@ -237,7 +237,6 @@
self.install_and_build_package(). The use of fabric is being eliminated, so some of these functions
may need to be renamed at some point.
"""
- sa_session = self.app.install_model.context
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
actions_dict = dict( install_dir=install_dir )
@@ -353,7 +352,7 @@
package_name=package_name,
actions_elem=actions_elem,
action_elem=None )
- if tool_dependency.status == self.app.install_model.ToolDependency.installation_status.INSTALLED:
+ if tool_dependency.status == self.install_model.ToolDependency.installation_status.INSTALLED:
# If an <actions> tag was found that matches the current platform, and
# self.install_via_fabric() did not result in an error state, set binary_installed
# to True in order to skip any remaining platform-specific <actions> tags.
@@ -396,7 +395,7 @@
actions_elem=actions_elem,
action_elem=None )
if actions_elem.tag == 'action' and \
- tool_dependency.status != self.app.install_model.ToolDependency.installation_status.ERROR:
+ tool_dependency.status != self.install_model.ToolDependency.installation_status.ERROR:
# If the tool dependency is not in an error state, perform any final actions that have been
# defined within the actions_group tag set, but outside of an <actions> tag, which defines
# the recipe for installing and compiling from source.
@@ -416,19 +415,19 @@
package_name=package_name,
actions_elem=actions_elems,
action_elem=None )
- if tool_dependency.status != self.app.install_model.ToolDependency.installation_status.ERROR:
+ if tool_dependency.status != self.install_model.ToolDependency.installation_status.ERROR:
log.debug( 'Tool dependency %s version %s has been installed in %s.' % \
( str( package_name ), str( package_version ), str( install_dir ) ) )
return tool_dependency
def mark_tool_dependency_installed( self, tool_dependency ):
- if tool_dependency.status not in [ self.app.install_model.ToolDependency.installation_status.ERROR,
- self.app.install_model.ToolDependency.installation_status.INSTALLED ]:
+ if tool_dependency.status not in [ self.install_model.ToolDependency.installation_status.ERROR,
+ self.install_model.ToolDependency.installation_status.INSTALLED ]:
log.debug( 'Changing status for tool dependency %s from %s to %s.' % \
( str( tool_dependency.name ),
str( tool_dependency.status ),
- str( self.app.install_model.ToolDependency.installation_status.INSTALLED ) ) )
- status = self.app.install_model.ToolDependency.installation_status.INSTALLED
+ str( self.install_model.ToolDependency.installation_status.INSTALLED ) ) )
+ status = self.install_model.ToolDependency.installation_status.INSTALLED
tool_dependency = tool_dependency_util.set_tool_dependency_attributes( self.app,
tool_dependency=tool_dependency,
status=status,
@@ -441,6 +440,8 @@
def __init__( self, app ):
self.app = app
+ self.install_model = self.app.install_model
+ self.tpm = tool_panel_manager.ToolPanelManager( self.app )
def get_repository_components_for_installation( self, encoded_tsr_id, encoded_tsr_ids, repo_info_dicts,
tool_panel_section_keys ):
@@ -497,10 +498,10 @@
def handle_repository_contents( self, tool_shed_repository, tool_path, repository_clone_url, relative_install_dir,
tool_shed=None, tool_section=None, shed_tool_conf=None, reinstalling=False ):
"""
- Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy
- (never the tool shed) when an administrator is installing a new repository or reinstalling an uninstalled repository.
+ Generate the metadata for the installed tool shed repository, among other things.
+ This method is called when an administrator is installing a new repository or
+ reinstalling an uninstalled repository.
"""
- install_model = self.app.install_model
shed_config_dict = self.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
irmm = InstalledRepositoryMetadataManager( self.app )
metadata_dict, invalid_file_tups = \
@@ -518,8 +519,8 @@
tool_shed_status_dict = suc.get_tool_shed_status_for_installed_repository( self.app, tool_shed_repository )
if tool_shed_status_dict:
tool_shed_repository.tool_shed_status = tool_shed_status_dict
- install_model.context.add( tool_shed_repository )
- install_model.context.flush()
+ self.install_model.context.add( tool_shed_repository )
+ self.install_model.context.flush()
if 'tool_dependencies' in metadata_dict and not reinstalling:
tool_dependencies = tool_dependency_util.create_tool_dependency_objects( self.app,
tool_shed_repository,
@@ -536,13 +537,12 @@
self.app.config.shed_tool_data_table_config,
persist=True )
if 'tools' in metadata_dict:
- tpm = tool_panel_manager.ToolPanelManager( self.app )
- tool_panel_dict = tpm.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
+ tool_panel_dict = self.tpm.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
sample_files = metadata_dict.get( 'sample_files', [] )
tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files )
tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
sample_files_copied = [ str( s ) for s in tool_index_sample_files ]
- repository_tools_tups = suc.get_repository_tools_tups( self.app, metadata_dict )
+ repository_tools_tups = irmm.get_repository_tools_tups( metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
repository_tools_tups = tool_util.handle_missing_data_table_entry( self.app,
@@ -561,14 +561,14 @@
sample_files,
tool_path=tool_path,
sample_files_copied=sample_files_copied )
- tpm.add_to_tool_panel( repository_name=tool_shed_repository.name,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.installed_changeset_revision,
- repository_tools_tups=repository_tools_tups,
- owner=tool_shed_repository.owner,
- shed_tool_conf=shed_tool_conf,
- tool_panel_dict=tool_panel_dict,
- new_install=True )
+ self.tpm.add_to_tool_panel( repository_name=tool_shed_repository.name,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
+ repository_tools_tups=repository_tools_tups,
+ owner=tool_shed_repository.owner,
+ shed_tool_conf=shed_tool_conf,
+ tool_panel_dict=tool_panel_dict,
+ new_install=True )
if 'data_manager' in metadata_dict:
new_data_managers = data_manager_util.install_data_managers( self.app,
self.app.config.shed_data_manager_config_file,
@@ -578,11 +578,11 @@
tool_shed_repository,
repository_tools_tups )
if 'datatypes' in metadata_dict:
- tool_shed_repository.status = install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
+ tool_shed_repository.status = self.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
tool_shed_repository.includes_datatypes = True
- install_model.context.add( tool_shed_repository )
- install_model.context.flush()
+ self.install_model.context.add( tool_shed_repository )
+ self.install_model.context.flush()
files_dir = relative_install_dir
if shed_config_dict.get( 'tool_path' ):
files_dir = os.path.join( shed_config_dict[ 'tool_path' ], files_dir )
@@ -628,7 +628,6 @@
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts
def initiate_repository_installation( self, installation_dict ):
- install_model = self.app.install_model
# The following installation_dict entries are all required.
created_or_updated_tool_shed_repositories = installation_dict[ 'created_or_updated_tool_shed_repositories' ]
filtered_repo_info_dicts = installation_dict[ 'filtered_repo_info_dicts' ]
@@ -648,11 +647,10 @@
tool_shed_url = installation_dict[ 'tool_shed_url' ]
# Handle contained tools.
if includes_tools_for_display_in_tool_panel and ( new_tool_panel_section_label or tool_panel_section_id ):
- tpm = tool_panel_manager.ToolPanelManager( self.app )
tool_panel_section_key, tool_section = \
- tpm.handle_tool_panel_section( self.app.toolbox,
- tool_panel_section_id=tool_panel_section_id,
- new_tool_panel_section_label=new_tool_panel_section_label )
+ self.tpm.handle_tool_panel_section( self.app.toolbox,
+ tool_panel_section_id=tool_panel_section_id,
+ new_tool_panel_section_label=new_tool_panel_section_label )
else:
tool_panel_section_key = None
tool_section = None
@@ -675,12 +673,12 @@
tsr_ids = [ r.id for r in created_or_updated_tool_shed_repositories ]
tool_shed_repositories = []
for tsr_id in tsr_ids:
- tsr = install_model.context.query( install_model.ToolShedRepository ).get( tsr_id )
+ tsr = self.install_model.context.query( self.install_model.ToolShedRepository ).get( tsr_id )
tool_shed_repositories.append( tsr )
clause_list = []
for tsr_id in tsr_ids:
- clause_list.append( install_model.ToolShedRepository.table.c.id == tsr_id )
- query = install_model.context.query( install_model.ToolShedRepository ).filter( or_( *clause_list ) )
+ clause_list.append( self.install_model.ToolShedRepository.table.c.id == tsr_id )
+ query = self.install_model.context.query( self.install_model.ToolShedRepository ).filter( or_( *clause_list ) )
return encoded_kwd, query, tool_shed_repositories, encoded_repository_ids
def install( self, tool_shed_url, name, owner, changeset_revision, install_options ):
@@ -726,7 +724,7 @@
shed_tool_conf = install_options.get( 'shed_tool_conf', None )
if shed_tool_conf:
# Get the tool_path setting.
- index, shed_conf_dict = suc.get_shed_tool_conf_dict( self.app, shed_tool_conf )
+ index, shed_conf_dict = self.tpm.get_shed_tool_conf_dict( shed_tool_conf )
tool_path = shed_conf_dict[ 'tool_path' ]
else:
# Pick a semi-random shed-related tool panel configuration file and get the tool_path setting.
@@ -792,11 +790,10 @@
self.order_components_for_installation( tsr_ids, repo_info_dicts, tool_panel_section_keys=tool_panel_section_keys )
# Install the repositories, keeping track of each one for later display.
for index, tsr_id in enumerate( ordered_tsr_ids ):
- install_model = self.app.install_model
- tool_shed_repository = install_model.context.query( install_model.ToolShedRepository ) \
- .get( self.app.security.decode_id( tsr_id ) )
- if tool_shed_repository.status in [ install_model.ToolShedRepository.installation_status.NEW,
- install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ tool_shed_repository = self.install_model.context.query( self.install_model.ToolShedRepository ) \
+ .get( self.app.security.decode_id( tsr_id ) )
+ if tool_shed_repository.status in [ self.install_model.ToolShedRepository.installation_status.NEW,
+ self.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
repo_info_dict = ordered_repo_info_dicts[ index ]
tool_panel_section_key = ordered_tool_panel_section_keys[ index ]
self.install_tool_shed_repository( tool_shed_repository,
@@ -814,7 +811,6 @@
def install_tool_shed_repository( self, tool_shed_repository, repo_info_dict, tool_panel_section_key, shed_tool_conf, tool_path,
install_tool_dependencies, reinstalling=False ):
- install_model = self.app.install_model
if tool_panel_section_key:
try:
tool_section = self.app.toolbox.tool_panel[ tool_panel_section_key ]
@@ -827,9 +823,8 @@
if isinstance( repo_info_dict, basestring ):
repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
# Clone each repository to the configured location.
- suc.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- install_model.ToolShedRepository.installation_status.CLONING )
+ self.update_tool_shed_repository_status( tool_shed_repository,
+ self.install_model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
relative_clone_dir = suc.generate_tool_shed_repository_install_dir( repository_clone_url,
@@ -859,13 +854,12 @@
tool_section=tool_section,
shed_tool_conf=shed_tool_conf,
reinstalling=reinstalling )
- install_model.context.refresh( tool_shed_repository )
+ self.install_model.context.refresh( tool_shed_repository )
metadata = tool_shed_repository.metadata
if 'tools' in metadata:
# Get the tool_versions from the tool shed for each tool in the installed change set.
- suc.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+ self.update_tool_shed_repository_status( tool_shed_repository,
+ self.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, str( tool_shed_repository.tool_shed ) )
params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( tool_shed_repository.name ),
str( tool_shed_repository.owner ),
@@ -885,9 +879,8 @@
if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-itsr" )
# Install tool dependencies.
- suc.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ self.update_tool_shed_repository_status( tool_shed_repository,
+ self.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', install_dir )
itdm = InstallToolDependencyManager( self.app )
@@ -896,9 +889,8 @@
tool_dependencies=tool_shed_repository.tool_dependencies,
from_tool_migration_manager=False )
basic_util.remove_dir( work_dir )
- suc.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- install_model.ToolShedRepository.installation_status.INSTALLED )
+ self.update_tool_shed_repository_status( tool_shed_repository,
+ self.install_model.ToolShedRepository.installation_status.INSTALLED )
if self.app.config.manage_dependency_relationships:
# Add the installed repository and any tool dependencies to the in-memory dictionaries
# in the installed_repository_manager.
@@ -907,7 +899,7 @@
# An error occurred while cloning the repository, so reset everything necessary to enable another attempt.
suc.set_repository_attributes( self.app,
tool_shed_repository,
- status=install_model.ToolShedRepository.installation_status.ERROR,
+ status=self.install_model.ToolShedRepository.installation_status.ERROR,
error_message=error_message,
deleted=False,
uninstalled=False,
@@ -962,3 +954,13 @@
ordered_repo_info_dicts.append( repo_info_dict )
ordered_tool_panel_section_keys.append( tool_panel_section_key )
return ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys
+
+ def update_tool_shed_repository_status( self, tool_shed_repository, status, error_message=None ):
+ """
+ Update the status of a tool shed repository in the process of being installed into Galaxy.
+ """
+ tool_shed_repository.status = status
+ if error_message:
+ tool_shed_repository.error_message = str( error_message )
+ self.install_model.context.add( tool_shed_repository )
+ self.install_model.context.flush()
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/galaxy_install/installed_repository_manager.py
--- a/lib/tool_shed/galaxy_install/installed_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py
@@ -14,6 +14,7 @@
from tool_shed.util import xml_util
from galaxy.model.orm import and_
+from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
from tool_shed.galaxy_install.tools import tool_panel_manager
@@ -80,9 +81,10 @@
repository.deleted = False
repository.status = self.install_model.ToolShedRepository.installation_status.INSTALLED
if repository.includes_tools_for_display_in_tool_panel:
+ irmm = InstalledRepositoryMetadataManager( self.app )
tpm = tool_panel_manager.ToolPanelManager( self.app )
metadata = repository.metadata
- repository_tools_tups = suc.get_repository_tools_tups( self.app, metadata )
+ repository_tools_tups = irmm.get_repository_tools_tups( metadata )
# Reload tools into the appropriate tool panel section.
tool_panel_dict = repository.metadata[ 'tool_panel_section' ]
tpm.add_to_tool_panel( repository.name,
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
--- a/lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
+++ b/lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
@@ -5,11 +5,14 @@
from galaxy.util import inflector
from galaxy.web.form_builder import SelectField
+from tool_shed.galaxy_install.tools import tool_panel_manager
from tool_shed.metadata import metadata_generator
from tool_shed.util import common_util
+from tool_shed.util import repository_maintenance_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_util
+from tool_shed.util import xml_util
log = logging.getLogger( __name__ )
@@ -19,6 +22,7 @@
def __init__( self, app ):
super( InstalledRepositoryMetadataManager, self ).__init__( app )
self.app = app
+ self.tpm = tool_panel_manager.ToolPanelManager( self.app )
def build_repository_ids_select_field( self, name='repository_ids', multiple=True, display='checkboxes' ):
"""Generate the current list of repositories for resetting metadata."""
@@ -46,11 +50,32 @@
return self.app.install_model.context.query( self.app.install_model.ToolShedRepository ) \
.filter( self.app.install_model.ToolShedRepository.table.c.uninstalled == False )
+ def get_repository_tools_tups( self, metadata_dict ):
+ """
+ Return a list of tuples of the form (relative_path, guid, tool) for each tool defined
+ in the received tool shed repository metadata.
+ """
+ repository_tools_tups = []
+ index, shed_conf_dict = self.tpm.get_shed_tool_conf_dict( metadata_dict.get( 'shed_config_filename' ) )
+ if 'tools' in metadata_dict:
+ for tool_dict in metadata_dict[ 'tools' ]:
+ load_relative_path = relative_path = tool_dict.get( 'tool_config', None )
+ if shed_conf_dict.get( 'tool_path' ):
+ load_relative_path = os.path.join( shed_conf_dict.get( 'tool_path' ), relative_path )
+ guid = tool_dict.get( 'guid', None )
+ if relative_path and guid:
+ tool = self.app.toolbox.load_tool( os.path.abspath( load_relative_path ), guid=guid )
+ else:
+ tool = None
+ if tool:
+ repository_tools_tups.append( ( relative_path, guid, tool ) )
+ return repository_tools_tups
+
def reset_all_metadata_on_installed_repository( self, id ):
"""Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
invalid_file_tups = []
metadata_dict = {}
- repository = suc.get_installed_tool_shed_repository( self.app, id )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( self.app, id )
repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( self.app )
if relative_install_dir:
@@ -67,7 +92,7 @@
persist=False )
repository.metadata = metadata_dict
if metadata_dict != original_metadata_dict:
- suc.update_in_shed_tool_config( self.app, repository )
+ self.update_in_shed_tool_config( repository )
self.app.install_model.context.add( repository )
self.app.install_model.context.flush()
log.debug( 'Metadata has been reset on repository %s.' % repository.name )
@@ -90,7 +115,7 @@
unsuccessful_count = 0
for repository_id in repository_ids:
try:
- repository = suc.get_installed_tool_shed_repository( self.app, repository_id )
+ repository = repository_maintenance_util.get_installed_tool_shed_repository( self.app, repository_id )
owner = str( repository.owner )
invalid_file_tups, metadata_dict = \
self.reset_all_metadata_on_installed_repository( repository_id )
@@ -117,3 +142,49 @@
message = 'Select at least one repository to on which to reset all metadata.'
status = 'error'
return message, status
+
+ def tool_shed_from_repository_clone_url( self, repository_clone_url ):
+ """Given a repository clone URL, return the tool shed that contains the repository."""
+ return common_util.remove_protocol_and_user_from_clone_url( repository_clone_url ).split( '/repos/' )[ 0 ].rstrip( '/' )
+
+ def update_in_shed_tool_config( self, repository ):
+ """
+ A tool shed repository is being updated so change the shed_tool_conf file. Parse the config
+ file to generate the entire list of config_elems instead of using the in-memory list.
+ """
+ shed_conf_dict = repository.get_shed_config_dict( self.app )
+ shed_tool_conf = shed_conf_dict[ 'config_filename' ]
+ tool_path = shed_conf_dict[ 'tool_path' ]
+ tool_panel_dict = self.tpm.generate_tool_panel_dict_from_shed_tool_conf_entries( repository )
+ repository_tools_tups = self.get_repository_tools_tups( repository.metadata )
+ clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository )
+ cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url( clone_url )
+ tool_shed = self.tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
+ owner = repository.owner
+ if not owner:
+ owner = suc.get_repository_owner( cleaned_repository_clone_url )
+ guid_to_tool_elem_dict = {}
+ for tool_config_filename, guid, tool in repository_tools_tups:
+ guid_to_tool_elem_dict[ guid ] = self.tpm.generate_tool_elem( tool_shed,
+ repository.name,
+ repository.changeset_revision,
+ repository.owner or '',
+ tool_config_filename,
+ tool,
+ None )
+ config_elems = []
+ tree, error_message = xml_util.parse_xml( shed_tool_conf )
+ if tree:
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'section':
+ for i, tool_elem in enumerate( elem ):
+ guid = tool_elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem[i] = guid_to_tool_elem_dict[ guid ]
+ elif elem.tag == 'tool':
+ guid = elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem = guid_to_tool_elem_dict[ guid ]
+ config_elems.append( elem )
+ self.tpm.config_elems_to_xml_file( config_elems, shed_tool_conf, tool_path )
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/galaxy_install/repair_repository_manager.py
--- a/lib/tool_shed/galaxy_install/repair_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/repair_repository_manager.py
@@ -190,6 +190,7 @@
if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.ERROR ]:
repair_dict = add_repair_dict_entry( repository.name, repository.error_message )
else:
+ irm = install_manager.InstallRepositoryManager( self.app )
# We have an installed tool shed repository, so handle tool dependencies if necessary.
if repository.missing_tool_dependencies and metadata and 'tool_dependencies' in metadata:
work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-itdep" )
@@ -204,8 +205,7 @@
error_message=None,
remove_from_disk=True )
# Install tool dependencies.
- suc.update_tool_shed_repository_status( self.app,
- repository,
+ irm.update_tool_shed_repository_status( repository,
self.app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', repository.repo_path( self.app ) )
@@ -218,5 +218,6 @@
if installed_tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR ]:
repair_dict = add_repair_dict_entry( repository.name, installed_tool_dependency.error_message )
basic_util.remove_dir( work_dir )
- suc.update_tool_shed_repository_status( self.app, repository, self.app.install_model.ToolShedRepository.installation_status.INSTALLED )
+ irm.update_tool_shed_repository_status( repository,
+ self.app.install_model.ToolShedRepository.installation_status.INSTALLED )
return repair_dict
\ No newline at end of file
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
--- a/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
+++ b/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
@@ -206,7 +206,7 @@
# Set changeset_revision here so suc.create_or_update_tool_shed_repository will find
# the previously installed and uninstalled repository instead of creating a new record.
changeset_revision = repository_db_record.installed_changeset_revision
- suc.reset_previously_installed_repository( install_model, repository_db_record )
+ self.reset_previously_installed_repository( repository_db_record )
can_update_db_record = True
else:
# No record exists in the database for the repository currently being processed.
@@ -409,3 +409,26 @@
return False
return True
return False
+
+ def reset_previously_installed_repository( self, repository ):
+ """
+ Reset the attributes of a tool_shed_repository that was previously installed.
+ The repository will be in some state other than INSTALLED, so all attributes
+ will be set to the default NEW state. This will enable the repository to be
+ freshly installed.
+ """
+ debug_msg = "Resetting tool_shed_repository '%s' for installation.\n" % str( repository.name )
+ debug_msg += "The current state of the tool_shed_repository is:\n"
+ debug_msg += "deleted: %s\n" % str( repository.deleted )
+ debug_msg += "tool_shed_status: %s\n" % str( repository.tool_shed_status )
+ debug_msg += "uninstalled: %s\n" % str( repository.uninstalled )
+ debug_msg += "status: %s\n" % str( repository.status )
+ debug_msg += "error_message: %s\n" % str( repository.error_message )
+ log.debug( debug_msg )
+ repository.deleted = False
+ repository.tool_shed_status = None
+ repository.uninstalled = False
+ repository.status = self.app.install_model.ToolShedRepository.installation_status.NEW
+ repository.error_message = None
+ self.app.install_model.context.add( repository )
+ self.app.install_model.context.flush()
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/galaxy_install/tool_migration_manager.py
--- a/lib/tool_shed/galaxy_install/tool_migration_manager.py
+++ b/lib/tool_shed/galaxy_install/tool_migration_manager.py
@@ -74,7 +74,7 @@
self.tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, defined_tool_shed_url )
self.tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( self.tool_shed_url )
self.repository_owner = common_util.REPOSITORY_OWNER
- index, self.shed_config_dict = suc.get_shed_tool_conf_dict( app, self.migrated_tools_config )
+ index, self.shed_config_dict = self.tpm.get_shed_tool_conf_dict( self.migrated_tools_config )
# Since tool migration scripts can be executed any number of times, we need to
# make sure the appropriate tools are defined in tool_conf.xml. If no tools
# associated with the migration stage are defined, no repositories will be installed
@@ -442,7 +442,7 @@
tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files )
tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
- repository_tools_tups = suc.get_repository_tools_tups( self.app, metadata_dict )
+ repository_tools_tups = irmm.get_repository_tools_tups( metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
repository_tools_tups = tool_util.handle_missing_data_table_entry( self.app,
@@ -471,12 +471,12 @@
new_install=True )
if install_dependencies and tool_dependencies and has_tool_dependencies:
# Install tool dependencies.
- suc.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
+ irm = install_manager.InstallRepositoryManager( self.app )
+ itdm = install_manager.InstallToolDependencyManager( self.app )
+ irm.update_tool_shed_repository_status( tool_shed_repository,
self.app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from disk.
tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
- itdm = install_manager.InstallToolDependencyManager( self.app )
installed_tool_dependencies = itdm.install_specified_tool_dependencies( tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
tool_dependencies=tool_dependencies,
@@ -533,6 +533,7 @@
if cloned_ok and is_installed:
print "Skipping automatic install of repository '", tool_shed_repository.name, "' because it has already been installed in location ", clone_dir
else:
+ irm = install_manager.InstallRepositoryManager( self.app )
repository_clone_url = os.path.join( self.tool_shed_url, 'repos', tool_shed_repository.owner, tool_shed_repository.name )
relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
install_dir = os.path.join( clone_dir, tool_shed_repository.name )
@@ -542,8 +543,7 @@
tool_shed_repository.owner,
tool_shed_repository.installed_changeset_revision )
if not cloned_ok:
- suc.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
+ irm.update_tool_shed_repository_status( tool_shed_repository,
self.app.install_model.ToolShedRepository.installation_status.CLONING )
cloned_ok, error_message = hg_util.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok and not is_installed:
@@ -556,8 +556,7 @@
self.app.install_model.context.refresh( tool_shed_repository )
metadata_dict = tool_shed_repository.metadata
if 'tools' in metadata_dict:
- suc.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
+ irm.update_tool_shed_repository_status( tool_shed_repository,
self.app.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
# Get the tool_versions from the tool shed for each tool in the installed change set.
url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
@@ -595,11 +594,11 @@
parent_id=tool_version_using_old_id.id )
self.app.install_model.context.add( tool_version_association )
self.app.install_model.context.flush()
- suc.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.install_model.ToolShedRepository.installation_status.INSTALLED )
+ irm.update_tool_shed_repository_status( tool_shed_repository,
+ self.app.install_model.ToolShedRepository.installation_status.INSTALLED )
else:
print 'Error attempting to clone repository %s: %s' % ( str( tool_shed_repository.name ), str( error_message ) )
- suc.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
+ irm.update_tool_shed_repository_status( tool_shed_repository,
self.app.install_model.ToolShedRepository.installation_status.ERROR,
error_message=error_message )
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/galaxy_install/tools/tool_panel_manager.py
--- a/lib/tool_shed/galaxy_install/tools/tool_panel_manager.py
+++ b/lib/tool_shed/galaxy_install/tools/tool_panel_manager.py
@@ -1,5 +1,7 @@
import logging
import os
+import shutil
+import tempfile
import galaxy.tools
from galaxy.tools.search import ToolBoxSearch
@@ -19,9 +21,11 @@
self.app = app
def add_to_shed_tool_config( self, shed_tool_conf_dict, elem_list ):
- # A tool shed repository is being installed so change the shed_tool_conf file. Parse the
- # config file to generate the entire list of config_elems instead of using the in-memory list
- # since it will be a subset of the entire list if one or more repositories have been deactivated.
+ """
+        A tool shed repository is being installed so change the shed_tool_conf file. Parse the
+ config file to generate the entire list of config_elems instead of using the in-memory list
+ since it will be a subset of the entire list if one or more repositories have been deactivated.
+ """
shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
tool_path = shed_tool_conf_dict[ 'tool_path' ]
config_elems = []
@@ -34,13 +38,13 @@
for elem_entry in elem_list:
config_elems.append( elem_entry )
# Persist the altered shed_tool_config file.
- suc.config_elems_to_xml_file( self.app, config_elems, shed_tool_conf, tool_path )
+ self.config_elems_to_xml_file( config_elems, shed_tool_conf, tool_path )
def add_to_tool_panel( self, repository_name, repository_clone_url, changeset_revision, repository_tools_tups, owner,
shed_tool_conf, tool_panel_dict, new_install=True ):
"""A tool shed repository is being installed or updated so handle tool panel alterations accordingly."""
# We need to change the in-memory version and the file system version of the shed_tool_conf file.
- index, shed_tool_conf_dict = suc.get_shed_tool_conf_dict( self.app, shed_tool_conf )
+ index, shed_tool_conf_dict = self.get_shed_tool_conf_dict( shed_tool_conf )
tool_path = shed_tool_conf_dict[ 'tool_path' ]
# Generate the list of ElementTree Element objects for each section or tool.
elem_list = self.generate_tool_panel_elem_list( repository_name,
@@ -81,6 +85,44 @@
self.app.toolbox.write_integrated_tool_panel_config_file()
self.app.toolbox_search = ToolBoxSearch( self.app.toolbox )
+ def config_elems_to_xml_file( self, config_elems, config_filename, tool_path ):
+ """
+ Persist the current in-memory list of config_elems to a file named by the
+ value of config_filename.
+ """
+ fd, filename = tempfile.mkstemp( prefix="tmp-toolshed-cetxf" )
+ os.write( fd, '<?xml version="1.0"?>\n' )
+ os.write( fd, '<toolbox tool_path="%s">\n' % str( tool_path ) )
+ for elem in config_elems:
+ os.write( fd, '%s' % xml_util.xml_to_string( elem, use_indent=True ) )
+ os.write( fd, '</toolbox>\n' )
+ os.close( fd )
+ shutil.move( filename, os.path.abspath( config_filename ) )
+ os.chmod( config_filename, 0644 )
+
+ def generate_tool_elem( self, tool_shed, repository_name, changeset_revision, owner, tool_file_path,
+ tool, tool_section ):
+ """Create and return an ElementTree tool Element."""
+ if tool_section is not None:
+ tool_elem = XmlET.SubElement( tool_section, 'tool' )
+ else:
+ tool_elem = XmlET.Element( 'tool' )
+ tool_elem.attrib[ 'file' ] = tool_file_path
+ tool_elem.attrib[ 'guid' ] = tool.guid
+ tool_shed_elem = XmlET.SubElement( tool_elem, 'tool_shed' )
+ tool_shed_elem.text = tool_shed
+ repository_name_elem = XmlET.SubElement( tool_elem, 'repository_name' )
+ repository_name_elem.text = repository_name
+ repository_owner_elem = XmlET.SubElement( tool_elem, 'repository_owner' )
+ repository_owner_elem.text = owner
+ changeset_revision_elem = XmlET.SubElement( tool_elem, 'installed_changeset_revision' )
+ changeset_revision_elem.text = changeset_revision
+ id_elem = XmlET.SubElement( tool_elem, 'id' )
+ id_elem.text = tool.id
+ version_elem = XmlET.SubElement( tool_elem, 'version' )
+ version_elem.text = tool.version
+ return tool_elem
+
def generate_tool_panel_dict_for_new_install( self, tool_dicts, tool_section=None ):
"""
When installing a repository that contains tools, all tools must currently be defined
@@ -125,6 +167,60 @@
tool_panel_dict[ guid ] = tool_section_dicts
return tool_panel_dict
+ def generate_tool_panel_dict_from_shed_tool_conf_entries( self, repository ):
+ """
+ Keep track of the section in the tool panel in which this repository's tools
+ will be contained by parsing the shed_tool_conf in which the repository's tools
+ are defined and storing the tool panel definition of each tool in the repository.
+ This method is called only when the repository is being deactivated or un-installed
+ and allows for activation or re-installation using the original layout.
+ """
+ tool_panel_dict = {}
+ shed_tool_conf, tool_path, relative_install_dir = \
+ suc.get_tool_panel_config_tool_path_install_dir( self.app, repository )
+ metadata = repository.metadata
+ # Create a dictionary of tool guid and tool config file name for each tool in the repository.
+ guids_and_configs = {}
+ if 'tools' in metadata:
+ for tool_dict in metadata[ 'tools' ]:
+ guid = tool_dict[ 'guid' ]
+ tool_config = tool_dict[ 'tool_config' ]
+ file_name = basic_util.strip_path( tool_config )
+ guids_and_configs[ guid ] = file_name
+ # Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
+ tree, error_message = xml_util.parse_xml( shed_tool_conf )
+ if tree is None:
+ return tool_panel_dict
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'tool':
+ guid = elem.get( 'guid' )
+ if guid in guids_and_configs:
+ # The tool is displayed in the tool panel outside of any tool sections.
+ tool_section_dict = dict( tool_config=guids_and_configs[ guid ], id='', name='', version='' )
+ if guid in tool_panel_dict:
+ tool_panel_dict[ guid ].append( tool_section_dict )
+ else:
+ tool_panel_dict[ guid ] = [ tool_section_dict ]
+ elif elem.tag == 'section':
+ section_id = elem.get( 'id' ) or ''
+ section_name = elem.get( 'name' ) or ''
+ section_version = elem.get( 'version' ) or ''
+ for section_elem in elem:
+ if section_elem.tag == 'tool':
+ guid = section_elem.get( 'guid' )
+ if guid in guids_and_configs:
+ # The tool is displayed in the tool panel inside the current tool section.
+ tool_section_dict = dict( tool_config=guids_and_configs[ guid ],
+ id=section_id,
+ name=section_name,
+ version=section_version )
+ if guid in tool_panel_dict:
+ tool_panel_dict[ guid ].append( tool_section_dict )
+ else:
+ tool_panel_dict[ guid ] = [ tool_section_dict ]
+ return tool_panel_dict
+
def generate_tool_panel_elem_list( self, repository_name, repository_clone_url, changeset_revision,
tool_panel_dict, repository_tools_tups, owner='' ):
"""Generate a list of ElementTree Element objects for each section or tool."""
@@ -157,21 +253,21 @@
if tup_guid == guid:
break
if inside_section:
- tool_elem = suc.generate_tool_elem( tool_shed,
- repository_name,
- changeset_revision,
- owner,
- tool_file_path,
- tool,
- tool_section )
+ tool_elem = self.generate_tool_elem( tool_shed,
+ repository_name,
+ changeset_revision,
+ owner,
+ tool_file_path,
+ tool,
+ tool_section )
else:
- tool_elem = suc.generate_tool_elem( tool_shed,
- repository_name,
- changeset_revision,
- owner,
- tool_file_path,
- tool,
- None )
+ tool_elem = self.generate_tool_elem( tool_shed,
+ repository_name,
+ changeset_revision,
+ owner,
+ tool_file_path,
+ tool,
+ None )
if inside_section:
if section_in_elem_list:
elem_list[ index ] = tool_section
@@ -238,6 +334,19 @@
log.debug( "Loading new tool panel section: %s" % str( tool_section.name ) )
return tool_panel_section_key, tool_section
+ def get_shed_tool_conf_dict( self, shed_tool_conf ):
+ """
+ Return the in-memory version of the shed_tool_conf file, which is stored in
+ the config_elems entry in the shed_tool_conf_dict associated with the file.
+ """
+ for index, shed_tool_conf_dict in enumerate( self.app.toolbox.shed_tool_confs ):
+ if shed_tool_conf == shed_tool_conf_dict[ 'config_filename' ]:
+ return index, shed_tool_conf_dict
+ else:
+ file_name = basic_util.strip_path( shed_tool_conf_dict[ 'config_filename' ] )
+ if shed_tool_conf == file_name:
+ return index, shed_tool_conf_dict
+
def handle_tool_panel_section( self, toolbox, tool_panel_section_id=None, new_tool_panel_section_label=None ):
"""Return a ToolSection object retrieved from the current in-memory tool_panel."""
# If tool_panel_section_id is received, the section exists in the tool panel. In this
@@ -299,10 +408,12 @@
return tool_section, tool_panel_section_key
def remove_from_shed_tool_config( self, shed_tool_conf_dict, guids_to_remove ):
- # A tool shed repository is being uninstalled so change the shed_tool_conf file.
- # Parse the config file to generate the entire list of config_elems instead of
- # using the in-memory list since it will be a subset of the entire list if one
- # or more repositories have been deactivated.
+ """
+ A tool shed repository is being uninstalled so change the shed_tool_conf file.
+ Parse the config file to generate the entire list of config_elems instead of
+ using the in-memory list since it will be a subset of the entire list if one
+ or more repositories have been deactivated.
+ """
shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
tool_path = shed_tool_conf_dict[ 'tool_path' ]
config_elems = []
@@ -330,7 +441,7 @@
for config_elem in config_elems_to_remove:
config_elems.remove( config_elem )
# Persist the altered in-memory version of the tool config.
- suc.config_elems_to_xml_file( self.app, config_elems, shed_tool_conf, tool_path )
+ self.config_elems_to_xml_file( config_elems, shed_tool_conf, tool_path )
def remove_from_tool_panel( self, repository, shed_tool_conf, uninstall ):
"""
@@ -340,7 +451,7 @@
# Determine where the tools are currently defined in the tool panel and store this
# information so the tools can be displayed in the same way when the repository is
# activated or reinstalled.
- tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( self.app, repository )
+ tool_panel_dict = self.generate_tool_panel_dict_from_shed_tool_conf_entries( repository )
repository.metadata[ 'tool_panel_section' ] = tool_panel_dict
self.app.install_model.context.add( repository )
self.app.install_model.context.flush()
@@ -351,7 +462,7 @@
for guid_to_remove in guids_to_remove:
if guid_to_remove in self.app.toolbox.tools_by_id:
del self.app.toolbox.tools_by_id[ guid_to_remove ]
- index, shed_tool_conf_dict = suc.get_shed_tool_conf_dict( self.app, shed_tool_conf )
+ index, shed_tool_conf_dict = self.get_shed_tool_conf_dict( shed_tool_conf )
if uninstall:
# Remove from the shed_tool_conf file on disk.
self.remove_from_shed_tool_config( shed_tool_conf_dict, guids_to_remove )
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/metadata/repository_metadata_manager.py
--- a/lib/tool_shed/metadata/repository_metadata_manager.py
+++ b/lib/tool_shed/metadata/repository_metadata_manager.py
@@ -951,11 +951,9 @@
unsuccessful_count = 0
for repository_id in repository_ids:
try:
- # We're in the tool shed.
repository = suc.get_repository_in_tool_shed( self.app, repository_id )
- owner = str( repository.user.username )
invalid_file_tups, metadata_dict = \
- self.reset_all_metadata_on_repository_in_tool_shed( self.user, repository_id )
+ self.reset_all_metadata_on_repository_in_tool_shed( repository_id )
if invalid_file_tups:
message = tool_util.generate_message_for_invalid_tools( self.app,
invalid_file_tups,
@@ -966,7 +964,7 @@
unsuccessful_count += 1
else:
log.debug( "Successfully reset metadata on repository %s owned by %s" % \
- ( str( repository.name ), owner ) )
+ ( str( repository.name ), str( repository.user.username ) ) )
successful_count += 1
except:
log.exception( "Error attempting to reset metadata on repository %s" % str( repository.name ) )
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/util/data_manager_util.py
--- a/lib/tool_shed/util/data_manager_util.py
+++ b/lib/tool_shed/util/data_manager_util.py
@@ -1,7 +1,10 @@
import logging
import os
+
+from tool_shed.galaxy_install.tools import tool_panel_manager
+
+from tool_shed.util import shed_util_common as suc
from tool_shed.util import xml_util
-import tool_shed.util.shed_util_common as suc
log = logging.getLogger( __name__ )
@@ -17,6 +20,7 @@
def install_data_managers( app, shed_data_manager_conf_filename, metadata_dict, shed_config_dict, relative_install_dir, repository, repository_tools_tups ):
rval = []
if 'data_manager' in metadata_dict:
+ tpm = tool_panel_manager.ToolPanelManager( app )
repository_tools_by_guid = {}
for tool_tup in repository_tools_tups:
repository_tools_by_guid[ tool_tup[ 1 ] ] = dict( tool_config_filename=tool_tup[ 0 ], tool=tool_tup[ 2 ] )
@@ -70,13 +74,13 @@
elem.set( 'shed_conf_file', shed_config_dict['config_filename'] )
if elem.get( 'tool_file', None ) is not None:
del elem.attrib[ 'tool_file' ] #remove old tool_file info
- tool_elem = suc.generate_tool_elem( repository.tool_shed,
- repository.name,
- repository.installed_changeset_revision,
- repository.owner,
- tool_config_filename,
- tool,
- None )
+ tool_elem = tpm.generate_tool_elem( repository.tool_shed,
+ repository.name,
+ repository.installed_changeset_revision,
+ repository.owner,
+ tool_config_filename,
+ tool,
+ None )
elem.insert( 0, tool_elem )
data_manager = app.data_managers.load_manager_from_elem( elem, tool_path=shed_config_dict.get( 'tool_path', '' ), replace_existing=True )
if data_manager:
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/util/repository_maintenance_util.py
--- a/lib/tool_shed/util/repository_maintenance_util.py
+++ b/lib/tool_shed/util/repository_maintenance_util.py
@@ -40,6 +40,20 @@
config.write( new_file )
new_file.close()
+def check_or_update_tool_shed_status_for_installed_repository( app, repository ):
+ updated = False
+ tool_shed_status_dict = suc.get_tool_shed_status_for_installed_repository( app, repository )
+ if tool_shed_status_dict:
+ ok = True
+ if tool_shed_status_dict != repository.tool_shed_status:
+ repository.tool_shed_status = tool_shed_status_dict
+ app.install_model.context.add( repository )
+ app.install_model.context.flush()
+ updated = True
+ else:
+ ok = False
+ return ok, updated
+
def create_repo_info_dict( app, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None,
repository=None, repository_metadata=None, tool_dependencies=None, repository_dependencies=None ):
"""
@@ -171,6 +185,11 @@
sa_session.flush()
return role
+def get_installed_tool_shed_repository( app, id ):
+ """Get a tool shed repository record from the Galaxy database defined by the id."""
+ return app.install_model.context.query( app.install_model.ToolShedRepository ) \
+ .get( app.security.decode_id( id ) )
+
def get_repo_info_dict( app, user, repository_id, changeset_revision ):
repository = suc.get_repository_in_tool_shed( app, repository_id )
repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1,24 +1,22 @@
+import json
import logging
import os
import re
import shutil
import string
-import tempfile
+import sqlalchemy.orm.exc
+
from galaxy import util
-from galaxy.util import asbool
-from galaxy.util import json
from galaxy.web import url_for
-from galaxy.web.form_builder import SelectField
from galaxy.datatypes import checkers
from galaxy.model.orm import and_
from galaxy.model.orm import or_
-import sqlalchemy.orm.exc
+
from tool_shed.util import basic_util
from tool_shed.util import common_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
-from tool_shed.util import xml_util
-import tool_shed.repository_types.util as rt_util
+
from xml.etree import ElementTree as XmlET
from urllib2 import HTTPError
@@ -84,49 +82,6 @@
'${host}'
"""
-def build_tool_dependencies_select_field( app, tool_shed_repository, name, multiple=True, display='checkboxes', uninstalled_only=False ):
- """Method called from Galaxy to generate the current list of tool dependency ids for an installed tool shed repository."""
- tool_dependencies_select_field = SelectField( name=name, multiple=multiple, display=display )
- for tool_dependency in tool_shed_repository.tool_dependencies:
- if uninstalled_only:
- if tool_dependency.status not in [ app.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
- app.install_model.ToolDependency.installation_status.UNINSTALLED ]:
- continue
- else:
- if tool_dependency.status in [ app.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
- app.install_model.ToolDependency.installation_status.UNINSTALLED ]:
- continue
- option_label = '%s version %s' % ( str( tool_dependency.name ), str( tool_dependency.version ) )
- option_value = app.security.encode_id( tool_dependency.id )
- tool_dependencies_select_field.add_option( option_label, option_value )
- return tool_dependencies_select_field
-
-def check_or_update_tool_shed_status_for_installed_repository( app, repository ):
- updated = False
- tool_shed_status_dict = get_tool_shed_status_for_installed_repository( app, repository )
- if tool_shed_status_dict:
- ok = True
- if tool_shed_status_dict != repository.tool_shed_status:
- repository.tool_shed_status = tool_shed_status_dict
- app.install_model.context.add( repository )
- app.install_model.context.flush()
- updated = True
- else:
- ok = False
- return ok, updated
-
-def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
- """Persist the current in-memory list of config_elems to a file named by the value of config_filename."""
- fd, filename = tempfile.mkstemp( prefix="tmp-toolshed-cetxf" )
- os.write( fd, '<?xml version="1.0"?>\n' )
- os.write( fd, '<toolbox tool_path="%s">\n' % str( tool_path ) )
- for elem in config_elems:
- os.write( fd, '%s' % xml_util.xml_to_string( elem, use_indent=True ) )
- os.write( fd, '</toolbox>\n' )
- os.close( fd )
- shutil.move( filename, os.path.abspath( config_filename ) )
- os.chmod( config_filename, 0644 )
-
def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url,
metadata_dict, status, current_changeset_revision=None, owner='', dist_to_shed=False ):
"""
@@ -248,28 +203,6 @@
sharable_url += '/%s' % changeset_revision
return sharable_url
-def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
- """Create and return an ElementTree tool Element."""
- if tool_section is not None:
- tool_elem = XmlET.SubElement( tool_section, 'tool' )
- else:
- tool_elem = XmlET.Element( 'tool' )
- tool_elem.attrib[ 'file' ] = tool_file_path
- tool_elem.attrib[ 'guid' ] = tool.guid
- tool_shed_elem = XmlET.SubElement( tool_elem, 'tool_shed' )
- tool_shed_elem.text = tool_shed
- repository_name_elem = XmlET.SubElement( tool_elem, 'repository_name' )
- repository_name_elem.text = repository_name
- repository_owner_elem = XmlET.SubElement( tool_elem, 'repository_owner' )
- repository_owner_elem.text = owner
- changeset_revision_elem = XmlET.SubElement( tool_elem, 'installed_changeset_revision' )
- changeset_revision_elem.text = changeset_revision
- id_elem = XmlET.SubElement( tool_elem, 'id' )
- id_elem.text = tool.id
- version_elem = XmlET.SubElement( tool_elem, 'version' )
- version_elem.text = tool.version
- return tool_elem
-
def generate_tool_guid( repository_clone_url, tool ):
"""
Generate a guid for the installed tool. It is critical that this guid matches the guid for
@@ -279,57 +212,6 @@
tmp_url = common_util.remove_protocol_and_user_from_clone_url( repository_clone_url )
return '%s/%s/%s' % ( tmp_url, tool.id, tool.version )
-def generate_tool_panel_dict_from_shed_tool_conf_entries( app, repository ):
- """
- Keep track of the section in the tool panel in which this repository's tools will be contained by parsing the shed_tool_conf in
- which the repository's tools are defined and storing the tool panel definition of each tool in the repository. This method is called
- only when the repository is being deactivated or uninstalled and allows for activation or reinstallation using the original layout.
- """
- tool_panel_dict = {}
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( app, repository )
- metadata = repository.metadata
- # Create a dictionary of tool guid and tool config file name for each tool in the repository.
- guids_and_configs = {}
- if 'tools' in metadata:
- for tool_dict in metadata[ 'tools' ]:
- guid = tool_dict[ 'guid' ]
- tool_config = tool_dict[ 'tool_config' ]
- file_name = basic_util.strip_path( tool_config )
- guids_and_configs[ guid ] = file_name
- # Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
- tree, error_message = xml_util.parse_xml( shed_tool_conf )
- if tree is None:
- return tool_panel_dict
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'tool':
- guid = elem.get( 'guid' )
- if guid in guids_and_configs:
- # The tool is displayed in the tool panel outside of any tool sections.
- tool_section_dict = dict( tool_config=guids_and_configs[ guid ], id='', name='', version='' )
- if guid in tool_panel_dict:
- tool_panel_dict[ guid ].append( tool_section_dict )
- else:
- tool_panel_dict[ guid ] = [ tool_section_dict ]
- elif elem.tag == 'section':
- section_id = elem.get( 'id' ) or ''
- section_name = elem.get( 'name' ) or ''
- section_version = elem.get( 'version' ) or ''
- for section_elem in elem:
- if section_elem.tag == 'tool':
- guid = section_elem.get( 'guid' )
- if guid in guids_and_configs:
- # The tool is displayed in the tool panel inside the current tool section.
- tool_section_dict = dict( tool_config=guids_and_configs[ guid ],
- id=section_id,
- name=section_name,
- version=section_version )
- if guid in tool_panel_dict:
- tool_panel_dict[ guid ].append( tool_section_dict )
- else:
- tool_panel_dict[ guid ] = [ tool_section_dict ]
- return tool_panel_dict
-
def generate_tool_shed_repository_install_dir( repository_clone_url, changeset_revision ):
"""
Generate a repository installation directory that guarantees repositories with the same
@@ -483,10 +365,6 @@
return ','.join( installing_repository_ids )
return installing_repository_ids
-def get_installed_tool_shed_repository( app, id ):
- """Get a tool shed repository record from the Galaxy database defined by the id."""
- return app.install_model.context.query( app.install_model.ToolShedRepository ).get( app.security.decode_id( id ) )
-
def get_latest_changeset_revision( app, repository, repo ):
repository_tip = repository.tip( app )
repository_metadata = get_repository_metadata_by_changeset_revision( app,
@@ -565,8 +443,10 @@
def get_or_create_tool_shed_repository( app, tool_shed, name, owner, changeset_revision ):
"""
- Return a tool shed repository database record defined by the combination of tool shed, repository name, repository owner and changeset_revision
- or installed_changeset_revision. A new tool shed repository record will be created if one is not located.
+ Return a tool shed repository database record defined by the combination of
+ tool shed, repository name, repository owner and changeset_revision or
+ installed_changeset_revision. A new tool shed repository record will be
+ created if one is not located.
"""
install_model = app.install_model
# We store the port in the database.
@@ -697,7 +577,7 @@
for rd_tup in repository_dependencies:
tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
common_util.parse_repository_dependency_tuple( rd_tup )
- if not asbool( only_if_compiling_contained_td ):
+ if not util.asbool( only_if_compiling_contained_td ):
has_repository_dependencies = True
break
# Set has_repository_dependencies_only_if_compiling_contained_td, which will be True only if at
@@ -706,7 +586,7 @@
for rd_tup in repository_dependencies:
tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
common_util.parse_repository_dependency_tuple( rd_tup )
- if asbool( only_if_compiling_contained_td ):
+ if util.asbool( only_if_compiling_contained_td ):
has_repository_dependencies_only_if_compiling_contained_td = True
break
return has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td
@@ -838,8 +718,8 @@
# where the repository dependency is not installed prior to the dependent repository's tool dependency compilation process, the tool
# dependency compilation framework will install the repository dependency prior to compilation of the dependent repository's tool
# dependency.
- if not asbool( only_if_compiling_contained_td ):
- if asbool( prior_installation_required ):
+ if not util.asbool( only_if_compiling_contained_td ):
+ if util.asbool( prior_installation_required ):
if is_tool_shed_client( app ):
# We store the port, if one exists, in the database.
tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
@@ -899,34 +779,6 @@
query = app.model.context.query( app.model.Repository )
return query
-def get_repository_tools_tups( app, metadata_dict ):
- """Return a list of tuples of the form (relative_path, guid, tool) for each tool defined in the received tool shed repository metadata."""
- repository_tools_tups = []
- index, shed_conf_dict = get_shed_tool_conf_dict( app, metadata_dict.get( 'shed_config_filename' ) )
- if 'tools' in metadata_dict:
- for tool_dict in metadata_dict[ 'tools' ]:
- load_relative_path = relative_path = tool_dict.get( 'tool_config', None )
- if shed_conf_dict.get( 'tool_path' ):
- load_relative_path = os.path.join( shed_conf_dict.get( 'tool_path' ), relative_path )
- guid = tool_dict.get( 'guid', None )
- if relative_path and guid:
- tool = app.toolbox.load_tool( os.path.abspath( load_relative_path ), guid=guid )
- else:
- tool = None
- if tool:
- repository_tools_tups.append( ( relative_path, guid, tool ) )
- return repository_tools_tups
-
-def get_shed_tool_conf_dict( app, shed_tool_conf ):
- """Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry in the shed_tool_conf_dict associated with the file."""
- for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ):
- if shed_tool_conf == shed_tool_conf_dict[ 'config_filename' ]:
- return index, shed_tool_conf_dict
- else:
- file_name = basic_util.strip_path( shed_tool_conf_dict[ 'config_filename' ] )
- if shed_tool_conf == file_name:
- return index, shed_tool_conf_dict
-
def get_skip_tool_test_by_changeset_revision( app, changeset_revision ):
"""Return a skip_tool_test record whose initial_changeset_revision is the received changeset_revision."""
# There should only be one, but we'll use first() so callers won't have to handle exceptions.
@@ -1238,7 +1090,7 @@
email_alerts.append( user.email )
else:
subject = "Galaxy tool shed update alert for repository named %s" % str( repository.name )
- email_alerts = json.from_json_string( repository.email_alerts )
+ email_alerts = json.loads( repository.email_alerts )
for email in email_alerts:
to = email.strip()
# Send it
@@ -1262,7 +1114,8 @@
return False
def is_tool_shed_client( app ):
- """ The tool shed and clients to the tool (i.e. Galaxy) require a lot
+ """
+ The tool shed and clients to the tool (i.e. Galaxy) require a lot
of similar functionality in this file but with small differences. This
method should determine if the app performing the action is the tool shed
or a client of the tool shed.
@@ -1338,32 +1191,12 @@
return tool_shed_repository, previous_changeset_revision
return None, None
-def reset_previously_installed_repository( install_model, repository ):
- """
- Reset the attributes of a tool_shed_repository that was previously installed. The repository will be in some state other than INSTALLED,
- so all attributes will be set to the default NEW state. This will enable the repository to be freshly installed.
- """
- debug_msg = "Resetting tool_shed_repository '%s' for installation.\n" % str( repository.name )
- debug_msg += "The current state of the tool_shed_repository is:\n"
- debug_msg += "deleted: %s\n" % str( repository.deleted )
- debug_msg += "tool_shed_status: %s\n" % str( repository.tool_shed_status )
- debug_msg += "uninstalled: %s\n" % str( repository.uninstalled )
- debug_msg += "status: %s\n" % str( repository.status )
- debug_msg += "error_message: %s\n" % str( repository.error_message )
- log.debug( debug_msg )
- repository.deleted = False
- repository.tool_shed_status = None
- repository.uninstalled = False
- repository.status = install_model.ToolShedRepository.installation_status.NEW
- repository.error_message = None
- install_model.context.add( repository )
- install_model.context.flush()
-
def set_image_paths( app, encoded_repository_id, text ):
"""
- Handle tool help image display for tools that are contained in repositories in the tool shed or installed into Galaxy as well as image
- display in repository README files. This method will determine the location of the image file and return the path to it that will enable
- the caller to open the file.
+ Handle tool help image display for tools that are contained in repositories in
+ the tool shed or installed into Galaxy as well as image display in repository
+ README files. This method will determine the location of the image file and
+ return the path to it that will enable the caller to open the file.
"""
if text:
if is_tool_shed_client( app ):
@@ -1373,22 +1206,32 @@
route_to_images = '/repository/static/images/%s' % encoded_repository_id
# We used to require $PATH_TO_IMAGES, but we now eliminate it if it's used.
text = text.replace( '$PATH_TO_IMAGES', '' )
- # Eliminate the invalid setting of ./static/images since the routes will properly display images contained in that directory.
+ # Eliminate the invalid setting of ./static/images since the routes will
+ # properly display images contained in that directory.
text = text.replace( './static/images', '' )
- # Eliminate the default setting of /static/images since the routes will properly display images contained in that directory.
+ # Eliminate the default setting of /static/images since the routes will
+ # properly display images contained in that directory.
text = text.replace( '/static/images', '' )
- # Use regex to instantiate routes into the defined image paths, but replace paths that start with neither http:// nor https://,
- # which will allow for settings like .. images:: http_files/images/help.png
+ # Use regex to instantiate routes into the defined image paths, but replace
+ # paths that start with neither http:// nor https://, which will allow for
+ # settings like .. images:: http_files/images/help.png
for match in re.findall( '.. image:: (?!http)/?(.+)', text ):
text = text.replace( match, match.replace( '/', '%2F' ) )
text = re.sub( r'\.\. image:: (?!https?://)/?(.+)', r'.. image:: %s/\1' % route_to_images, text )
return text
def set_only_if_compiling_contained_td( repository, required_repository ):
- """Return True if the received required_repository is only needed to compile a tool dependency defined for the received repository."""
- # This method is called only from Galaxy when rendering repository dependencies for an installed tool shed repository.
+ """
+ Return True if the received required_repository is only needed to compile a tool
+ dependency defined for the received repository.
+ """
+ # This method is called only from Galaxy when rendering repository dependencies
+ # for an installed tool shed repository.
# TODO: Do we need to check more than changeset_revision here?
- required_repository_tup = [ required_repository.tool_shed, required_repository.name, required_repository.owner, required_repository.changeset_revision ]
+ required_repository_tup = [ required_repository.tool_shed, \
+ required_repository.name, \
+ required_repository.owner, \
+ required_repository.changeset_revision ]
for tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td:
partial_tup = tup[ 0:4 ]
if partial_tup == required_repository_tup:
@@ -1430,63 +1273,8 @@
app.install_model.context.add( repository )
app.install_model.context.flush()
-def tool_shed_from_repository_clone_url( repository_clone_url ):
- """Given a repository clone URL, return the tool shed that contains the repository."""
- return common_util.remove_protocol_and_user_from_clone_url( repository_clone_url ).split( '/repos/' )[ 0 ].rstrip( '/' )
-
def tool_shed_is_this_tool_shed( toolshed_base_url ):
"""Determine if a tool shed is the current tool shed."""
cleaned_toolshed_base_url = common_util.remove_protocol_from_tool_shed_url( toolshed_base_url )
cleaned_tool_shed = common_util.remove_protocol_from_tool_shed_url( str( url_for( '/', qualified=True ) ) )
return cleaned_toolshed_base_url == cleaned_tool_shed
-
-def update_in_shed_tool_config( app, repository ):
- """
- A tool shed repository is being updated so change the shed_tool_conf file. Parse the config
- file to generate the entire list of config_elems instead of using the in-memory list.
- """
- shed_conf_dict = repository.get_shed_config_dict( app )
- shed_tool_conf = shed_conf_dict[ 'config_filename' ]
- tool_path = shed_conf_dict[ 'tool_path' ]
- tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( app, repository )
- repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
- clone_url = common_util.generate_clone_url_for_installed_repository( app, repository )
- cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url( clone_url )
- tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
- owner = repository.owner
- if not owner:
- owner = get_repository_owner( cleaned_repository_clone_url )
- guid_to_tool_elem_dict = {}
- for tool_config_filename, guid, tool in repository_tools_tups:
- guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed,
- repository.name,
- repository.changeset_revision,
- repository.owner or '',
- tool_config_filename,
- tool,
- None )
- config_elems = []
- tree, error_message = xml_util.parse_xml( shed_tool_conf )
- if tree:
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'section':
- for i, tool_elem in enumerate( elem ):
- guid = tool_elem.attrib.get( 'guid' )
- if guid in guid_to_tool_elem_dict:
- elem[i] = guid_to_tool_elem_dict[ guid ]
- elif elem.tag == 'tool':
- guid = elem.attrib.get( 'guid' )
- if guid in guid_to_tool_elem_dict:
- elem = guid_to_tool_elem_dict[ guid ]
- config_elems.append( elem )
- config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
-
-def update_tool_shed_repository_status( app, tool_shed_repository, status, error_message=None ):
- """Update the status of a tool shed repository in the process of being installed into Galaxy."""
- context = app.install_model.context
- tool_shed_repository.status = status
- if error_message:
- tool_shed_repository.error_message = str( error_message )
- context.add( tool_shed_repository )
- context.flush()
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -4,11 +4,34 @@
from galaxy import util
from galaxy.model.orm import and_
+from galaxy.web.form_builder import SelectField
+
from tool_shed.util import hg_util
from tool_shed.util import xml_util
log = logging.getLogger( __name__ )
+def build_tool_dependencies_select_field( app, tool_shed_repository, name, multiple=True, display='checkboxes',
+ uninstalled_only=False ):
+ """
+ Generate a SelectField consisting of the current list of tool dependency ids
+ for an installed tool shed repository.
+ """
+ tool_dependencies_select_field = SelectField( name=name, multiple=multiple, display=display )
+ for tool_dependency in tool_shed_repository.tool_dependencies:
+ if uninstalled_only:
+ if tool_dependency.status not in [ app.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+ app.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+ continue
+ else:
+ if tool_dependency.status in [ app.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+ app.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+ continue
+ option_label = '%s version %s' % ( str( tool_dependency.name ), str( tool_dependency.version ) )
+ option_value = app.security.encode_id( tool_dependency.id )
+ tool_dependencies_select_field.add_option( option_label, option_value )
+ return tool_dependencies_select_field
+
def create_or_update_tool_dependency( app, tool_shed_repository, name, version, type, status, set_status=True ):
"""Create or update a tool_dependency record in the Galaxy database."""
# Called from Galaxy (never the tool shed) when a new repository is being installed or when an uninstalled
diff -r e9de99ef77105d221df55771724ca56b86485a4b -r 55b4556e0b7eba1ae3bfeb6e850831e25b3238ac lib/tool_shed/util/xml_util.py
--- a/lib/tool_shed/util/xml_util.py
+++ b/lib/tool_shed/util/xml_util.py
@@ -137,8 +137,8 @@
def xml_to_string( elem, encoding='utf-8', use_indent=False, level=0 ):
if elem is not None:
if use_indent:
- # We were called from suc.config_elems_to_xml_file(), so set the level to 1 since level 0
- # is the <toolbox> tag set.
+ # We were called from ToolPanelManager.config_elems_to_xml_file(), so
+ # set the level to 1 since level 0 is the <toolbox> tag set.
indent( elem, level=level )
if using_python_27:
xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0