1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a8baee5722c3/
Changeset: a8baee5722c3
User: greg
Date: 2013-08-26 20:12:43
Summary: Fix imports in the tool shed's metadata_util.
Affected #: 1 file
diff -r f5791b418b4e1d0be9d94895f0642083f795e61f -r a8baee5722c3d6df9d71bfdd5ed40c7890b2a473 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -19,7 +19,7 @@
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
from tool_shed.util import xml_util
-import tool_shed.galaxy_install.tool_dependencies.common_util as cu
+from tool_shed.galaxy_install.tool_dependencies import td_common_util
import pkg_resources
@@ -1124,7 +1124,7 @@
if not toolshed:
# Default to the current tool shed.
toolshed = str( url_for( '/', qualified=True ) ).rstrip( '/' )
- cleaned_toolshed = cu.clean_tool_shed_url( toolshed )
+ cleaned_toolshed = td_common_util.clean_tool_shed_url( toolshed )
name = repository_elem.get( 'name' )
owner = repository_elem.get( 'owner' )
changeset_revision = repository_elem.get( 'changeset_revision' )
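For context, the call site above only needs a helper that normalizes a tool shed URL; a rough standalone sketch of that idea with a simplified stand-in for clean_tool_shed_url (illustrative only, not the Galaxy implementation):

    def clean_tool_shed_url(tool_shed_url):
        # Stand-in: drop any protocol prefix and trailing slash,
        # e.g. 'http://toolshed.g2.bx.psu.edu/' -> 'toolshed.g2.bx.psu.edu'.
        for prefix in ('http://', 'https://'):
            if tool_shed_url.startswith(prefix):
                tool_shed_url = tool_shed_url[len(prefix):]
        return tool_shed_url.rstrip('/')

    # With the new import style the call site reads:
    #   from tool_shed.galaxy_install.tool_dependencies import td_common_util
    #   cleaned_toolshed = td_common_util.clean_tool_shed_url(toolshed)
    print(clean_tool_shed_url('http://toolshed.g2.bx.psu.edu/'))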
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1520189c271f/
Changeset: 1520189c271f
User: Dave Bouvier
Date: 2013-08-26 17:42:35
Summary: Update tool migration process to handle orphan tool dependencies.
Affected #: 3 files
diff -r d51946b4a19f41ebb77e79774c01b89357aa007f -r 1520189c271f3a09a11f28b13548d7f0fce92fa1 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -108,7 +108,11 @@
repository_dependencies_dict )
for tool_shed_repository in ordered_tool_shed_repositories:
- self.install_repository( repository_elem, tool_shed_repository, install_dependencies )
+ is_repository_dependency = self.__is_repository_dependency( name, changeset_revision, tool_shed_repository )
+ self.install_repository( repository_elem,
+ tool_shed_repository,
+ install_dependencies,
+ is_repository_dependency=is_repository_dependency )
else:
message = "\nNo tools associated with migration stage %s are defined in your " % str( latest_migration_script_number )
message += "file%s named %s,\nso no repositories will be installed on disk.\n" % ( plural, file_names )
@@ -320,7 +324,7 @@
tool_panel_elems.append( elem )
return tool_panel_elems
- def handle_repository_contents( self, tool_shed_repository, repository_clone_url, relative_install_dir, repository_elem, install_dependencies ):
+ def handle_repository_contents( self, tool_shed_repository, repository_clone_url, relative_install_dir, repository_elem, install_dependencies, is_repository_dependency=False ):
"""
Generate the metadata for the installed tool shed repository, among other things. If the installed tool_shed_repository contains tools
that are loaded into the Galaxy tool panel, this method will automatically eliminate all entries for each of the tools defined in the
@@ -333,37 +337,38 @@
repo_install_dir = os.path.join( self.tool_path, relative_install_dir )
else:
repo_install_dir = relative_install_dir
- for tool_elem in repository_elem:
- # The tool_elem looks something like this: <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
- tool_config = tool_elem.get( 'file' )
- guid = self.get_guid( repository_clone_url, relative_install_dir, tool_config )
- # See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems.
- is_displayed, tool_sections = self.get_containing_tool_sections( tool_config )
- if is_displayed:
- tool_panel_dict_for_tool_config = tool_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
- # The tool-panel_dict has the following structure.
- # {<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <TooSection name>}]}
- for k, v in tool_panel_dict_for_tool_config.items():
- tool_panel_dict_for_display[ k ] = v
- for tool_panel_dict in v:
- # Keep track of tool config file names associated with entries that have been made to the migrated_tools_conf.xml file so
- # they can be eliminated from all non-shed-related tool panel configs.
- tool_config_file = tool_panel_dict.get( 'tool_config', None )
- if tool_config_file:
- if tool_config_file not in tool_configs_to_filter:
- tool_configs_to_filter.append( tool_config_file )
- else:
- print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' \
- % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) )
- if tool_configs_to_filter:
- lock = threading.Lock()
- lock.acquire( True )
- try:
- self.filter_and_persist_proprietary_tool_panel_configs( tool_configs_to_filter )
- except Exception, e:
- log.exception( "Exception attempting to filter and persist non-shed-related tool panel configs:\n%s" % str( e ) )
- finally:
- lock.release()
+ if not is_repository_dependency:
+ for tool_elem in repository_elem:
+ # The tool_elem looks something like this: <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
+ tool_config = tool_elem.get( 'file' )
+ guid = self.get_guid( repository_clone_url, relative_install_dir, tool_config )
+ # See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems.
+ is_displayed, tool_sections = self.get_containing_tool_sections( tool_config )
+ if is_displayed:
+ tool_panel_dict_for_tool_config = tool_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
+ # The tool-panel_dict has the following structure.
+ # {<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <TooSection name>}]}
+ for k, v in tool_panel_dict_for_tool_config.items():
+ tool_panel_dict_for_display[ k ] = v
+ for tool_panel_dict in v:
+ # Keep track of tool config file names associated with entries that have been made to the migrated_tools_conf.xml file so
+ # they can be eliminated from all non-shed-related tool panel configs.
+ tool_config_file = tool_panel_dict.get( 'tool_config', None )
+ if tool_config_file:
+ if tool_config_file not in tool_configs_to_filter:
+ tool_configs_to_filter.append( tool_config_file )
+ else:
+ print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' \
+ % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) )
+ if tool_configs_to_filter:
+ lock = threading.Lock()
+ lock.acquire( True )
+ try:
+ self.filter_and_persist_proprietary_tool_panel_configs( tool_configs_to_filter )
+ except Exception, e:
+ log.exception( "Exception attempting to filter and persist non-shed-related tool panel configs:\n%s" % str( e ) )
+ finally:
+ lock.release()
metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=self.app,
repository=tool_shed_repository,
changeset_revision=tool_shed_repository.changeset_revision,
@@ -377,7 +382,8 @@
tool_shed_repository.metadata = metadata_dict
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
- if 'tool_dependencies' in metadata_dict:
+ has_tool_dependencies = self.__has_tool_dependencies( metadata_dict )
+ if has_tool_dependencies:
# All tool_dependency objects must be created before the tools are processed even if no tool dependencies will be installed.
tool_dependencies = tool_dependency_util.create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True )
else:
@@ -400,30 +406,31 @@
sample_files_copied )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
tool_util.copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied )
- if install_dependencies and tool_dependencies and 'tool_dependencies' in metadata_dict:
- # Install tool dependencies.
- suc.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
- # Get the tool_dependencies.xml file from disk.
- tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
- installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=self.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_dependencies )
- for installed_tool_dependency in installed_tool_dependencies:
- if installed_tool_dependency.status == self.app.model.ToolDependency.installation_status.ERROR:
- print '\nThe following error occurred from the InstallManager while installing tool dependency ', installed_tool_dependency.name, ':'
- print installed_tool_dependency.error_message, '\n\n'
- tool_util.add_to_tool_panel( self.app,
- tool_shed_repository.name,
- repository_clone_url,
- tool_shed_repository.installed_changeset_revision,
- repository_tools_tups,
- self.repository_owner,
- self.migrated_tools_config,
- tool_panel_dict=tool_panel_dict_for_display,
- new_install=True )
+ if not is_repository_dependency:
+ tool_util.add_to_tool_panel( self.app,
+ tool_shed_repository.name,
+ repository_clone_url,
+ tool_shed_repository.installed_changeset_revision,
+ repository_tools_tups,
+ self.repository_owner,
+ self.migrated_tools_config,
+ tool_panel_dict=tool_panel_dict_for_display,
+ new_install=True )
+ if install_dependencies and tool_dependencies and has_tool_dependencies:
+ # Install tool dependencies.
+ suc.update_tool_shed_repository_status( self.app,
+ tool_shed_repository,
+ self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ # Get the tool_dependencies.xml file from disk.
+ tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
+ installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=self.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_dependencies )
+ for installed_tool_dependency in installed_tool_dependencies:
+ if installed_tool_dependency.status == self.app.model.ToolDependency.installation_status.ERROR:
+ print '\nThe following error occurred from the InstallManager while installing tool dependency ', installed_tool_dependency.name, ':'
+ print installed_tool_dependency.error_message, '\n\n'
if 'datatypes' in metadata_dict:
tool_shed_repository.status = self.app.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
@@ -455,7 +462,7 @@
except:
pass
- def install_repository( self, repository_elem, tool_shed_repository, install_dependencies ):
+ def install_repository( self, repository_elem, tool_shed_repository, install_dependencies, is_repository_dependency=False ):
"""Install a single repository, loading contained tools into the tool panel."""
# Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
relative_clone_dir = os.path.join( tool_shed_repository.tool_shed,
@@ -482,7 +489,8 @@
repository_clone_url=repository_clone_url,
relative_install_dir=relative_install_dir,
repository_elem=repository_elem,
- install_dependencies=install_dependencies )
+ install_dependencies=install_dependencies,
+ is_repository_dependency=is_repository_dependency )
self.app.sa_session.refresh( tool_shed_repository )
metadata_dict = tool_shed_repository.metadata
if 'tools' in metadata_dict:
@@ -526,6 +534,12 @@
self.app.sa_session.add( tool_version_association )
self.app.sa_session.flush()
suc.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
+ else:
+ print 'Error attempting to clone repository %s: %s' % ( str( tool_shed_repository.name ), str( error_message ) )
+ suc.update_tool_shed_repository_status( self.app,
+ tool_shed_repository,
+ self.app.model.ToolShedRepository.installation_status.ERROR,
+ error_message=error_message )
@property
def non_shed_tool_panel_configs( self ):
@@ -565,6 +579,12 @@
break
return ordered_tool_shed_repositories
+ def __has_tool_dependencies( self, metadata_dict ):
+ '''Determine if the provided metadata_dict specifies tool dependencies.'''
+ if 'tool_dependencies' in metadata_dict or 'orphan_tool_dependencies' in metadata_dict:
+ return True
+ return False
+
def __isinstalled( self, clone_dir ):
full_path = os.path.abspath( clone_dir )
if os.path.exists( full_path ):
@@ -574,6 +594,14 @@
return True
return False
+ def __is_repository_dependency( self, name, changeset_revision, tool_shed_repository ):
+ '''Determine if the provided tool shed repository is a repository dependency.'''
+ if str( tool_shed_repository.name ) != str( name ) or \
+ str( tool_shed_repository.owner ) != str( self.repository_owner ) or \
+ str( tool_shed_repository.changeset_revision ) != str( changeset_revision ):
+ return True
+ return False
+
def __is_valid_repository_tag( self, elem ):
# <repository name="emboss_datatypes" description="Datatypes for Emboss tools" changeset_revision="a89163f31369" />
if elem.tag != 'repository':
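A condensed, standalone sketch of the logic in the two new private helpers, using stand-in objects rather than the InstallManager API (the name-mangled __is_repository_dependency and __has_tool_dependencies in the hunk above follow the same rules); the resulting flag is what decides whether tool panel entries are created for the repository:

    class FakeRepository(object):
        def __init__(self, name, owner, changeset_revision):
            self.name = name
            self.owner = owner
            self.changeset_revision = changeset_revision

    def is_repository_dependency(name, changeset_revision, owner, repository):
        # A record pulled in only as a repository dependency differs from the
        # repository named in the migration XML in name, owner, or revision.
        return (str(repository.name) != str(name) or
                str(repository.owner) != str(owner) or
                str(repository.changeset_revision) != str(changeset_revision))

    def has_tool_dependencies(metadata_dict):
        # Orphan tool dependencies (defined in a repository with no tools) now count too.
        return 'tool_dependencies' in metadata_dict or 'orphan_tool_dependencies' in metadata_dict

    primary = FakeRepository('emboss_5', '461a4216e8ab', 'devteam')  # hypothetical values
    dependency = FakeRepository('emboss_datatypes', 'a89163f31369', 'devteam')
    print(is_repository_dependency('emboss_5', '461a4216e8ab', 'devteam',
                                   FakeRepository('emboss_5', 'devteam', '461a4216e8ab')))   # False
    print(is_repository_dependency('emboss_5', '461a4216e8ab', 'devteam',
                                   FakeRepository('emboss_datatypes', 'devteam', 'a89163f31369')))  # True
    print(has_tool_dependencies({'orphan_tool_dependencies': {}}))  # True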
diff -r d51946b4a19f41ebb77e79774c01b89357aa007f -r 1520189c271f3a09a11f28b13548d7f0fce92fa1 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1521,10 +1521,12 @@
# purging is not supported by the mercurial API.
commands.update( get_configured_ui(), repo, rev=ctx_rev )
-def update_tool_shed_repository_status( app, tool_shed_repository, status ):
+def update_tool_shed_repository_status( app, tool_shed_repository, status, error_message=None ):
"""Update the status of a tool shed repository in the process of being installed into Galaxy."""
sa_session = app.model.context.current
tool_shed_repository.status = status
+ if error_message:
+ tool_shed_repository.error_message = str( error_message )
sa_session.add( tool_shed_repository )
sa_session.flush()
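The optional error_message parameter is what lets the new clone-failure branch in the install manager persist the error text along with the ERROR status; a minimal sketch of the same pattern with a stub record in place of the Galaxy model layer:

    class StubRepositoryRecord(object):
        status = None
        error_message = None

    def update_tool_shed_repository_status(repository, status, error_message=None):
        # Mirrors the new signature: the status is always set, the error
        # message only when one is supplied (e.g. by the clone-failure branch).
        repository.status = status
        if error_message:
            repository.error_message = str(error_message)
        # In Galaxy this is followed by sa_session.add(repository) and a flush().

    record = StubRepositoryRecord()
    update_tool_shed_repository_status(record, 'Error',
                                       error_message='Error cloning repository: repository not found')
    print('%s: %s' % (record.status, record.error_message))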
diff -r d51946b4a19f41ebb77e79774c01b89357aa007f -r 1520189c271f3a09a11f28b13548d7f0fce92fa1 lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -39,37 +39,6 @@
tool_dependencies[ dependency_key ] = requirements_dict
return tool_dependencies
-def get_download_url_for_platform( url_templates, platform_info_dict ):
- '''
- Compare the dict returned by get_platform_info() with the values specified in the base_url element. Return
- true if and only if all defined attributes match the corresponding dict entries. If an entry is not
- defined in the base_url element, it is assumed to be irrelevant at this stage. For example,
- <base_url os="darwin">http://hgdownload.cse.ucsc.edu/admin/exe/macOSX.${architecture}/faToTwoBit</base_url>
- where the OS must be 'darwin', but the architecture is filled in later using string.Template.
- '''
- os_ok = False
- architecture_ok = False
- for url_template in url_templates:
- os_name = url_template.get( 'os', None )
- architecture = url_template.get( 'architecture', None )
- if os_name:
- if os_name.lower() == platform_info_dict[ 'os' ]:
- os_ok = True
- else:
- os_ok = False
- else:
- os_ok = True
- if architecture:
- if architecture.lower() == platform_info_dict[ 'architecture' ]:
- architecture_ok = True
- else:
- architecture_ok = False
- else:
- architecture_ok = True
- if os_ok and architecture_ok:
- return url_template
- return None
-
def create_or_update_tool_dependency( app, tool_shed_repository, name, version, type, status, set_status=True ):
# Called from Galaxy (never the tool shed) when a new repository is being installed or when an uninstalled repository is being reinstalled.
sa_session = app.model.context.current
@@ -189,6 +158,37 @@
message += "This repository contains no tools, so it's defined tool dependencies are considered orphans within this repository.<br/>"
return message
+def get_download_url_for_platform( url_templates, platform_info_dict ):
+ '''
+ Compare the dict returned by get_platform_info() with the values specified in the base_url element. Return
+ true if and only if all defined attributes match the corresponding dict entries. If an entry is not
+ defined in the base_url element, it is assumed to be irrelevant at this stage. For example,
+ <base_url os="darwin">http://hgdownload.cse.ucsc.edu/admin/exe/macOSX.${architecture}/faToTwoBit</base_url>
+ where the OS must be 'darwin', but the architecture is filled in later using string.Template.
+ '''
+ os_ok = False
+ architecture_ok = False
+ for url_template in url_templates:
+ os_name = url_template.get( 'os', None )
+ architecture = url_template.get( 'architecture', None )
+ if os_name:
+ if os_name.lower() == platform_info_dict[ 'os' ]:
+ os_ok = True
+ else:
+ os_ok = False
+ else:
+ os_ok = True
+ if architecture:
+ if architecture.lower() == platform_info_dict[ 'architecture' ]:
+ architecture_ok = True
+ else:
+ architecture_ok = False
+ else:
+ architecture_ok = True
+ if os_ok and architecture_ok:
+ return url_template
+ return None
+
def get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies ):
if all_tool_dependencies:
tool_dependencies = {}
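get_download_url_for_platform is only relocated in this hunk, not changed. As a quick illustration of its matching rule, here is a compact restatement using plain dicts in place of the parsed <base_url> elements (illustrative only, not Galaxy code):

    def matches_platform(url_template, platform_info_dict):
        # Same rule as get_download_url_for_platform above: an attribute left
        # undefined on the <base_url> element is irrelevant; defined ones must match.
        os_name = url_template.get('os', None)
        architecture = url_template.get('architecture', None)
        os_ok = not os_name or os_name.lower() == platform_info_dict['os']
        architecture_ok = not architecture or architecture.lower() == platform_info_dict['architecture']
        return os_ok and architecture_ok

    platform_info_dict = {'os': 'linux', 'architecture': 'x86_64'}
    url_templates = [{'os': 'darwin'}, {'os': 'linux', 'architecture': 'x86_64'}]
    chosen = next((t for t in url_templates if matches_platform(t, platform_info_dict)), None)
    print(chosen)  # -> {'os': 'linux', 'architecture': 'x86_64'}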
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/89813e684db6/
Changeset: 89813e684db6
User: greg
Date: 2013-08-26 15:57:18
Summary: Enhance the Galaxy tool migration framework to support restricted repository dependency definitions. Repository dependency definitions contained in repositories with tools migrated from the Galaxy code base cannot define a dependency on a repository that contains a tool at any level of the dependency hierarchy.
Affected #: 9 files
diff -r e97f613a4948dd641e66019e0c5eb72a8660f2e2 -r 89813e684db6ba8582aabc7cd045894d2021c872 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -17,6 +17,7 @@
from galaxy.util import json
from galaxy.model.orm import and_
import tool_shed.util.shed_util_common as suc
+from tool_shed.util import common_util as scu
from tool_shed.util import container_util
from tool_shed.util import encoding_util
from tool_shed.util import export_util
@@ -1579,6 +1580,8 @@
changeset_revision = kwd.get( 'changeset_revision', None )
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
repository_id = trans.security.encode_id( repository.id )
+ # We aren't concerned with repository's of type tool_dependency_definition here if a repository_metadata record is not returned
+ # because repositories of this type will never have repository dependencies.
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
metadata = repository_metadata.metadata
@@ -1642,7 +1645,7 @@
encoded_repository_ids = []
changeset_revisions = []
for required_repository_tup in decoded_required_repository_tups:
- tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( required_repository_tup )
+ tool_shed, name, owner, changeset_revision, prior_installation_required = scu.parse_repository_dependency_tuple( required_repository_tup )
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
encoded_repository_ids.append( trans.security.encode_id( repository.id ) )
changeset_revisions.append( changeset_revision )
diff -r e97f613a4948dd641e66019e0c5eb72a8660f2e2 -r 89813e684db6ba8582aabc7cd045894d2021c872 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -54,7 +54,8 @@
print error_message
else:
root = tree.getroot()
- self.tool_shed = suc.clean_tool_shed_url( root.get( 'name' ) )
+ self.tool_shed_url = suc.get_url_from_tool_shed( self.app, root.get( 'name' ) )
+ self.tool_shed = suc.clean_tool_shed_url( self.tool_shed_url )
self.repository_owner = common_util.REPOSITORY_OWNER
index, self.shed_config_dict = suc.get_shed_tool_conf_dict( app, self.migrated_tools_config )
# Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
@@ -85,7 +86,29 @@
for repository_elem in root:
# Make sure we have a valid repository tag.
if self.__is_valid_repository_tag( repository_elem ):
- self.install_repository( repository_elem, install_dependencies )
+ # Get all repository dependencies for the repository defined by the current repository_elem. Repository dependency
+ # definitions contained in tool shed repositories with migrated tools must never define a relationship to a repository
+ # dependency that contains a tool. The repository dependency can only contain items that are not loaded into the Galaxy
+ # tool panel (e.g., tool dependency definitions, custom datatypes, etc). This restriction must be followed down the
+ # entire dependency hierarchy.
+ name = repository_elem.get( 'name' )
+ changeset_revision = repository_elem.get( 'changeset_revision' )
+ tool_shed_accessible, repository_dependencies_dict = \
+ common_util.get_repository_dependencies( app, self.tool_shed_url, name, self.repository_owner, changeset_revision )
+ # Make sure all repository dependency records exist (as tool_shed_repository table rows) in the Galaxy database.
+ created_tool_shed_repositories = self.create_or_update_tool_shed_repository_records( name,
+ changeset_revision,
+ repository_dependencies_dict )
+ # Order the repositories for proper installation. This process is similar to the process used when installing tool
+ # shed repositories (i.e., the order_components_for_installation() method in ~/lib/tool_shed/galaxy_install/
+ # repository_util), but does not handle managing tool panel sections and other components since repository dependency
+ # definitions contained in tool shed repositories with migrated tools must never define a relationship to a repository
+ # dependency that contains a tool.
+ ordered_tool_shed_repositories = self.order_repositories_for_installation( created_tool_shed_repositories,
+ repository_dependencies_dict )
+
+ for tool_shed_repository in ordered_tool_shed_repositories:
+ self.install_repository( repository_elem, tool_shed_repository, install_dependencies )
else:
message = "\nNo tools associated with migration stage %s are defined in your " % str( latest_migration_script_number )
message += "file%s named %s,\nso no repositories will be installed on disk.\n" % ( plural, file_names )
@@ -95,6 +118,52 @@
message += "Try again later.\n"
print message
+ def create_or_update_tool_shed_repository_record( self, name, owner, changeset_revision, description=None ):
+
+ # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
+ relative_clone_dir = os.path.join( self.tool_shed, 'repos', owner, name, changeset_revision )
+ clone_dir = os.path.join( self.tool_path, relative_clone_dir )
+ if not self.__isinstalled( clone_dir ):
+ repository_clone_url = os.path.join( self.tool_shed_url, 'repos', owner, name )
+ relative_install_dir = os.path.join( relative_clone_dir, name )
+ install_dir = os.path.join( clone_dir, name )
+ ctx_rev = suc.get_ctx_rev( self.app, self.tool_shed_url, name, owner, changeset_revision )
+ tool_shed_repository = suc.create_or_update_tool_shed_repository( app=self.app,
+ name=name,
+ description=description,
+ installed_changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_clone_url=repository_clone_url,
+ metadata_dict={},
+ status=self.app.model.ToolShedRepository.installation_status.NEW,
+ current_changeset_revision=None,
+ owner=self.repository_owner,
+ dist_to_shed=True )
+ return tool_shed_repository
+ return None
+
+ def create_or_update_tool_shed_repository_records( self, name, changeset_revision, repository_dependencies_dict ):
+ """
+ Make sure the repository defined by name and changeset_revision and all of it's repository dependencies have associated tool_shed_repository
+ table rows in the Galaxy database.
+ """
+ created_tool_shed_repositories = []
+ description = repository_dependencies_dict.get( 'description', None )
+ tool_shed_repository = self.create_or_update_tool_shed_repository_record( name, self.repository_owner, changeset_revision, description=description )
+ if tool_shed_repository:
+ created_tool_shed_repositories.append( tool_shed_repository )
+ for rd_key, rd_tups in repository_dependencies_dict.items():
+ if rd_key in [ 'root_key', 'description' ]:
+ continue
+ for rd_tup in rd_tups:
+ rd_tool_shed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required = \
+ common_util.parse_repository_dependency_tuple( rd_tup )
+ # TODO: Make sure the repository description is applied to the new repository record during installation.
+ tool_shed_repository = self.create_or_update_tool_shed_repository_record( rd_name, rd_owner, rd_changeset_revision, description=None )
+ if tool_shed_repository:
+ created_tool_shed_repositories.append( tool_shed_repository )
+ return created_tool_shed_repositories
+
def filter_and_persist_proprietary_tool_panel_configs( self, tool_configs_to_filter ):
"""Eliminate all entries in all non-shed-related tool panel configs for all tool config file names in the received tool_configs_to_filter."""
for proprietary_tool_conf in self.proprietary_tool_confs:
@@ -131,7 +200,38 @@
fh.close()
shutil.move( tmp_filename, os.path.abspath( proprietary_tool_conf ) )
os.chmod( proprietary_tool_conf, 0644 )
-
+
+ def get_containing_tool_sections( self, tool_config ):
+ """
+ If tool_config is defined somewhere in self.proprietary_tool_panel_elems, return True and a list of ToolSections in which the
+ tool is displayed. If the tool is displayed outside of any sections, None is appended to the list.
+ """
+ tool_sections = []
+ is_displayed = False
+ for proprietary_tool_panel_elem in self.proprietary_tool_panel_elems:
+ if proprietary_tool_panel_elem.tag == 'tool':
+ # The proprietary_tool_panel_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
+ proprietary_tool_config = proprietary_tool_panel_elem.get( 'file' )
+ proprietary_name = suc.strip_path( proprietary_tool_config )
+ if tool_config == proprietary_name:
+ # The tool is loaded outside of any sections.
+ tool_sections.append( None )
+ if not is_displayed:
+ is_displayed = True
+ if proprietary_tool_panel_elem.tag == 'section':
+ # The proprietary_tool_panel_elem looks something like <section name="EMBOSS" id="EMBOSSLite">.
+ for section_elem in proprietary_tool_panel_elem:
+ if section_elem.tag == 'tool':
+ # The section_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
+ proprietary_tool_config = section_elem.get( 'file' )
+ proprietary_name = suc.strip_path( proprietary_tool_config )
+ if tool_config == proprietary_name:
+ # The tool is loaded inside of the section_elem.
+ tool_sections.append( ToolSection( proprietary_tool_panel_elem ) )
+ if not is_displayed:
+ is_displayed = True
+ return is_displayed, tool_sections
+
def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
if self.shed_config_dict.get( 'tool_path' ):
relative_install_dir = os.path.join( self.shed_config_dict['tool_path'], relative_install_dir )
@@ -150,6 +250,30 @@
tool = self.toolbox.load_tool( full_path )
return suc.generate_tool_guid( repository_clone_url, tool )
+ def get_prior_install_required_dict( self, tool_shed_repositories, repository_dependencies_dict ):
+ """
+ Return a dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids, each of which is contained in the received
+ list of tsr_ids and whose associated repository must be installed prior to the repository associated with the tsr_id key.
+ """
+ # Initialize the dictionary.
+ prior_install_required_dict = {}
+ tsr_ids = [ tool_shed_repository.id for tool_shed_repository in tool_shed_repositories ]
+ for tsr_id in tsr_ids:
+ prior_install_required_dict[ tsr_id ] = []
+ # Inspect the repository dependencies about to be installed and populate the dictionary.
+ for rd_key, rd_tups in repository_dependencies_dict.items():
+ if rd_key in [ 'root_key', 'description' ]:
+ continue
+ for rd_tup in rd_tups:
+ prior_install_ids = []
+ tool_shed, name, owner, changeset_revision, prior_installation_required = common_util.parse_repository_dependency_tuple( rd_tup )
+ if util.asbool( prior_installation_required ):
+ for tsr in tool_shed_repositories:
+ if tsr.name == name and tsr.owner == owner and tsr.changeset_revision == changeset_revision:
+ prior_install_ids.append( tsr.id )
+ prior_install_required_dict[ tsr.id ] = prior_install_ids
+ return prior_install_required_dict
+
def get_proprietary_tool_panel_elems( self, latest_tool_migration_script_number ):
"""
Parse each config in self.proprietary_tool_confs (the default is tool_conf.xml) and generate a list of Elements that are
@@ -196,37 +320,6 @@
tool_panel_elems.append( elem )
return tool_panel_elems
- def get_containing_tool_sections( self, tool_config ):
- """
- If tool_config is defined somewhere in self.proprietary_tool_panel_elems, return True and a list of ToolSections in which the
- tool is displayed. If the tool is displayed outside of any sections, None is appended to the list.
- """
- tool_sections = []
- is_displayed = False
- for proprietary_tool_panel_elem in self.proprietary_tool_panel_elems:
- if proprietary_tool_panel_elem.tag == 'tool':
- # The proprietary_tool_panel_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
- proprietary_tool_config = proprietary_tool_panel_elem.get( 'file' )
- proprietary_name = suc.strip_path( proprietary_tool_config )
- if tool_config == proprietary_name:
- # The tool is loaded outside of any sections.
- tool_sections.append( None )
- if not is_displayed:
- is_displayed = True
- if proprietary_tool_panel_elem.tag == 'section':
- # The proprietary_tool_panel_elem looks something like <section name="EMBOSS" id="EMBOSSLite">.
- for section_elem in proprietary_tool_panel_elem:
- if section_elem.tag == 'tool':
- # The section_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
- proprietary_tool_config = section_elem.get( 'file' )
- proprietary_name = suc.strip_path( proprietary_tool_config )
- if tool_config == proprietary_name:
- # The tool is loaded inside of the section_elem.
- tool_sections.append( ToolSection( proprietary_tool_panel_elem ) )
- if not is_displayed:
- is_displayed = True
- return is_displayed, tool_sections
-
def handle_repository_contents( self, tool_shed_repository, repository_clone_url, relative_install_dir, repository_elem, install_dependencies ):
"""
Generate the metadata for the installed tool shed repository, among other things. If the installed tool_shed_repository contains tools
@@ -362,33 +455,26 @@
except:
pass
- def install_repository( self, repository_elem, install_dependencies ):
- # Install a single repository, loading contained tools into the tool panel.
- name = repository_elem.get( 'name' )
- description = repository_elem.get( 'description' )
- installed_changeset_revision = repository_elem.get( 'changeset_revision' )
+ def install_repository( self, repository_elem, tool_shed_repository, install_dependencies ):
+ """Install a single repository, loading contained tools into the tool panel."""
# Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
- relative_clone_dir = os.path.join( self.tool_shed, 'repos', self.repository_owner, name, installed_changeset_revision )
+ relative_clone_dir = os.path.join( tool_shed_repository.tool_shed,
+ 'repos',
+ tool_shed_repository.owner,
+ tool_shed_repository.name,
+ tool_shed_repository.installed_changeset_revision )
clone_dir = os.path.join( self.tool_path, relative_clone_dir )
if self.__isinstalled( clone_dir ):
- print "Skipping automatic install of repository '", name, "' because it has already been installed in location ", clone_dir
+ print "Skipping automatic install of repository '", tool_shed_repository.name, "' because it has already been installed in location ", clone_dir
else:
- tool_shed_url = suc.get_url_from_tool_shed( self.app, self.tool_shed )
- repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
- relative_install_dir = os.path.join( relative_clone_dir, name )
- install_dir = os.path.join( clone_dir, name )
- ctx_rev = suc.get_ctx_rev( self.app, tool_shed_url, name, self.repository_owner, installed_changeset_revision )
- tool_shed_repository = suc.create_or_update_tool_shed_repository( app=self.app,
- name=name,
- description=description,
- installed_changeset_revision=installed_changeset_revision,
- ctx_rev=ctx_rev,
- repository_clone_url=repository_clone_url,
- metadata_dict={},
- status=self.app.model.ToolShedRepository.installation_status.NEW,
- current_changeset_revision=None,
- owner=self.repository_owner,
- dist_to_shed=True )
+ repository_clone_url = os.path.join( self.tool_shed_url, 'repos', tool_shed_repository.owner, tool_shed_repository.name )
+ relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
+ install_dir = os.path.join( clone_dir, tool_shed_repository.name )
+ ctx_rev = suc.get_ctx_rev( self.app,
+ self.tool_shed_url,
+ tool_shed_repository.name,
+ tool_shed_repository.owner,
+ tool_shed_repository.installed_changeset_revision )
suc.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
@@ -405,8 +491,8 @@
self.app.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
# Get the tool_versions from the tool shed for each tool in the installed change set.
url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
- ( tool_shed_url, tool_shed_repository.name, self.repository_owner, installed_changeset_revision )
- text = common_util.tool_shed_get( self.app, tool_shed_url, url )
+ ( self.tool_shed_url, tool_shed_repository.name, self.repository_owner, tool_shed_repository.installed_changeset_revision )
+ text = common_util.tool_shed_get( self.app, self.tool_shed_url, url )
if text:
tool_version_dicts = from_json_string( text )
tool_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
@@ -445,6 +531,40 @@
def non_shed_tool_panel_configs( self ):
return common_util.get_non_shed_tool_panel_configs( self.app )
+ def order_repositories_for_installation( self, tool_shed_repositories, repository_dependencies_dict ):
+ """
+ Some repositories may have repository dependencies that are required to be installed before the dependent repository. This method will
+ inspect the list of repositories about to be installed and make sure to order them appropriately. For each repository about to be installed,
+ if required repositories are not contained in the list of repositories about to be installed, then they are not considered. Repository
+ dependency definitions that contain circular dependencies should not result in an infinite loop, but obviously prior installation will not be
+ handled for one or more of the repositories that require prior installation. This process is similar to the process used when installing tool
+ shed repositories (i.e., the order_components_for_installation() method in ~/lib/tool_shed/galaxy_install/repository_util), but does not handle
+ managing tool panel sections and other components since repository dependency definitions contained in tool shed repositories with migrated
+ tools must never define a relationship to a repository dependency that contains a tool.
+ """
+ ordered_tool_shed_repositories = []
+ ordered_tsr_ids = []
+ processed_tsr_ids = []
+ prior_install_required_dict = self.get_prior_install_required_dict( tool_shed_repositories, repository_dependencies_dict )
+ tsr_ids = [ tool_shed_repository.id for tool_shed_repository in tool_shed_repositories ]
+ while len( processed_tsr_ids ) != len( prior_install_required_dict.keys() ):
+ tsr_id = suc.get_next_prior_import_or_install_required_dict_entry( prior_install_required_dict, processed_tsr_ids )
+ processed_tsr_ids.append( tsr_id )
+ # Create the ordered_tsr_ids, the ordered_repo_info_dicts and the ordered_tool_panel_section_keys lists.
+ if tsr_id not in ordered_tsr_ids:
+ prior_install_required_ids = prior_install_required_dict[ tsr_id ]
+ for prior_install_required_id in prior_install_required_ids:
+ if prior_install_required_id not in ordered_tsr_ids:
+ # Install the associated repository dependency first.
+ ordered_tsr_ids.append( prior_install_required_id )
+ ordered_tsr_ids.append( tsr_id )
+ for ordered_tsr_id in ordered_tsr_ids:
+ for tool_shed_repository in tool_shed_repositories:
+ if tool_shed_repository.id == ordered_tsr_id:
+ ordered_tool_shed_repositories.append( tool_shed_repository )
+ break
+ return ordered_tool_shed_repositories
+
def __isinstalled( self, clone_dir ):
full_path = os.path.abspath( clone_dir )
if os.path.exists( full_path ):
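The ordering logic added above boils down to: build a map from repository id to the same-batch ids that must be installed first, then emit each repository only after its prerequisites. A simplified, self-contained restatement with toy ids (the real order_repositories_for_installation() walks get_next_prior_import_or_install_required_dict_entry over a processed list and tolerates circular definitions):

    def order_for_installation(tsr_ids, prior_install_required_dict):
        # prior_install_required_dict maps each id to the ids (from the same
        # batch) whose repositories must be installed before it.
        ordered = []
        for tsr_id in tsr_ids:
            if tsr_id in ordered:
                continue
            for prior_id in prior_install_required_dict.get(tsr_id, []):
                if prior_id not in ordered:
                    ordered.append(prior_id)   # dependency goes first
            ordered.append(tsr_id)
        return ordered

    # Hypothetical batch: emboss_5 declares prior_installation_required on emboss_datatypes.
    print(order_for_installation(['emboss_5', 'emboss_datatypes'],
                                 {'emboss_5': ['emboss_datatypes'], 'emboss_datatypes': []}))
    # -> ['emboss_datatypes', 'emboss_5']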
diff -r e97f613a4948dd641e66019e0c5eb72a8660f2e2 -r 89813e684db6ba8582aabc7cd045894d2021c872 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -211,7 +211,7 @@
if key in [ 'description', 'root_key' ]:
continue
for rd_tup in rd_tups:
- tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( rd_tup )
+ tool_shed, name, owner, changeset_revision, prior_installation_required = common_util.parse_repository_dependency_tuple( rd_tup )
# Updates to installed repository revisions may have occurred, so make sure to locate the appropriate repository revision if one exists.
# We need to create a temporary repo_info_tuple that includes the correct repository owner which we get from the current rd_tup. The current
# tuple looks like: ( description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td )
diff -r e97f613a4948dd641e66019e0c5eb72a8660f2e2 -r 89813e684db6ba8582aabc7cd045894d2021c872 lib/tool_shed/util/common_util.py
--- a/lib/tool_shed/util/common_util.py
+++ b/lib/tool_shed/util/common_util.py
@@ -1,11 +1,20 @@
import os
import urllib2
+from galaxy.util import json
from galaxy.util.odict import odict
from tool_shed.util import encoding_util
from tool_shed.util import xml_util
REPOSITORY_OWNER = 'devteam'
+def accumulate_tool_dependencies( tool_shed_accessible, tool_dependencies, all_tool_dependencies ):
+ if tool_shed_accessible:
+ if tool_dependencies:
+ for tool_dependency in tool_dependencies:
+ if tool_dependency not in all_tool_dependencies:
+ all_tool_dependencies.append( tool_dependency )
+ return all_tool_dependencies
+
def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ):
# Get the 000x_tools.xml file associated with the current migrate_tools version number.
tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
@@ -23,30 +32,38 @@
if tool_shed_url:
for elem in root:
if elem.tag == 'repository':
- tool_dependencies = []
- tool_dependencies_dict = {}
+ repository_dependencies = []
+ all_tool_dependencies = []
repository_name = elem.get( 'name' )
changeset_revision = elem.get( 'changeset_revision' )
- url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \
- ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
- try:
- text = tool_shed_get( app, tool_shed_url, url )
- tool_shed_accessible = True
- except Exception, e:
- # Tool shed may be unavailable - we have to set tool_shed_accessible since we're looping.
- tool_shed_accessible = False
- print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
- if tool_shed_accessible:
- if text:
- tool_dependencies_dict = encoding_util.tool_shed_decode( text )
- for dependency_key, requirements_dict in tool_dependencies_dict.items():
- tool_dependency_name = requirements_dict[ 'name' ]
- tool_dependency_version = requirements_dict[ 'version' ]
- tool_dependency_type = requirements_dict[ 'type' ]
- tool_dependency_readme = requirements_dict.get( 'readme', '' )
- tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
- for tool_elem in elem.findall( 'tool' ):
- migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies
+ tool_shed_accessible, repository_dependencies_dict = get_repository_dependencies( app,
+ tool_shed_url,
+ repository_name,
+ REPOSITORY_OWNER,
+ changeset_revision )
+ # Accumulate all tool dependencies defined for repository dependencies for display to the user.
+ for rd_key, rd_tups in repository_dependencies_dict.items():
+ if rd_key in [ 'root_key', 'description' ]:
+ continue
+ for rd_tup in rd_tups:
+ tool_shed, name, owner, changeset_revision, prior_installation_required = parse_repository_dependency_tuple( rd_tup )
+ tool_shed_accessible, tool_dependencies = get_tool_dependencies( app,
+ tool_shed,
+ name,
+ owner,
+ changeset_revision )
+ all_tool_dependencies = accumulate_tool_dependencies( tool_shed_accessible, tool_dependencies, all_tool_dependencies )
+ tool_shed_accessible, tool_dependencies = get_tool_dependencies( app, tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
+ all_tool_dependencies = accumulate_tool_dependencies( tool_shed_accessible, tool_dependencies, all_tool_dependencies )
+ for tool_elem in elem.findall( 'tool' ):
+ tool_config_file_name = tool_elem.get( 'file' )
+ if tool_config_file_name:
+ # We currently do nothing with repository dependencies except install them (we do not display repositories that will be
+ # installed to the user). However, we'll store them in the following dictionary in case we choose to display them in the
+ # future.
+ dependencies_dict = dict( tool_dependencies=all_tool_dependencies,
+ repository_dependencies=repository_dependencies )
+ migrated_tool_configs_dict[ tool_config_file_name ] = dependencies_dict
if tool_shed_accessible:
# Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
for tool_panel_config in tool_panel_configs:
@@ -90,6 +107,44 @@
config_filenames.append( config_filename )
return config_filenames
+def get_repository_dependencies( app, tool_shed_url, repository_name, repository_owner, changeset_revision ):
+ repository_dependencies_dict = {}
+ tool_shed_accessible = True
+ url = '%s/repository/get_repository_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
+ ( tool_shed_url, repository_name, repository_owner, changeset_revision )
+ try:
+ raw_text = tool_shed_get( app, tool_shed_url, url )
+ tool_shed_accessible = True
+ except Exception, e:
+ tool_shed_accessible = False
+ print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
+ if tool_shed_accessible:
+ if len( raw_text ) > 2:
+ encoded_text = json.from_json_string( raw_text )
+ repository_dependencies_dict = encoding_util.tool_shed_decode( encoded_text )
+ return tool_shed_accessible, repository_dependencies_dict
+
+def get_tool_dependencies( app, tool_shed_url, repository_name, repository_owner, changeset_revision ):
+ tool_dependencies = []
+ tool_shed_accessible = True
+ url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \
+ ( tool_shed_url, repository_name, repository_owner, changeset_revision )
+ try:
+ text = tool_shed_get( app, tool_shed_url, url )
+ tool_shed_accessible = True
+ except Exception, e:
+ tool_shed_accessible = False
+ print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
+ if tool_shed_accessible:
+ if text:
+ tool_dependencies_dict = encoding_util.tool_shed_decode( text )
+ for dependency_key, requirements_dict in tool_dependencies_dict.items():
+ tool_dependency_name = requirements_dict[ 'name' ]
+ tool_dependency_version = requirements_dict[ 'version' ]
+ tool_dependency_type = requirements_dict[ 'type' ]
+ tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type ) )
+ return tool_shed_accessible, tool_dependencies
+
def get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ):
search_str = '://%s' % tool_shed
for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
@@ -98,7 +153,29 @@
shed_url = shed_url.rstrip( '/' )
return shed_url
return None
-
+
+def parse_repository_dependency_tuple( repository_dependency_tuple, contains_error=False ):
+ if contains_error:
+ if len( repository_dependency_tuple ) == 5:
+ # Metadata should have been reset on the repository containing this repository_dependency definition.
+ tool_shed, name, owner, changeset_revision, error = repository_dependency_tuple
+ # Default prior_installation_required to False.
+ prior_installation_required = False
+ elif len( repository_dependency_tuple ) == 6:
+ toolshed, name, owner, changeset_revision, prior_installation_required, error = repository_dependency_tuple
+ prior_installation_required = str( prior_installation_required )
+ return toolshed, name, owner, changeset_revision, prior_installation_required, error
+ else:
+ if len( repository_dependency_tuple ) == 4:
+ # Metadata should have been reset on the repository containing this repository_dependency definition.
+ tool_shed, name, owner, changeset_revision = repository_dependency_tuple
+ # Default prior_installation_required to False.
+ prior_installation_required = False
+ elif len( repository_dependency_tuple ) == 5:
+ tool_shed, name, owner, changeset_revision, prior_installation_required = repository_dependency_tuple
+ prior_installation_required = str( prior_installation_required )
+ return tool_shed, name, owner, changeset_revision, prior_installation_required
+
def tool_shed_get( app, tool_shed_url, uri ):
"""Make contact with the tool shed via the uri provided."""
registry = app.tool_shed_registry
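Several call sites in the files below switch from suc.parse_repository_dependency_tuple to this relocated version. A condensed, standalone restatement of its non-error branch, exercised with the example tuple quoted in a comment elsewhere in this changeset:

    def parse_repository_dependency_tuple(repository_dependency_tuple):
        # Condensed, error-free variant of the parser above: 4-tuples predate
        # prior_installation_required, so the flag defaults to False; 5-tuples
        # carry it explicitly and it is normalized to a string.
        if len(repository_dependency_tuple) == 4:
            tool_shed, name, owner, changeset_revision = repository_dependency_tuple
            prior_installation_required = False
        else:
            tool_shed, name, owner, changeset_revision, prior_installation_required = repository_dependency_tuple
            prior_installation_required = str(prior_installation_required)
        return tool_shed, name, owner, changeset_revision, prior_installation_required

    print(parse_repository_dependency_tuple(
        ['http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab', False]))
    # -> ('http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab', 'False')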
diff -r e97f613a4948dd641e66019e0c5eb72a8660f2e2 -r 89813e684db6ba8582aabc7cd045894d2021c872 lib/tool_shed/util/container_util.py
--- a/lib/tool_shed/util/container_util.py
+++ b/lib/tool_shed/util/container_util.py
@@ -3,6 +3,7 @@
import threading
from galaxy.util import asbool
from galaxy.web.framework.helpers import time_ago
+from tool_shed.util import common_util
from tool_shed.util import readme_util
import tool_shed.util.shed_util_common as suc
@@ -63,7 +64,7 @@
self.repository_dependencies.remove( contained_repository_dependency )
def to_repository_dependency( self, repository_dependency_id ):
- toolshed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( self.key.split( STRSEP ) )
+ toolshed, name, owner, changeset_revision, prior_installation_required = common_util.parse_repository_dependency_tuple( self.key.split( STRSEP ) )
return RepositoryDependency( id=repository_dependency_id,
toolshed=toolshed,
repository_name=name,
@@ -463,7 +464,7 @@
folder_id += 1
invalid_repository_dependency_id += 1
toolshed, name, owner, changeset_revision, prior_installation_required, error = \
- suc.parse_repository_dependency_tuple( invalid_repository_dependency, contains_error=True )
+ common_util.parse_repository_dependency_tuple( invalid_repository_dependency, contains_error=True )
key = generate_repository_dependencies_key_for_repository( toolshed, name, owner, changeset_revision, prior_installation_required )
label = "Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>" % ( name, changeset_revision, owner )
folder = Folder( id=folder_id,
@@ -1336,7 +1337,7 @@
can_create_dependency = not is_subfolder_of( sub_folder, repository_dependency )
if can_create_dependency:
toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required = \
- suc.parse_repository_dependency_tuple( repository_dependency )
+ common_util.parse_repository_dependency_tuple( repository_dependency )
repository_dependency_id += 1
repository_dependency = RepositoryDependency( id=repository_dependency_id,
toolshed=toolshed,
@@ -1352,7 +1353,7 @@
def is_subfolder_of( folder, repository_dependency ):
toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required = \
- suc.parse_repository_dependency_tuple( repository_dependency )
+ common_util.parse_repository_dependency_tuple( repository_dependency )
key = generate_repository_dependencies_key_for_repository( toolshed, repository_name, repository_owner, changeset_revision, asbool( prior_installation_required ) )
for sub_folder in folder.folders:
if key == sub_folder.key:
diff -r e97f613a4948dd641e66019e0c5eb72a8660f2e2 -r 89813e684db6ba8582aabc7cd045894d2021c872 lib/tool_shed/util/encoding_util.py
--- a/lib/tool_shed/util/encoding_util.py
+++ b/lib/tool_shed/util/encoding_util.py
@@ -44,4 +44,4 @@
value = val
a = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value )
b = binascii.hexlify( value )
- return "%s:%s" % ( a, b )
\ No newline at end of file
+ return "%s:%s" % ( a, b )
diff -r e97f613a4948dd641e66019e0c5eb72a8660f2e2 -r 89813e684db6ba8582aabc7cd045894d2021c872 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -354,9 +354,9 @@
Determine if the only difference between rd_tup and a dependency definition in the list of repository_dependencies is the changeset_revision value.
"""
new_metadata_required = False
- rd_tool_shed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required = suc.parse_repository_dependency_tuple( rd_tup )
+ rd_tool_shed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required = common_util.parse_repository_dependency_tuple( rd_tup )
for repository_dependency in repository_dependencies:
- tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( repository_dependency )
+ tool_shed, name, owner, changeset_revision, prior_installation_required = common_util.parse_repository_dependency_tuple( repository_dependency )
if rd_tool_shed == tool_shed and rd_name == name and rd_owner == owner:
# Determine if the repository represented by the dependency tuple is an instance of the repository type TipOnly.
required_repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
diff -r e97f613a4948dd641e66019e0c5eb72a8660f2e2 -r 89813e684db6ba8582aabc7cd045894d2021c872 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -54,7 +54,7 @@
for repository_dependency_components_list in val:
required_repository = None
rd_toolshed, rd_name, rd_owner, rd_changeset_revision, prior_installation_required = \
- suc.parse_repository_dependency_tuple( repository_dependency_components_list )
+ common_util.parse_repository_dependency_tuple( repository_dependency_components_list )
# Get the the tool_shed_repository defined by rd_name, rd_owner and rd_changeset_revision. This is the repository that will be
# required by the current dependent_repository.
# TODO: Check tool_shed_repository.tool_shed as well when repository dependencies across tool sheds is supported.
@@ -254,7 +254,7 @@
invalid_repository_dependencies = invalid_repository_dependencies_dict.get( 'invalid_repository_dependencies', [] )
for repository_dependency_tup in invalid_repository_dependencies:
toolshed, name, owner, changeset_revision, prior_installation_required, error = \
- suc.parse_repository_dependency_tuple( repository_dependency_tup, contains_error=True )
+ common_util.parse_repository_dependency_tuple( repository_dependency_tup, contains_error=True )
if error:
message = '%s ' % str( error )
return message
@@ -279,7 +279,7 @@
if rd_key in [ 'root_key', 'description' ]:
continue
for rd_tup in rd_tups:
- rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required = suc.parse_repository_dependency_tuple( rd_tup )
+ rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required = common_util.parse_repository_dependency_tuple( rd_tup )
if rd_toolshed == toolshed_base_url and \
rd_name == repository.name and \
rd_owner == repository.user.username and \
@@ -370,7 +370,8 @@
for key_rd_dict in key_rd_dicts:
key = key_rd_dict.keys()[ 0 ]
repository_dependency = key_rd_dict[ key ]
- rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required = suc.parse_repository_dependency_tuple( repository_dependency )
+ rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required = \
+ common_util.parse_repository_dependency_tuple( repository_dependency )
if suc.tool_shed_is_this_tool_shed( rd_toolshed ):
repository = suc.get_repository_by_name_and_owner( trans.app, rd_name, rd_owner )
if repository:
@@ -457,7 +458,7 @@
def handle_key_rd_dicts_for_repository( trans, current_repository_key, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts, circular_repository_dependencies ):
key_rd_dict = repository_key_rd_dicts.pop( 0 )
repository_dependency = key_rd_dict[ current_repository_key ]
- toolshed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( repository_dependency )
+ toolshed, name, owner, changeset_revision, prior_installation_required = common_util.parse_repository_dependency_tuple( repository_dependency )
if suc.tool_shed_is_this_tool_shed( toolshed ):
required_repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
required_repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans,
@@ -686,11 +687,11 @@
clean_key_rd_dicts = []
key = key_rd_dicts[ 0 ].keys()[ 0 ]
repository_tup = key.split( container_util.STRSEP )
- rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required = suc.parse_repository_dependency_tuple( repository_tup )
+ rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required = common_util.parse_repository_dependency_tuple( repository_tup )
for key_rd_dict in key_rd_dicts:
k = key_rd_dict.keys()[ 0 ]
repository_dependency = key_rd_dict[ k ]
- toolshed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( repository_dependency )
+ toolshed, name, owner, changeset_revision, prior_installation_required = common_util.parse_repository_dependency_tuple( repository_dependency )
if rd_toolshed == toolshed and rd_name == name and rd_owner == owner:
log.debug( "Removing repository dependency for repository %s owned by %s since it refers to a revision within itself." % ( name, owner ) )
else:
@@ -700,7 +701,7 @@
return clean_key_rd_dicts
def get_repository_dependency_as_key( repository_dependency ):
- tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( repository_dependency )
+ tool_shed, name, owner, changeset_revision, prior_installation_required = common_util.parse_repository_dependency_tuple( repository_dependency )
return container_util.generate_repository_dependencies_key_for_repository( tool_shed, name, owner, changeset_revision, str( prior_installation_required ) )
def get_repository_dependency_by_repository_id( trans, decoded_repository_id ):
diff -r e97f613a4948dd641e66019e0c5eb72a8660f2e2 -r 89813e684db6ba8582aabc7cd045894d2021c872 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -237,8 +237,8 @@
def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
status, current_changeset_revision=None, owner='', dist_to_shed=False ):
"""
- Update a tool shed repository record in the Galaxy database with the new information received. If a record defined by the received tool shed, repository name
- and owner does not exists, create a new record with the received information.
+ Update a tool shed repository record in the Galaxy database with the new information received. If a record defined by the received tool shed,
+ repository name and owner does not exist, create a new record with the received information.
"""
# The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
# to be in the Galaxy distribution, but have been moved to the main Galaxy tool shed.
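The corrected docstring above describes a create-or-update pattern keyed on tool shed, repository name and owner. A rough standalone sketch of that behaviour, using an in-memory dict in place of the Galaxy database, might look like the following (the function name and fields here are illustrative only, not the real create_or_update_tool_shed_repository):

# Illustrative only: an in-memory stand-in for the Galaxy database records.
def create_or_update_repository_record( records, tool_shed, name, owner, **new_info ):
    key = ( tool_shed, name, owner )
    record = records.get( key )
    if record is None:
        # No record for this tool shed / name / owner: create one.
        record = dict( tool_shed=tool_shed, name=name, owner=owner )
        records[ key ] = record
    # New or existing, refresh the record with the received information.
    record.update( new_info )
    return record

records = {}
create_or_update_repository_record( records, 'http://localhost:9009', 'blast_datatypes', 'test', status='Installed' )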
@@ -312,7 +312,7 @@
def generate_clone_url_from_repo_info_tup( repo_info_tup ):
"""Generate teh URL for cloning a repositoyr given a tuple of toolshed, name, owner, changeset_revision."""
# Example tuple: ['http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab', False]
- toolshed, name, owner, changeset_revision, prior_installation_required = parse_repository_dependency_tuple( repo_info_tup )
+ toolshed, name, owner, changeset_revision, prior_installation_required = common_util.parse_repository_dependency_tuple( repo_info_tup )
# Don't include the changeset_revision in clone urls.
return url_join( toolshed, 'repos', owner, name )
@@ -947,7 +947,7 @@
This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies
are being installed. Inspect the received repository_dependencies and determine if the encoded id of each required repository is in the received
tsr_ids. If so, then determine whether that required repository should be imported / installed prior to its dependent repository. Return a list
- of encoded repository ids, each of which is contained in the received list of tsr_ids, and whose associated repositories must be impoerted / installed
+ of encoded repository ids, each of which is contained in the received list of tsr_ids, and whose associated repositories must be imported / installed
prior to the dependent repository associated with the received repository_dependencies.
"""
prior_tsr_ids = []
@@ -956,7 +956,7 @@
if key in [ 'description', 'root_key' ]:
continue
for rd_tup in rd_tups:
- tool_shed, name, owner, changeset_revision, prior_installation_required = parse_repository_dependency_tuple( rd_tup )
+ tool_shed, name, owner, changeset_revision, prior_installation_required = common_util.parse_repository_dependency_tuple( rd_tup )
if asbool( prior_installation_required ):
if trans.webapp.name == 'galaxy':
repository = get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision )
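The docstring in the hunk above describes collecting the ids of required repositories that must be imported or installed before their dependents. A simplified standalone sketch of that ordering idea (not the actual Galaxy implementation, and with hypothetical repository ids):

# Simplified sketch: prior_required_map maps a repository id to the ids that
# must be installed before it, i.e. those flagged prior_installation_required.
def order_tsr_ids( tsr_ids, prior_required_map ):
    ordered = []
    for tsr_id in tsr_ids:
        for prior_id in prior_required_map.get( tsr_id, [] ):
            if prior_id in tsr_ids and prior_id not in ordered:
                ordered.append( prior_id )
        if tsr_id not in ordered:
            ordered.append( tsr_id )
    return ordered

# Hypothetical ids: 'repo_b' must be installed before 'repo_a', so it comes first.
assert order_tsr_ids( [ 'repo_a', 'repo_b' ], { 'repo_a': [ 'repo_b' ] } ) == [ 'repo_b', 'repo_a' ]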
@@ -983,7 +983,7 @@
.order_by( trans.model.RepositoryMetadata.table.c.update_time.desc() ) \
.all()
if len( all_metadata_records ) > 1:
- # Delete all recrds older than the last one updated.
+ # Delete all records older than the last one updated.
for repository_metadata in all_metadata_records[ 1: ]:
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
@@ -1308,28 +1308,6 @@
folder_contents.append( node )
return folder_contents
-def parse_repository_dependency_tuple( repository_dependency_tuple, contains_error=False ):
- if contains_error:
- if len( repository_dependency_tuple ) == 5:
- # Metadata should have been reset on the repository containing this repository_dependency definition.
- tool_shed, name, owner, changeset_revision, error = repository_dependency_tuple
- # Default prior_installation_required to False.
- prior_installation_required = False
- elif len( repository_dependency_tuple ) == 6:
- toolshed, name, owner, changeset_revision, prior_installation_required, error = repository_dependency_tuple
- prior_installation_required = str( prior_installation_required )
- return toolshed, name, owner, changeset_revision, prior_installation_required, error
- else:
- if len( repository_dependency_tuple ) == 4:
- # Metadata should have been reset on the repository containing this repository_dependency definition.
- tool_shed, name, owner, changeset_revision = repository_dependency_tuple
- # Default prior_installation_required to False.
- prior_installation_required = False
- elif len( repository_dependency_tuple ) == 5:
- tool_shed, name, owner, changeset_revision, prior_installation_required = repository_dependency_tuple
- prior_installation_required = str( prior_installation_required )
- return tool_shed, name, owner, changeset_revision, prior_installation_required
-
def pretty_print( dict=None ):
if dict:
return json.to_json_string( dict, sort_keys=True, indent=4 * ' ' )
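Callers in this changeset now reach the relocated helper through common_util (assumed here to be tool_shed.util.common_util, following the naming in the hunks above). A hedged usage sketch, reusing the example tuple from the generate_clone_url_from_repo_info_tup comment:

# Usage sketch only; assumes the relocated helper behaves like the removed
# body above, including the str() coercion of prior_installation_required.
from tool_shed.util import common_util

repo_info_tup = [ 'http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab', False ]
toolshed, name, owner, changeset_revision, prior_installation_required = \
    common_util.parse_repository_dependency_tuple( repo_info_tup )
# toolshed == 'http://localhost:9009', prior_installation_required == 'False'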
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e97f613a4948/
Changeset: e97f613a4948
User: Dave Bouvier
Date: 2013-08-26 15:07:54
Summary: Another instance of the API referencing the wrong attribute.
Affected #: 1 file
diff -r a86290c120d3aafb321f73d5ff57e777e8946a41 -r e97f613a4948dd641e66019e0c5eb72a8660f2e2 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3440,7 +3440,7 @@
value_mapper = {}
rval = {}
try:
- visible_keys = self.__getattribute__( 'api_' + view + '_visible_keys' )
+ visible_keys = self.__getattribute__( 'dict_' + view + '_visible_keys' )
except AttributeError:
raise Exception( 'Unknown API view: %s' % view )
for key in visible_keys:
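The fix above switches the view lookup from an 'api_' prefix to the 'dict_' prefix used by the model's visible-key attributes. A small self-contained sketch of that pattern (the class and view names here are illustrative, not Galaxy's models):

# Illustrative class only; demonstrates resolving per-view visible keys from
# attributes named 'dict_<view>_visible_keys'.
class Example( object ):
    dict_collection_visible_keys = ( 'id', 'name' )
    dict_element_visible_keys = ( 'id', 'name', 'description' )

    def __init__( self, id, name, description ):
        self.id, self.name, self.description = id, name, description

    def to_dict( self, view='collection' ):
        try:
            visible_keys = self.__getattribute__( 'dict_' + view + '_visible_keys' )
        except AttributeError:
            raise Exception( 'Unknown API view: %s' % view )
        return dict( ( key, getattr( self, key, None ) ) for key in visible_keys )

print( Example( 1, 'blast_datatypes', 'example description' ).to_dict( view='element' ) )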
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a86290c120d3/
Changeset: a86290c120d3
User: jgoecks
Date: 2013-08-24 16:31:23
Summary: Fix Sweepster bug that prevented changing parameter tree after running sweeps.
Affected #: 1 file
diff -r 5b4c4cf9bc9619d51d7f2471d64896dcdd93707c -r a86290c120d3aafb321f73d5ff57e777e8946a41 static/scripts/viz/sweepster.js
--- a/static/scripts/viz/sweepster.js
+++ b/static/scripts/viz/sweepster.js
@@ -920,11 +920,16 @@
// Set inputs and run tool.
tool.set_input_values(pm_track.get('settings').get('values'));
$.when(tool.rerun(dataset, regions)).then(function(output) {
- // HACK: output is an HDA with track config attribute. To create a track, rearrange
- // track config to point to HDA.
+ // HACKish: output is an HDA with track config attribute. To create a track
+ // that works correctly with Backbone relational, it is necessary to
+ // use a modified version of the track config.
var dataset = output.first(),
track_config = dataset.get('track_config');
+ // Set dataset to be the tool's output.
track_config.dataset = dataset;
+ // Set tool to null so that it is not unpacked; unpacking it messes with
+ // the tool parameters and parameter tree.
+ track_config.tool = null;
// Create and add track for output dataset.
var track_obj = tracks.object_from_template(track_config, self, null);
Repository URL: https://bitbucket.org/galaxy/galaxy-central/