1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/be29d1334c08/
changeset: be29d1334c08
user: jgoecks
date: 2013-01-09 15:07:58
summary: Add missing import.
affected #: 1 file
diff -r 66bbee3c9c02b343c4ab621d39efa261b261f237 -r be29d1334c08ec3e32290feab9a6da7bda9862ec lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -5,6 +5,7 @@
from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings
from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesVisualizationMixin, VALID_SLUG_RE
from galaxy.web.framework.helpers import time_ago, grids, iff
+from galaxy import util
from galaxy.util.json import from_json_string
from galaxy.util.sanitize_html import sanitize_html
from galaxy.visualization.genomes import decode_dbkey
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/66bbee3c9c02/
changeset: 66bbee3c9c02
user: greg
date: 2013-01-08 22:30:14
summary: Handle repository dependency objects appropriately if the user elected to not install them.
affected #: 2 files
diff -r 2ec609faae49b4ab32190403669849f1af0b68f6 -r 66bbee3c9c02b343c4ab621d39efa261b261f237 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -2,7 +2,7 @@
from galaxy.datatypes import checkers
from galaxy.web import url_for
from galaxy import util
-from galaxy.util.json import from_json_string, to_json_string
+from galaxy.util import json
from galaxy.webapps.community.util import container_util
import shed_util_common as suc
import galaxy.tools
@@ -184,19 +184,20 @@
instance and when uninstalled repositories are being reinstalled.
"""
message = ''
- # There will be a one-to-one mapping between items in created_or_updated_tool_shed_repositories and tool_panel_section_keys.
+ # The following list will be maintained within this method to contain all created or updated tool shed repositories, including repository
+ # dependencies that may not be installed.
+ all_created_or_updated_tool_shed_repositories = []
+ # There will be a one-to-one mapping between items in created_or_updated_tool_shed_repositories and tool_panel_section_keys. The following
+ # list will filter out repository dependencies that are not to be installed.
created_or_updated_tool_shed_repositories = []
tool_panel_section_keys = []
# Repositories will be filtered (e.g., if already installed, etc), so filter the associated repo_info_dicts accordingly.
filtered_repo_info_dicts = []
- if install_repository_dependencies:
- # Discover all repository dependencies and retrieve information for installing them.
- all_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
- if not all_repo_info_dicts:
- # No repository dependencies were discovered so process the received repositories.
- all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
- else:
- # The user chose to not install repository dependencies, so process the received repositories.
+ # Discover all repository dependencies and retrieve information for installing them. Even if the user elected to not install repository dependencies,
+ # we have to make sure all repository dependency objects exist so that the appropriate repository dependency relationships can be built.
+ all_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
+ if not all_repo_info_dicts:
+ # No repository dependencies were discovered so process the received repositories.
all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
for repo_info_dict in all_repo_info_dicts:
for name, repo_info_tuple in repo_info_dict.items():
@@ -214,16 +215,19 @@
tool_panel_section=tool_panel_section,
new_tool_panel_section=new_tool_panel_section )
if reinstalling or install_repository_dependencies:
- if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
- trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
- can_update = True
- name = installed_tool_shed_repository.name
- description = installed_tool_shed_repository.description
- installed_changeset_revision = installed_tool_shed_repository.installed_changeset_revision
- metadata_dict = installed_tool_shed_repository.metadata
- dist_to_shed = installed_tool_shed_repository.dist_to_shed
+ if is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ) or install_repository_dependencies:
+ if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
+ trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ can_update = True
+ name = installed_tool_shed_repository.name
+ description = installed_tool_shed_repository.description
+ installed_changeset_revision = installed_tool_shed_repository.installed_changeset_revision
+ metadata_dict = installed_tool_shed_repository.metadata
+ dist_to_shed = installed_tool_shed_repository.dist_to_shed
+ else:
+ # There is a repository already installed which is a dependency of the repository being reinstalled.
+ can_update = False
else:
- # There is a repository already installed which is a dependency of the repository being reinstalled.
can_update = False
else:
if len( all_repo_info_dicts ) == 1:
@@ -243,14 +247,16 @@
created_or_updated_tool_shed_repositories.append( installed_tool_shed_repository )
tool_panel_section_keys.append( tool_panel_section_key )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts, message
+ elif is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ can_update = True
else:
- can_update = True
+ can_update = False
else:
# A tool shed repository is being installed into a Galaxy instance for the first time. We may have the case where a repository
# is being reinstalled where because the repository being newly installed here may be a dependency of the repository being reinstalled.
can_update = True
installed_changeset_revision = changeset_revision
- metadata_dict={}
+ metadata_dict = {}
dist_to_shed = False
if can_update:
if reinstalling or install_repository_dependencies:
@@ -294,9 +300,14 @@
current_changeset_revision=changeset_revision,
owner=repository_owner,
dist_to_shed=False )
- created_or_updated_tool_shed_repositories.append( tool_shed_repository )
- tool_panel_section_keys.append( tool_panel_section_key )
- filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
+ all_created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+ # Only append the tool_shed_repository to the list of created_or_updated_tool_shed_repositories if it is supposed to be installed.
+ if install_repository_dependencies or is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+ tool_panel_section_keys.append( tool_panel_section_key )
+ filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
+ # Build repository dependency relationships even if the user chose to not install repository dependencies.
+ suc.build_repository_dependency_relationships( trans, all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts, message
def create_repository_dict_for_proprietary_datatypes( tool_shed, name, owner, installed_changeset_revision, tool_dicts, converter_path=None, display_path=None ):
return dict( tool_shed=tool_shed,
@@ -679,6 +690,73 @@
tmp_url = suc.clean_repository_clone_url( repository_clone_url )
tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
return get_repository_owner( tmp_url )
+def get_repository_readme_and_dependencies_for_display( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies ):
+ # If we're installing a single repository, see if it contains a readme or dependencies that we can display.
+ name = repo_info_dict.keys()[ 0 ]
+ repo_info_tuple = repo_info_dict[ name ]
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ # Handle README files.
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
+ ( name, repository_owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ readme_files_dict = json.from_json_string( raw_text )
+ if repository_dependencies:
+ missing_td = {}
+ # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
+ # In this case, a record for the repository will exist in the database with the status of 'New'.
+ repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
+ if repository and repository.metadata:
+ installed_rd, missing_rd = \
+ get_installed_and_missing_repository_dependencies( trans, repository )
+ else:
+ installed_rd, missing_rd = \
+ get_installed_and_missing_repository_dependencies_for_new_install( trans, repository_dependencies )
+ # Discover all repository dependencies and retrieve information for installing them.
+ required_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ # Display tool dependencies defined for each of the repository dependencies.
+ if required_repo_info_dicts:
+ all_tool_dependencies = {}
+ for rid in required_repo_info_dicts:
+ for name, repo_info_tuple in rid.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, rid_installed_td = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if rid_installed_td:
+ for td_key, td_dict in rid_installed_td.items():
+ if td_key not in all_tool_dependencies:
+ all_tool_dependencies[ td_key ] = td_dict
+ if all_tool_dependencies:
+ if installed_td is None:
+ installed_td = {}
+ else:
+ # Move all tool dependencies to the missing_tool_dependencies container.
+ for td_key, td_dict in installed_td.items():
+ if td_key not in missing_td:
+ missing_td[ td_key ] = td_dict
+ installed_td = {}
+ # Discover and categorize all tool dependencies defined for this repository's repository dependencies.
+ required_tool_dependencies, required_missing_tool_dependencies = \
+ get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies )
+ if required_tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_tool_dependencies.items():
+ if td_key not in installed_td:
+ installed_td[ td_key ] = td_dict
+ if required_missing_tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_missing_tool_dependencies.items():
+ if td_key not in missing_td:
+ missing_td[ td_key ] = td_dict
+ else:
+ installed_rd = None
+ missing_rd = None
+ missing_td = None
+ return name, repository_owner, changeset_revision, readme_files_dict, includes_tool_dependencies, installed_rd, missing_rd, installed_td, missing_td
def get_required_repo_info_dicts( tool_shed_url, repo_info_dicts ):
"""
Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
@@ -716,7 +794,7 @@
text = response.read()
response.close()
if text:
- required_repo_info_dict = from_json_string( text )
+ required_repo_info_dict = json.from_json_string( text )
required_repo_info_dicts = []
encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
for encoded_dict_str in encoded_dict_strings:
@@ -1015,6 +1093,18 @@
return False
# Default to copying the file if none of the above are true.
return True
+def is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ """Return True if the received repo_info_dict is contained in the list of received repo_info_dicts."""
+ for name, repo_info_tuple in repo_info_dict.items():
+ for rid in repo_info_dicts:
+ for rid_name, rid_repo_info_tuple in rid.items():
+ if rid_name == name:
+ if len( rid_repo_info_tuple ) == len( repo_info_tuple ):
+ for item in rid_repo_info_tuple:
+ if item not in repo_info_tuple:
+ return False
+ return True
+ return False
def load_installed_datatype_converters( app, installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype converters
app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=installed_repository_dict, deactivate=deactivate )
@@ -1073,7 +1163,7 @@
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
- readme_files_dict = from_json_string( raw_text )
+ readme_files_dict = json.from_json_string( raw_text )
else:
readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
else:
diff -r 2ec609faae49b4ab32190403669849f1af0b68f6 -r 66bbee3c9c02b343c4ab621d39efa261b261f237 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1220,9 +1220,6 @@
message=message,
status='error' ) )
if created_or_updated_tool_shed_repositories:
- if install_repository_dependencies:
- # Build repository dependency relationships.
- suc.build_repository_dependency_relationships( trans, repo_info_dicts, created_or_updated_tool_shed_repositories )
# Handle contained tools.
if includes_tools and ( new_tool_panel_section or tool_panel_section ):
if new_tool_panel_section:
@@ -1290,73 +1287,10 @@
if len( repo_info_dicts ) == 1:
# If we're installing a single repository, see if it contains a readme or dependencies that we can display.
repo_info_dict = repo_info_dicts[ 0 ]
- name = repo_info_dict.keys()[ 0 ]
- repo_info_tuple = repo_info_dict[ name ]
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- # Handle README files.
- url = suc.url_join( tool_shed_url,
- 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( name, repository_owner, changeset_revision ) )
- response = urllib2.urlopen( url )
- raw_text = response.read()
- response.close()
- readme_files_dict = json.from_json_string( raw_text )
- if repository_dependencies:
- missing_tool_dependencies = {}
- # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
- # In this case, a record for the repository will exist in the database with the status of 'New'.
- repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
- if repository and repository.metadata:
- installed_repository_dependencies, missing_repository_dependencies = \
- shed_util.get_installed_and_missing_repository_dependencies( trans, repository )
- else:
- installed_repository_dependencies, missing_repository_dependencies = \
- shed_util.get_installed_and_missing_repository_dependencies_for_new_install( trans, repository_dependencies )
- # Discover all repository dependencies and retrieve information for installing them.
- required_repo_info_dicts = shed_util.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
- # Display tool dependencies defined for each of the repository dependencies.
- if required_repo_info_dicts:
- all_tool_dependencies = {}
- for rid in required_repo_info_dicts:
- for name, repo_info_tuple in rid.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- if tool_dependencies:
- for td_key, td_dict in tool_dependencies.items():
- if td_key not in all_tool_dependencies:
- all_tool_dependencies[ td_key ] = td_dict
- if all_tool_dependencies:
- if tool_dependencies is None:
- tool_dependencies = {}
- else:
- # Move all tool dependencies to the missing_tool_dependencies container.
- for td_key, td_dict in tool_dependencies.items():
- if td_key not in missing_tool_dependencies:
- missing_tool_dependencies[ td_key ] = td_dict
- tool_dependencies = {}
- # Discover and categorize all tool dependencies defined this this repository's repository dependencies.
- required_tool_dependencies, required_missing_tool_dependencies = \
- shed_util.get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies )
- if required_tool_dependencies:
- if not includes_tool_dependencies:
- includes_tool_dependencies = True
- for td_key, td_dict in required_tool_dependencies.items():
- if td_key not in tool_dependencies:
- tool_dependencies[ td_key ] = td_dict
- if required_missing_tool_dependencies:
- if not includes_tool_dependencies:
- includes_tool_dependencies = True
- for td_key, td_dict in required_missing_tool_dependencies.items():
- if td_key not in missing_tool_dependencies:
- missing_tool_dependencies[ td_key ] = td_dict
- else:
- installed_repository_dependencies = None
- missing_repository_dependencies = None
- missing_tool_dependencies = None
- required_repo_info_dicts = None
- # Since we are installing a new repository, no tool dependencies will be considered "missing". Most of the repository contents
- # are set to None since we don't yet know what they are.
+ name, repository_owner, changeset_revision, readme_files_dict, includes_tool_dependencies, \
+ installed_repository_dependencies, missing_repository_dependencies, tool_dependencies, missing_tool_dependencies = \
+ shed_util.get_repository_readme_and_dependencies_for_display( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
+ # Since we are installing a new repository, most of the repository contents are set to None since we don't yet know what they are.
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
toolshed_base_url=tool_shed_url,
repository_name=name,
@@ -1374,7 +1308,7 @@
workflows=None,
new_install=True,
reinstalling=False )
- # We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we're retrieved the list of installed
+ # We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we've retrieved the list of installed
# and missing repository dependencies from the database (2) we're handling the scenario where an error occurred during the installation process,
# so we have a tool_shed_repository record in the database with associated repository dependency records. Since we have the repository
# dependencies in either case, we'll merge the list of missing repository dependencies into the list of installed repository dependencies since
@@ -1489,24 +1423,16 @@
repository_dependencies=repository_dependencies )
repo_info_dicts.append( repo_info_dict )
# Make sure all tool_shed_repository records exist.
- if install_repository_dependencies:
- created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
- shed_util.create_repository_dependency_objects( trans,
- tool_path,
- tool_shed_url,
- repo_info_dicts,
- reinstalling=True,
- install_repository_dependencies=install_repository_dependencies,
- no_changes_checked=no_changes_checked,
- tool_panel_section=tool_panel_section,
- new_tool_panel_section=new_tool_panel_section )
- if len( created_or_updated_tool_shed_repositories ) > 1:
- # Build repository dependency relationships.
- suc.build_repository_dependency_relationships( trans, repo_info_dicts, created_or_updated_tool_shed_repositories )
- else:
- filtered_repo_info_dicts = [ repo_info_dict for repo_info_dict in repo_info_dicts ]
- created_or_updated_tool_shed_repositories = [ tool_shed_repository ]
- tool_panel_section_keys.append( tool_panel_section_key )
+ created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
+ shed_util.create_repository_dependency_objects( trans,
+ tool_path,
+ tool_shed_url,
+ repo_info_dicts,
+ reinstalling=True,
+ install_repository_dependencies=install_repository_dependencies,
+ no_changes_checked=no_changes_checked,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
# Default the selected tool panel location for loading tools included in each newly installed required tool shed repository to the location
# selected for the repository selected for reinstallation.
for index, tps_key in enumerate( tool_panel_section_keys ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2ec609faae49/
changeset: 2ec609faae49
user: greg
date: 2013-01-08 19:50:16
summary: When installing a new repository from the tool shed repository to Galaxy or reinstalling an uninstalled repository, handle tool dependencies defined for repository dependencies defined for the repository being installed.
affected #: 6 files
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -653,6 +653,22 @@
tool_dependencies = None
missing_tool_dependencies = None
return tool_dependencies, missing_tool_dependencies
+def get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies ):
+ """Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories being installed into Galaxy."""
+ # FIXME: this method currently populates and returns only missing tool dependencies since tool dependencies defined for complex repository dependency
+ # relationships is not currently supported. This method should be enhanced to search for installed tool dependencies defined as complex repository
+ # dependency relationships when that feature is implemented.
+ if all_tool_dependencies:
+ tool_dependencies = {}
+ missing_tool_dependencies = {}
+ for td_key, val in all_tool_dependencies.items():
+ # Since we have a new install, missing tool dependencies have never been installed.
+ val[ 'status' ] = trans.model.ToolDependency.installation_status.NEVER_INSTALLED
+ missing_tool_dependencies[ td_key ] = val
+ else:
+ tool_dependencies = None
+ missing_tool_dependencies = None
+ return tool_dependencies, missing_tool_dependencies
def get_repository_owner( cleaned_repository_url ):
items = cleaned_repository_url.split( 'repos' )
repo_path = items[ 1 ]
@@ -667,7 +683,7 @@
"""
Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through
- this methid is required to retrieve all repository dependencies.
+ this method is required to retrieve all repository dependencies.
"""
all_repo_info_dicts = []
if repo_info_dicts:
@@ -1035,7 +1051,7 @@
if k not in [ 'id', 'version', 'name' ]:
return True
return False
-def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False ):
+def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False, required_repo_info_dicts=None ):
"""
Retrieve necessary information from the received repository's metadata to populate the containers_dict for display. This method is called only
from Galaxy (not the tool shed) when displaying repository dependencies for installed repositories and when displaying them for uninstalled
@@ -1064,15 +1080,51 @@
readme_files_dict = None
# Handle repository dependencies.
installed_repository_dependencies, missing_repository_dependencies = get_installed_and_missing_repository_dependencies( trans, repository )
- # Handle tool dependencies.
- all_tool_dependencies = metadata.get( 'tool_dependencies', None )
- installed_tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
+ # Handle the current repository's tool dependencies.
+ repository_tool_dependencies = metadata.get( 'tool_dependencies', None )
+ repository_installed_tool_dependencies, repository_missing_tool_dependencies = \
+ get_installed_and_missing_tool_dependencies( trans, repository, repository_tool_dependencies )
if reinstalling:
- # All tool dependencies will be considered missing since we are reinstalling the repository.
- if installed_tool_dependencies:
- for td in installed_tool_dependencies:
- missing_tool_dependencies.append( td )
- installed_tool_dependencies = None
+ installed_tool_dependencies = None
+ missing_tool_dependencies = None
+ if repository_installed_tool_dependencies is None:
+ repository_installed_tool_dependencies = {}
+ if repository_missing_tool_dependencies is None:
+ repository_missing_tool_dependencies = {}
+ if required_repo_info_dicts:
+ # Handle the tool dependencies defined for each of the repository's repository dependencies.
+ for rid in required_repo_info_dicts:
+ for name, repo_info_tuple in rid.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if tool_dependencies:
+ tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
+ required_repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, name, repository_owner, changeset_revision )
+ if not required_repository:
+ # The required_repository may have been installed with a different changeset revision.
+ required_repository, installed_changeset_revision = repository_was_previously_installed( trans,
+ tool_shed_url,
+ name,
+ repo_info_tuple,
+ clone_dir )
+ if required_repository:
+ required_repository_installed_tool_dependencies, required_repository_missing_tool_dependencies = \
+ get_installed_and_missing_tool_dependencies( trans, required_repository, tool_dependencies )
+ if required_repository_installed_tool_dependencies:
+ for td_key, td_dict in required_repository_installed_tool_dependencies.items():
+ if td_key not in repository_installed_tool_dependencies:
+ repository_installed_tool_dependencies[ td_key ] = td_dict
+ if required_repository_missing_tool_dependencies:
+ for td_key, td_dict in required_repository_missing_tool_dependencies.items():
+ if td_key not in repository_missing_tool_dependencies:
+ repository_missing_tool_dependencies[ td_key ] = td_dict
+ if repository_installed_tool_dependencies:
+ installed_tool_dependencies = repository_installed_tool_dependencies
+ if repository_missing_tool_dependencies:
+ missing_tool_dependencies = repository_missing_tool_dependencies
+ else:
+ installed_tool_dependencies = repository_installed_tool_dependencies
+ missing_tool_dependencies = repository_missing_tool_dependencies
# Handle valid tools.
valid_tools = metadata.get( 'tools', None )
# Handle workflows.
@@ -1092,7 +1144,8 @@
tool_dependencies=installed_tool_dependencies,
valid_tools=valid_tools,
workflows=workflows,
- new_install=False )
+ new_install=False,
+ reinstalling=reinstalling )
else:
containers_dict = dict( datatypes=None,
invalid_tools=None,
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -78,7 +78,7 @@
return readme_files_dict
def build_repository_containers_for_galaxy( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision, repository, datatypes,
invalid_tools, missing_repository_dependencies, missing_tool_dependencies, readme_files_dict,
- repository_dependencies, tool_dependencies, valid_tools, workflows, new_install=False ):
+ repository_dependencies, tool_dependencies, valid_tools, workflows, new_install=False, reinstalling=False ):
"""Return a dictionary of containers for the received repository's dependencies and readme files for display during installation to Galaxy."""
containers_dict = dict( datatypes=None,
invalid_tools=None,
@@ -160,7 +160,8 @@
tool_dependencies,
label=label,
missing=False,
- new_install=new_install )
+ new_install=new_install,
+ reinstalling=reinstalling )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Missing tool dependencies container.
if missing_tool_dependencies:
@@ -170,7 +171,8 @@
missing_tool_dependencies,
label='Missing tool dependencies',
missing=True,
- new_install=new_install )
+ new_install=new_install,
+ reinstalling=reinstalling )
containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
# Valid tools container.
if valid_tools:
@@ -790,7 +792,7 @@
Return a dictionary that includes all of the information needed to install a repository into a local Galaxy instance. The dictionary will also
contain the recursive list of repository dependencies defined for the repository, as well as the defined tool dependencies.
- This method is called from Galaxy from two places:
+ This method is called from Galaxy in two places:
1. During the tool shed repository installation process (via the tool shed's get_repository_information() method)- in this case both the received
repository and repository_metadata will be objects.
2. When a tool shed repository that was uninstalled from a Galaxy instance is being re-installed - in this case, both repository and
@@ -2414,6 +2416,47 @@
removed = True
error_message = ''
return removed, error_message
+def repository_dependencies_have_tool_dependencies( trans, repository_dependencies ):
+ """
+ repository_dependencies':
+ {'http://localhost:9009__ESEP__emboss_6__ESEP__test__ESEP__92bedb60b0c9':
+ [['http://localhost:9009', 'emboss_datatypes', 'test', '27df73fe48a6']],
+ 'root_key': 'http://localhost:9009__ESEP__emboss__ESEP__test__ESEP__06d729cb3f34',
+ 'description': 'required to enable emboss 6 tools',
+ 'http://localhost:9009__ESEP__emboss__ESEP__test__ESEP__06d729cb3f34':
+ [['http://localhost:9009', 'emboss_6', 'test', '92bedb60b0c9']]}}
+ """
+ rd_tups_processed = []
+ for key, rd_tups in repository_dependencies.items():
+ if key in [ 'root_key', 'description' ]:
+ continue
+ rd_tup = container_util.get_components_from_key( key )
+ if rd_tup not in rd_tups_processed:
+ toolshed, name, owner, changeset_revision = rd_tup
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if 'tool_dependencies' in metadata:
+ return True
+ rd_tups_processed.append( rd_tup )
+ for rd_tup in rd_tups:
+ if rd_tup not in rd_tups_processed:
+ toolshed, name, owner, changeset_revision = rd_tup
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if 'tool_dependencies' in metadata:
+ return True
+ rd_tups_processed.append( rd_tup )
+ return False
def reset_all_metadata_on_installed_repository( trans, id ):
"""Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
repository = get_installed_tool_shed_repository( trans, id )
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -786,9 +786,9 @@
operation='preview_tools_in_changeset',
repository_id=repository_id )
self.valid_repository_grid.operations = [ grids.GridOperation( "Preview and install",
- url_args=url_args,
- allow_multiple=False,
- async_compatible=False ) ]
+ url_args=url_args,
+ allow_multiple=False,
+ async_compatible=False ) ]
return self.valid_repository_grid( trans, **kwd )
def __build_allow_push_select_field( self, trans, current_push_list, selected_value='none' ):
options = []
@@ -1410,7 +1410,10 @@
repo_info_dicts=repo_info_dicts )
@web.json
def get_required_repo_info_dict( self, trans, encoded_str ):
- """Retrive a list of dictionaries that each contain all of the information needed to install the list of repositories defined by encoded_str."""
+ """
+ Retrieve and return a dictionary that includes a list of dictionaries that each contain all of the information needed to install the list of
+ repositories defined by the received encoded_str.
+ """
encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str )
encoded_required_repository_tups = encoded_required_repository_str.split( encoding_util.encoding_sep2 )
decoded_required_repository_tups = []
@@ -1430,7 +1433,7 @@
return repo_info_dict
@web.expose
def get_tool_dependencies( self, trans, **kwd ):
- """Handle a request from the InstallManager of a local Galaxy instance."""
+ """Handle a request from a Galaxy instance."""
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -291,7 +291,7 @@
else:
tools_root_folder = None
return folder_id, tools_root_folder
-def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', missing=False, new_install=False ):
+def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', missing=False, new_install=False, reinstalling=False ):
"""Return a folder hierarchy containing tool dependencies."""
# The status will be displayed only if the received value for missing is True. When this is the case, we're in Galaxy (not the tool shed)
# and the tool dependencies are not installed or are in an error state, so they are considered missing. The tool dependency status will be
@@ -304,7 +304,9 @@
folder_id += 1
folder = Folder( id=folder_id, key='tool_dependencies', label=label, parent=tool_dependencies_root_folder )
if trans.webapp.name == 'galaxy':
- if missing:
+ if reinstalling:
+ folder.description = "this repository's tools require handling of these dependencies"
+ elif missing and not reinstalling:
folder.description = 'click the name to install the missing dependency'
else:
if new_install:
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1024,6 +1024,8 @@
decoded_kwd = encoding_util.tool_shed_decode( encoded_kwd )
tsr_ids = decoded_kwd[ 'tool_shed_repository_ids' ]
tool_panel_section_keys = decoded_kwd[ 'tool_panel_section_keys' ]
+ repo_info_dicts = decoded_kwd[ 'repo_info_dicts' ]
+ filtered_repo_info_dicts = []
filtered_tool_panel_section_keys = []
repositories_for_installation = []
for index, tsr_id in enumerate( tsr_ids ):
@@ -1031,8 +1033,10 @@
if repository.status in [ trans.model.ToolShedRepository.installation_status.NEW,
trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
repositories_for_installation.append( repository )
+ filtered_repo_info_dicts.append( repo_info_dicts[ index ] )
filtered_tool_panel_section_keys.append( tool_panel_section_keys[ index ] )
if repositories_for_installation:
+ decoded_kwd[ 'repo_info_dicts' ] = filtered_repo_info_dicts
decoded_kwd[ 'tool_panel_section_keys' ] = filtered_tool_panel_section_keys
self.install_tool_shed_repositories( trans, repositories_for_installation, reinstalling=reinstalling, **decoded_kwd )
else:
@@ -1152,7 +1156,6 @@
return trans.show_error_message( message )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
tool_shed_url = kwd[ 'tool_shed_url' ]
# Handle repository dependencies.
includes_repository_dependencies = util.string_as_bool( kwd.get( 'includes_repository_dependencies', False ) )
@@ -1259,7 +1262,6 @@
shed_tool_conf=shed_tool_conf,
status=status,
tool_path=tool_path,
-
tool_panel_section_keys=tool_panel_section_keys,
tool_shed_repository_ids=tsrids_list,
tool_shed_url=tool_shed_url )
@@ -1300,9 +1302,10 @@
raw_text = response.read()
response.close()
readme_files_dict = json.from_json_string( raw_text )
- # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
- # In this case, a record for the repository will exist in the database with the status of 'New'.
if repository_dependencies:
+ missing_tool_dependencies = {}
+ # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
+ # In this case, a record for the repository will exist in the database with the status of 'New'.
repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
if repository and repository.metadata:
installed_repository_dependencies, missing_repository_dependencies = \
@@ -1310,9 +1313,48 @@
else:
installed_repository_dependencies, missing_repository_dependencies = \
shed_util.get_installed_and_missing_repository_dependencies_for_new_install( trans, repository_dependencies )
+ # Discover all repository dependencies and retrieve information for installing them.
+ required_repo_info_dicts = shed_util.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ # Display tool dependencies defined for each of the repository dependencies.
+ if required_repo_info_dicts:
+ all_tool_dependencies = {}
+ for rid in required_repo_info_dicts:
+ for name, repo_info_tuple in rid.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if tool_dependencies:
+ for td_key, td_dict in tool_dependencies.items():
+ if td_key not in all_tool_dependencies:
+ all_tool_dependencies[ td_key ] = td_dict
+ if all_tool_dependencies:
+ if tool_dependencies is None:
+ tool_dependencies = {}
+ else:
+ # Move all tool dependencies to the missing_tool_dependencies container.
+ for td_key, td_dict in tool_dependencies.items():
+ if td_key not in missing_tool_dependencies:
+ missing_tool_dependencies[ td_key ] = td_dict
+ tool_dependencies = {}
+ # Discover and categorize all tool dependencies defined for this repository's repository dependencies.
+ required_tool_dependencies, required_missing_tool_dependencies = \
+ shed_util.get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies )
+ if required_tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_tool_dependencies.items():
+ if td_key not in tool_dependencies:
+ tool_dependencies[ td_key ] = td_dict
+ if required_missing_tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_missing_tool_dependencies.items():
+ if td_key not in missing_tool_dependencies:
+ missing_tool_dependencies[ td_key ] = td_dict
else:
installed_repository_dependencies = None
missing_repository_dependencies = None
+ missing_tool_dependencies = None
+ required_repo_info_dicts = None
# Since we are installing a new repository, no tool dependencies will be considered "missing". Most of the repository contents
# are set to None since we don't yet know what they are.
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
@@ -1324,13 +1366,14 @@
datatypes=None,
invalid_tools=None,
missing_repository_dependencies=missing_repository_dependencies,
- missing_tool_dependencies=None,
+ missing_tool_dependencies=missing_tool_dependencies,
readme_files_dict=readme_files_dict,
repository_dependencies=installed_repository_dependencies,
tool_dependencies=tool_dependencies,
valid_tools=None,
workflows=None,
- new_install=True )
+ new_install=True,
+ reinstalling=False )
# We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we're retrieved the list of installed
# and missing repository dependencies from the database (2) we're handling the scenario where an error occurred during the installation process,
# so we have a tool_shed_repository record in the database with associated repository dependency records. Since we have the repository
@@ -1338,6 +1381,7 @@
# each displayed repository dependency will display a status, whether installed or missing.
containers_dict = suc.merge_missing_repository_dependencies_to_installed_container( containers_dict )
else:
+ # FIXME: support the installation of repository dependencies and tool dependencies for a list of tool shed repositories being installed.
containers_dict = dict( datatypes=None,
invalid_tools=None,
readme_files_dict=None,
@@ -1395,6 +1439,8 @@
tool_panel_section_key = None
tool_panel_section_keys = []
metadata = tool_shed_repository.metadata
+ # Keep track of tool dependencies defined for the current repository or those defined for any of its repository dependencies.
+ includes_tool_dependencies = tool_shed_repository.includes_tool_dependencies
if tool_shed_repository.includes_tools:
# Handle the selected tool panel location for loading tools included in the tool shed repository.
tool_section, new_tool_panel_section, tool_panel_section_key = \
@@ -1422,8 +1468,7 @@
if isinstance( repo_info_dict, basestring ):
repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
else:
- # Entering this else block used to happen only if the tool_shed_repository does not include any valid tools. After repository dependencies
- # were introduced, it may never happen, but we'll keep the block just in case.
+ # Entering this else block occurs only if the tool_shed_repository does not include any valid tools.
if install_repository_dependencies:
repository_dependencies = self.get_repository_dependencies( trans=trans,
repository_id=repository_id,
@@ -1462,13 +1507,13 @@
filtered_repo_info_dicts = [ repo_info_dict for repo_info_dict in repo_info_dicts ]
created_or_updated_tool_shed_repositories = [ tool_shed_repository ]
tool_panel_section_keys.append( tool_panel_section_key )
- # Defaulot the selected tool panel location for loading tools included in each newly installed required tool shed repository to the location
+ # Default the selected tool panel location for loading tools included in each newly installed required tool shed repository to the location
# selected for the repository selected for reinstallation.
for index, tps_key in enumerate( tool_panel_section_keys ):
if tps_key is None:
tool_panel_section_keys[ index ] = tool_panel_section_key
encoded_repository_ids = [ trans.security.encode_id( r.id ) for r in created_or_updated_tool_shed_repositories ]
- new_kwd = dict( includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies,
+ new_kwd = dict( includes_tool_dependencies=includes_tool_dependencies,
includes_tools=tool_shed_repository.includes_tools,
install_tool_dependencies=install_tool_dependencies,
repo_info_dicts=filtered_repo_info_dicts,
@@ -1539,13 +1584,17 @@
repository_metadata=None,
metadata=metadata,
repository_dependencies=repository_dependencies )
+ if includes_repository_dependencies:
+ # Discover all repository dependencies and retrieve information for installing them.
+ required_repo_info_dicts = shed_util.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ else:
+ required_repo_info_dicts = None
# Get the location in the tool panel in which the tool was originally loaded.
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
if tool_panel_dict:
if shed_util.panel_entry_per_tool( tool_panel_dict ):
- # TODO: Fix this to handle the case where the tools are distributed across in more than 1 ToolSection. The
- # following assumes everything was loaded into 1 section (or no section) in the tool panel.
+ # The following forces everything to be loaded into 1 section (or no section) in the tool panel.
tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
tool_section_dict = tool_section_dicts[ 0 ]
original_section_name = tool_section_dict[ 'name' ]
@@ -1567,7 +1616,14 @@
message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section. "
status = 'warning'
- containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, tool_shed_repository, reinstalling=True )
+ # Populate the containers_dict from the metadata for the tool shed repository we're reinstalling, but make sure to include tool dependencies defined for
+ # all of the repository's repository dependencies.
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ repository=tool_shed_repository,
+ reinstalling=True,
+ required_repo_info_dicts=required_repo_info_dicts )
# Since we're reinstalling we'll merge the list of missing repository dependencies into the list of installed repository dependencies since each displayed
# repository dependency will display a status, whether installed or missing.
containers_dict = suc.merge_missing_repository_dependencies_to_installed_container( containers_dict )
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -91,6 +91,7 @@
repository_dependencies_root_folder = containers_dict[ 'repository_dependencies' ]
tool_dependencies_root_folder = containers_dict[ 'tool_dependencies' ]
+ missing_tool_dependencies_root_folder = containers_dict[ 'missing_tool_dependencies' ]
env_settings_heaader_row_displayed = False
package_header_row_displayed = False
%>
@@ -122,7 +123,7 @@
<div style="clear: both"></div></div>
%endif
- %if tool_dependencies_root_folder:
+ %if tool_dependencies_root_folder or missing_tool_dependencies_root_folder:
%if install_tool_dependencies_check_box is not None:
<div class="form-row"><label>Handle tool dependencies?</label>
@@ -138,14 +139,26 @@
</div><div style="clear: both"></div>
%endif
- <div class="form-row">
- <p/>
- <% row_counter = RowCounter() %>
- <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="dependency_table">
- ${render_folder( tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
- </table>
- <div style="clear: both"></div>
- </div>
+ %if tool_dependencies_root_folder:
+ <div class="form-row">
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="dependency_table">
+ ${render_folder( tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ %if missing_tool_dependencies_root_folder:
+ <div class="form-row">
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="dependency_table">
+ ${render_folder( missing_tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ %endif
%endif
</%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/530fb4f8204f/
changeset: 530fb4f8204f
user: dan
date: 2013-01-08 16:41:32
summary: Fix for grouping parameters value_from_basic when ignore_errors is True. Fixes issue seen in workflows and rerun where an invalid stored value (e.g. due to changing parameter types) is provided.
affected #: 1 file
diff -r 94bfcccdaf0625448e623d1d88ba9a1b7785ee78 -r 530fb4f8204f2106e11f419c381fd53d2319ce24 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -68,21 +68,25 @@
return rval
def value_from_basic( self, value, app, ignore_errors=False ):
rval = []
- for i, d in enumerate( value ):
- rval_dict = {}
- # If the special __index__ key is not set, create it (for backward
- # compatibility)
- rval_dict['__index__'] = d.get( '__index__', i )
- # Restore child inputs
- for input in self.inputs.itervalues():
- if ignore_errors and input.name not in d:
- # If we do not have a value, and are ignoring errors, we simply
- # do nothing. There will be no value for the parameter in the
- # conditional's values dictionary.
- pass
- else:
- rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors )
- rval.append( rval_dict )
+ try:
+ for i, d in enumerate( value ):
+ rval_dict = {}
+ # If the special __index__ key is not set, create it (for backward
+ # compatibility)
+ rval_dict['__index__'] = d.get( '__index__', i )
+ # Restore child inputs
+ for input in self.inputs.itervalues():
+ if ignore_errors and input.name not in d:
+ # If we do not have a value, and are ignoring errors, we simply
+ # do nothing. There will be no value for the parameter in the
+ # conditional's values dictionary.
+ pass
+ else:
+ rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors )
+ rval.append( rval_dict )
+ except Exception, e:
+ if not ignore_errors:
+ raise e
return rval
def visit_inputs( self, prefix, value, callback ):
for i, d in enumerate( value ):
@@ -441,24 +445,28 @@
return rval
def value_from_basic( self, value, app, ignore_errors=False ):
rval = dict()
- current_case = rval['__current_case__'] = value['__current_case__']
- # Test param
- if ignore_errors and self.test_param.name not in value:
- # If ignoring errors, do nothing. However this is potentially very
- # problematic since if we are missing the value of test param,
- # the entire conditional is wrong.
- pass
- else:
- rval[ self.test_param.name ] = self.test_param.value_from_basic( value[ self.test_param.name ], app, ignore_errors )
- # Inputs associated with current case
- for input in self.cases[current_case].inputs.itervalues():
- if ignore_errors and input.name not in value:
- # If we do not have a value, and are ignoring errors, we simply
- # do nothing. There will be no value for the parameter in the
- # conditional's values dictionary.
+ try:
+ current_case = rval['__current_case__'] = value['__current_case__']
+ # Test param
+ if ignore_errors and self.test_param.name not in value:
+ # If ignoring errors, do nothing. However this is potentially very
+ # problematic since if we are missing the value of test param,
+ # the entire conditional is wrong.
pass
else:
- rval[ input.name ] = input.value_from_basic( value[ input.name ], app, ignore_errors )
+ rval[ self.test_param.name ] = self.test_param.value_from_basic( value[ self.test_param.name ], app, ignore_errors )
+ # Inputs associated with current case
+ for input in self.cases[current_case].inputs.itervalues():
+ if ignore_errors and input.name not in value:
+ # If we do not have a value, and are ignoring errors, we simply
+ # do nothing. There will be no value for the parameter in the
+ # conditional's values dictionary.
+ pass
+ else:
+ rval[ input.name ] = input.value_from_basic( value[ input.name ], app, ignore_errors )
+ except Exception, e:
+ if not ignore_errors:
+ raise e
return rval
def visit_inputs( self, prefix, value, callback ):
current_case = value['__current_case__']
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/94bfcccdaf06/
changeset: 94bfcccdaf06
user: jgoecks
date: 2013-01-08 15:25:16
summary: Fix bug in bowtie2 wrapper that prevented wrapper from working with old versions of samtools.
affected #: 1 file
diff -r 989c789fbc43b0d9960655752c6ba396ac7618af -r 94bfcccdaf0625448e623d1d88ba9a1b7785ee78 tools/sr_mapping/bowtie2_wrapper.py
--- a/tools/sr_mapping/bowtie2_wrapper.py
+++ b/tools/sr_mapping/bowtie2_wrapper.py
@@ -71,7 +71,7 @@
index_path = options.index_path
# Build bowtie command; use view and sort to create sorted bam.
- cmd = 'bowtie2 %s -x %s %s | samtools view -Su - | samtools sort -o - sorted > %s'
+ cmd = 'bowtie2 %s -x %s %s | samtools view -Su - | samtools sort -o - - > %s'
# Set up reads.
if options.single_paired == 'paired':
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/989c789fbc43/
changeset: 989c789fbc43
user: james_taylor
date: 2013-01-07 22:25:54
summary: comment out two eggs from eggs.ini for now
affected #: 1 file
diff -r d8606b61644a09719a8042efd2b23b7aa7869ff2 -r 989c789fbc43b0d9960655752c6ba396ac7618af eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -29,7 +29,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
-msgpack_python = 0.2.4
+; msgpack_python = 0.2.4
[eggs:noplatform]
amqplib = 0.6.1
@@ -66,7 +66,7 @@
Babel = 0.9.4
wchartype = 0.1
Whoosh = 0.3.18
-fluent_logger = 0.3.3
+; fluent_logger = 0.3.3
; extra version information
[tags]
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d8606b61644a/
changeset: d8606b61644a
user: inithello
date: 2013-01-07 22:20:23
summary: Revert fix from 8530:df20658ac499.
affected #: 1 file
diff -r 09cf284087021586ad08656b7ea444959c6c49bf -r d8606b61644a09719a8042efd2b23b7aa7869ff2 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -848,7 +848,8 @@
tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
else:
tool_section = None
- repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ if isinstance( repo_info_dict, basestring ):
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
# Clone each repository to the configured location.
shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
@@ -1418,7 +1419,8 @@
repo_info_dicts = []
repo_info_dict = kwd.get( 'repo_info_dict', None )
if repo_info_dict:
- repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ if isinstance( repo_info_dict, basestring ):
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
else:
# Entering this else block used to happen only if the tool_shed_repository does not include any valid tools. After repository dependencies
# were introduced, it may never happen, but we'll keep the block just in case.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.