galaxy-commits
commit/galaxy-central: greg: Fix for correctly positioning tool shed repository tools in the proper tool panel location when the repository is being installed or reinstalled.
by Bitbucket 09 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/857a60e69c7a/
changeset: 857a60e69c7a
user: greg
date: 2013-01-09 15:20:58
summary: Fix for correctly positioning tool shed repository tools in the proper tool panel location when the repository is being installed or reinstalled.
affected #: 1 file
diff -r be29d1334c08ec3e32290feab9a6da7bda9862ec -r 857a60e69c7aee92f67f37311a40a825b40a6481 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -184,17 +184,17 @@
instance and when uninstalled repositories are being reinstalled.
"""
message = ''
- # The following list will be maintained within this method to contain all created or updated tool shed repositories, including repository
- # dependencies that may not be installed.
+ # The following list will be maintained within this method to contain all created or updated tool shed repositories, including repository dependencies
+ # that may not be installed.
all_created_or_updated_tool_shed_repositories = []
- # There will be a one-to-one mapping between items in created_or_updated_tool_shed_repositories and tool_panel_section_keys. The following
- # list will filter out repository dependencies that are not to be installed.
+ # There will be a one-to-one mapping between items in 3 lists: created_or_updated_tool_shed_repositories, tool_panel_section_keys and filtered_repo_info_dicts.
+ # The 3 lists will filter out repository dependencies that are not to be installed.
created_or_updated_tool_shed_repositories = []
tool_panel_section_keys = []
- # Repositories will be filtered (e.g., if already installed, etc), so filter the associated repo_info_dicts accordingly.
+ # Repositories will be filtered (e.g., if already installed, if elected to not be installed, etc), so filter the associated repo_info_dicts accordingly.
filtered_repo_info_dicts = []
- # Discover all repository dependencies and retrieve information for installing them. Even if the user elected to not install repository dependencies,
- # we have to make sure all repository dependency objects exist so that the appropriate repository dependency relationships can be built.
+ # Discover all repository dependencies and retrieve information for installing them. Even if the user elected to not install repository dependencies we have
+ # to make sure all repository dependency objects exist so that the appropriate repository dependency relationships can be built.
all_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
if not all_repo_info_dicts:
# No repository dependencies were discovered so process the received repositories.
@@ -215,9 +215,12 @@
tool_panel_section=tool_panel_section,
new_tool_panel_section=new_tool_panel_section )
if reinstalling or install_repository_dependencies:
+ # If the user elected to install repository dependencies, all items in the all_repo_info_dicts list will be processed. However, if
+ # repository dependencies are not to be installed, only those items contained in the received repo_info_dicts list will be processed.
if is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ) or install_repository_dependencies:
if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ # The current tool shed repository is not currently installed, so we can update its record in the database.
can_update = True
name = installed_tool_shed_repository.name
description = installed_tool_shed_repository.description
@@ -225,13 +228,19 @@
metadata_dict = installed_tool_shed_repository.metadata
dist_to_shed = installed_tool_shed_repository.dist_to_shed
else:
- # There is a repository already installed which is a dependency of the repository being reinstalled.
+ # The tool shed repository currently being processed is already installed or is in the process of being installed, so its record
+ # in the database cannot be updated.
can_update = False
else:
+ # This block will be reached only if reinstalling is True, install_repository_dependencies is False and is_in_repo_info_dicts is False.
+ # The tool shed repository currently being processed must be a repository dependency that the user elected to not install, so its
+ # record in the database cannot be updated.
can_update = False
else:
+ # This block will be reached only if reinstalling is False and install_repository_dependencies is False. This implies that the tool shed
+ # repository currently being processed has already been installed.
if len( all_repo_info_dicts ) == 1:
- # An attempt is being made to install a tool shed repository into a Galaxy instance when the same repository was previously installed.
+ # If only a single repository is being installed, return an informative message to the user.
message += "Revision <b>%s</b> of tool shed repository <b>%s</b> owned by <b>%s</b> " % ( changeset_revision, name, repository_owner )
if installed_changeset_revision != changeset_revision:
message += "was previously installed using changeset revision <b>%s</b>. " % installed_changeset_revision
@@ -247,26 +256,24 @@
created_or_updated_tool_shed_repositories.append( installed_tool_shed_repository )
tool_panel_section_keys.append( tool_panel_section_key )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts, message
- elif is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
- can_update = True
else:
+ # We're in the process of installing multiple tool shed repositories into Galaxy. Since the repository currently being processed
+ # has already been installed, skip it and process the next repository in the list.
can_update = False
else:
- # A tool shed repository is being installed into a Galaxy instance for the first time. We may have the case where a repository
- # is being reinstalled where because the repository being newly installed here may be a dependency of the repository being reinstalled.
+ # A tool shed repository is being installed into a Galaxy instance for the first time, or a previous attempt to install or reinstall it resulted
+ # in an error. In the latter case, the repository record in the database has no metadata and its status has been set to 'New'. In either case,
+ # the repository's database record may be updated.
can_update = True
installed_changeset_revision = changeset_revision
metadata_dict = {}
dist_to_shed = False
if can_update:
+ # The database record for the tool shed repository currently being processed can be updated.
if reinstalling or install_repository_dependencies:
# Get the repository metadata to see where it was previously located in the tool panel.
- installed_tool_shed_repository = suc.get_repository_for_dependency_relationship( app=trans.app,
- tool_shed=tool_shed_url,
- name=name,
- owner=repository_owner,
- changeset_revision=changeset_revision )
if installed_tool_shed_repository:
+ # The tool shed repository status is one of 'New', 'Uninstalled', or 'Error'.
tool_section, new_tool_panel_section, tool_panel_section_key = \
handle_tool_panel_selection( trans=trans,
metadata=installed_tool_shed_repository.metadata,
@@ -274,6 +281,8 @@
tool_panel_section=tool_panel_section,
new_tool_panel_section=new_tool_panel_section )
else:
+ # We're installing a new tool shed repository that does not yet have a database record. This repository is a repository dependency
+ # of a different repository being installed.
if new_tool_panel_section:
section_id = new_tool_panel_section.lower().replace( ' ', '_' )
tool_panel_section_key = 'section_%s' % str( section_id )
@@ -282,6 +291,7 @@
else:
tool_panel_section_key = None
else:
+ # We're installing a new tool shed repository that does not yet have a database record.
if new_tool_panel_section:
section_id = new_tool_panel_section.lower().replace( ' ', '_' )
tool_panel_section_key = 'section_%s' % str( section_id )
@@ -300,9 +310,11 @@
current_changeset_revision=changeset_revision,
owner=repository_owner,
dist_to_shed=False )
+ # Add the processed tool shed repository to the list of all processed repositories maintained within this method.
all_created_or_updated_tool_shed_repositories.append( tool_shed_repository )
- # Only append the tool_shed_repository to the list of created_or_updated_tool_shed_repositories if it is supposed to be installed.
+ # Only append the tool shed repository to the list of created_or_updated_tool_shed_repositories if it is supposed to be installed.
if install_repository_dependencies or is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ # Keep the one-to-one mapping between items in 3 lists.
created_or_updated_tool_shed_repositories.append( tool_shed_repository )
tool_panel_section_keys.append( tool_panel_section_key )
filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
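A note on the tool panel section keys this patch positions tools under: the convention, visible in the diff above, is to lowercase a new section's name, replace spaces with underscores, and prefix the result with 'section_'. The helper below is a hypothetical standalone distillation of that convention for illustration, not Galaxy's actual API:

def generate_tool_panel_section_key( new_tool_panel_section=None ):
    # Derive a stable key from a section name, as the diff above does inline;
    # return None when the tools are to be loaded outside any section.
    if new_tool_panel_section:
        section_id = new_tool_panel_section.lower().replace( ' ', '_' )
        return 'section_%s' % section_id
    return None

assert generate_tool_panel_section_key( 'EMBOSS Tools' ) == 'section_emboss_tools'
assert generate_tool_panel_section_key() is None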
commit/galaxy-central: jgoecks: Add missing import.
by Bitbucket 09 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/be29d1334c08/
changeset: be29d1334c08
user: jgoecks
date: 2013-01-09 15:07:58
summary: Add missing import.
affected #: 1 file
diff -r 66bbee3c9c02b343c4ab621d39efa261b261f237 -r be29d1334c08ec3e32290feab9a6da7bda9862ec lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -5,6 +5,7 @@
from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings
from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesVisualizationMixin, VALID_SLUG_RE
from galaxy.web.framework.helpers import time_ago, grids, iff
+from galaxy import util
from galaxy.util.json import from_json_string
from galaxy.util.sanitize_html import sanitize_html
from galaxy.visualization.genomes import decode_dbkey
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
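Because Python resolves module-level names at call time, the bug fixed here (a reference to util with no corresponding import) raises NameError only when the affected code path actually runs, which is how it slipped in. A minimal self-contained sketch of that failure mode, with illustrative names rather than the real controller code:

def parse_ids( ids ):
    # 'util' is never imported in this module, so calling this function
    # raises NameError: name 'util' is not defined.
    return util.listify( ids )

try:
    parse_ids( 'a,b' )
except NameError as e:
    print( e )  # fixed in visualization.py by adding: from galaxy import util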
commit/galaxy-central: greg: Handle repository dependency objects appropriately if the user elected to not install them.
by Bitbucket 08 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/66bbee3c9c02/
changeset: 66bbee3c9c02
user: greg
date: 2013-01-08 22:30:14
summary: Handle repository dependency objects appropriately if the user elected to not install them.
affected #: 2 files
diff -r 2ec609faae49b4ab32190403669849f1af0b68f6 -r 66bbee3c9c02b343c4ab621d39efa261b261f237 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -2,7 +2,7 @@
from galaxy.datatypes import checkers
from galaxy.web import url_for
from galaxy import util
-from galaxy.util.json import from_json_string, to_json_string
+from galaxy.util import json
from galaxy.webapps.community.util import container_util
import shed_util_common as suc
import galaxy.tools
@@ -184,19 +184,20 @@
instance and when uninstalled repositories are being reinstalled.
"""
message = ''
- # There will be a one-to-one mapping between items in created_or_updated_tool_shed_repositories and tool_panel_section_keys.
+ # The following list will be maintained within this method to contain all created or updated tool shed repositories, including repository
+ # dependencies that may not be installed.
+ all_created_or_updated_tool_shed_repositories = []
+ # There will be a one-to-one mapping between items in created_or_updated_tool_shed_repositories and tool_panel_section_keys. The following
+ # list will filter out repository dependencies that are not to be installed.
created_or_updated_tool_shed_repositories = []
tool_panel_section_keys = []
# Repositories will be filtered (e.g., if already installed, etc), so filter the associated repo_info_dicts accordingly.
filtered_repo_info_dicts = []
- if install_repository_dependencies:
- # Discover all repository dependencies and retrieve information for installing them.
- all_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
- if not all_repo_info_dicts:
- # No repository dependencies were discovered so process the received repositories.
- all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
- else:
- # The user chose to not install repository dependencies, so process the received repositories.
+ # Discover all repository dependencies and retrieve information for installing them. Even if the user elected to not install repository dependencies,
+ # we have to make sure all repository dependency objects exist so that the appropriate repository dependency relationships can be built.
+ all_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
+ if not all_repo_info_dicts:
+ # No repository dependencies were discovered so process the received repositories.
all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
for repo_info_dict in all_repo_info_dicts:
for name, repo_info_tuple in repo_info_dict.items():
@@ -214,16 +215,19 @@
tool_panel_section=tool_panel_section,
new_tool_panel_section=new_tool_panel_section )
if reinstalling or install_repository_dependencies:
- if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
- trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
- can_update = True
- name = installed_tool_shed_repository.name
- description = installed_tool_shed_repository.description
- installed_changeset_revision = installed_tool_shed_repository.installed_changeset_revision
- metadata_dict = installed_tool_shed_repository.metadata
- dist_to_shed = installed_tool_shed_repository.dist_to_shed
+ if is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ) or install_repository_dependencies:
+ if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
+ trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ can_update = True
+ name = installed_tool_shed_repository.name
+ description = installed_tool_shed_repository.description
+ installed_changeset_revision = installed_tool_shed_repository.installed_changeset_revision
+ metadata_dict = installed_tool_shed_repository.metadata
+ dist_to_shed = installed_tool_shed_repository.dist_to_shed
+ else:
+ # There is a repository already installed which is a dependency of the repository being reinstalled.
+ can_update = False
else:
- # There is a repository already installed which is a dependency of the repository being reinstalled.
can_update = False
else:
if len( all_repo_info_dicts ) == 1:
@@ -243,14 +247,16 @@
created_or_updated_tool_shed_repositories.append( installed_tool_shed_repository )
tool_panel_section_keys.append( tool_panel_section_key )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts, message
+ elif is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ can_update = True
else:
- can_update = True
+ can_update = False
else:
# A tool shed repository is being installed into a Galaxy instance for the first time. We may have the case where a repository
# is being reinstalled where because the repository being newly installed here may be a dependency of the repository being reinstalled.
can_update = True
installed_changeset_revision = changeset_revision
- metadata_dict={}
+ metadata_dict = {}
dist_to_shed = False
if can_update:
if reinstalling or install_repository_dependencies:
@@ -294,9 +300,14 @@
current_changeset_revision=changeset_revision,
owner=repository_owner,
dist_to_shed=False )
- created_or_updated_tool_shed_repositories.append( tool_shed_repository )
- tool_panel_section_keys.append( tool_panel_section_key )
- filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
+ all_created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+ # Only append the tool_shed_repository to the list of created_or_updated_tool_shed_repositories if it is supposed to be installed.
+ if install_repository_dependencies or is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+ tool_panel_section_keys.append( tool_panel_section_key )
+ filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
+ # Build repository dependency relationships even if the user chose to not install repository dependencies.
+ suc.build_repository_dependency_relationships( trans, all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts, message
def create_repository_dict_for_proprietary_datatypes( tool_shed, name, owner, installed_changeset_revision, tool_dicts, converter_path=None, display_path=None ):
return dict( tool_shed=tool_shed,
@@ -679,6 +690,73 @@
tmp_url = suc.clean_repository_clone_url( repository_clone_url )
tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
return get_repository_owner( tmp_url )
+def get_repository_readme_and_dependencies_for_display( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies ):
+ # If we're installing a single repository, see if it contains a readme or dependencies that we can display.
+ name = repo_info_dict.keys()[ 0 ]
+ repo_info_tuple = repo_info_dict[ name ]
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ # Handle README files.
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
+ ( name, repository_owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ readme_files_dict = json.from_json_string( raw_text )
+ if repository_dependencies:
+ missing_td = {}
+ # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
+ # In this case, a record for the repository will exist in the database with the status of 'New'.
+ repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
+ if repository and repository.metadata:
+ installed_rd, missing_rd = \
+ get_installed_and_missing_repository_dependencies( trans, repository )
+ else:
+ installed_rd, missing_rd = \
+ get_installed_and_missing_repository_dependencies_for_new_install( trans, repository_dependencies )
+ # Discover all repository dependencies and retrieve information for installing them.
+ required_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ # Display tool dependencies defined for each of the repository dependencies.
+ if required_repo_info_dicts:
+ all_tool_dependencies = {}
+ for rid in required_repo_info_dicts:
+ for name, repo_info_tuple in rid.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, rid_installed_td = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if rid_installed_td:
+ for td_key, td_dict in rid_installed_td.items():
+ if td_key not in all_tool_dependencies:
+ all_tool_dependencies[ td_key ] = td_dict
+ if all_tool_dependencies:
+ if installed_td is None:
+ installed_td = {}
+ else:
+ # Move all tool dependencies to the missing_tool_dependencies container.
+ for td_key, td_dict in installed_td.items():
+ if td_key not in missing_td:
+ missing_td[ td_key ] = td_dict
+ installed_td = {}
+ # Discover and categorize all tool dependencies defined for this repository's repository dependencies.
+ required_tool_dependencies, required_missing_tool_dependencies = \
+ get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies )
+ if required_tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_tool_dependencies.items():
+ if td_key not in installed_td:
+ installed_td[ td_key ] = td_dict
+ if required_missing_tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_missing_tool_dependencies.items():
+ if td_key not in missing_td:
+ missing_td[ td_key ] = td_dict
+ else:
+ installed_rd = None
+ missing_rd = None
+ missing_td = None
+ return name, repository_owner, changeset_revision, readme_files_dict, includes_tool_dependencies, installed_rd, missing_rd, installed_td, missing_td
def get_required_repo_info_dicts( tool_shed_url, repo_info_dicts ):
"""
Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
@@ -716,7 +794,7 @@
text = response.read()
response.close()
if text:
- required_repo_info_dict = from_json_string( text )
+ required_repo_info_dict = json.from_json_string( text )
required_repo_info_dicts = []
encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
for encoded_dict_str in encoded_dict_strings:
@@ -1015,6 +1093,18 @@
return False
# Default to copying the file if none of the above are true.
return True
+def is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ """Return True if the received repo_info_dict is contained in the list of received repo_info_dicts."""
+ for name, repo_info_tuple in repo_info_dict.items():
+ for rid in repo_info_dicts:
+ for rid_name, rid_repo_info_tuple in rid.items():
+ if rid_name == name:
+ if len( rid_repo_info_tuple ) == len( repo_info_tuple ):
+ for item in rid_repo_info_tuple:
+ if item not in repo_info_tuple:
+ return False
+ return True
+ return False
def load_installed_datatype_converters( app, installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype converters
app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=installed_repository_dict, deactivate=deactivate )
@@ -1073,7 +1163,7 @@
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
- readme_files_dict = from_json_string( raw_text )
+ readme_files_dict = json.from_json_string( raw_text )
else:
readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
else:
diff -r 2ec609faae49b4ab32190403669849f1af0b68f6 -r 66bbee3c9c02b343c4ab621d39efa261b261f237 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1220,9 +1220,6 @@
message=message,
status='error' ) )
if created_or_updated_tool_shed_repositories:
- if install_repository_dependencies:
- # Build repository dependency relationships.
- suc.build_repository_dependency_relationships( trans, repo_info_dicts, created_or_updated_tool_shed_repositories )
# Handle contained tools.
if includes_tools and ( new_tool_panel_section or tool_panel_section ):
if new_tool_panel_section:
@@ -1290,73 +1287,10 @@
if len( repo_info_dicts ) == 1:
# If we're installing a single repository, see if it contains a readme or dependencies that we can display.
repo_info_dict = repo_info_dicts[ 0 ]
- name = repo_info_dict.keys()[ 0 ]
- repo_info_tuple = repo_info_dict[ name ]
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- # Handle README files.
- url = suc.url_join( tool_shed_url,
- 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( name, repository_owner, changeset_revision ) )
- response = urllib2.urlopen( url )
- raw_text = response.read()
- response.close()
- readme_files_dict = json.from_json_string( raw_text )
- if repository_dependencies:
- missing_tool_dependencies = {}
- # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
- # In this case, a record for the repository will exist in the database with the status of 'New'.
- repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
- if repository and repository.metadata:
- installed_repository_dependencies, missing_repository_dependencies = \
- shed_util.get_installed_and_missing_repository_dependencies( trans, repository )
- else:
- installed_repository_dependencies, missing_repository_dependencies = \
- shed_util.get_installed_and_missing_repository_dependencies_for_new_install( trans, repository_dependencies )
- # Discover all repository dependencies and retrieve information for installing them.
- required_repo_info_dicts = shed_util.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
- # Display tool dependencies defined for each of the repository dependencies.
- if required_repo_info_dicts:
- all_tool_dependencies = {}
- for rid in required_repo_info_dicts:
- for name, repo_info_tuple in rid.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- if tool_dependencies:
- for td_key, td_dict in tool_dependencies.items():
- if td_key not in all_tool_dependencies:
- all_tool_dependencies[ td_key ] = td_dict
- if all_tool_dependencies:
- if tool_dependencies is None:
- tool_dependencies = {}
- else:
- # Move all tool dependencies to the missing_tool_dependencies container.
- for td_key, td_dict in tool_dependencies.items():
- if td_key not in missing_tool_dependencies:
- missing_tool_dependencies[ td_key ] = td_dict
- tool_dependencies = {}
- # Discover and categorize all tool dependencies defined this this repository's repository dependencies.
- required_tool_dependencies, required_missing_tool_dependencies = \
- shed_util.get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies )
- if required_tool_dependencies:
- if not includes_tool_dependencies:
- includes_tool_dependencies = True
- for td_key, td_dict in required_tool_dependencies.items():
- if td_key not in tool_dependencies:
- tool_dependencies[ td_key ] = td_dict
- if required_missing_tool_dependencies:
- if not includes_tool_dependencies:
- includes_tool_dependencies = True
- for td_key, td_dict in required_missing_tool_dependencies.items():
- if td_key not in missing_tool_dependencies:
- missing_tool_dependencies[ td_key ] = td_dict
- else:
- installed_repository_dependencies = None
- missing_repository_dependencies = None
- missing_tool_dependencies = None
- required_repo_info_dicts = None
- # Since we are installing a new repository, no tool dependencies will be considered "missing". Most of the repository contents
- # are set to None since we don't yet know what they are.
+ name, repository_owner, changeset_revision, readme_files_dict, includes_tool_dependencies, \
+ installed_repository_dependencies, missing_repository_dependencies, tool_dependencies, missing_tool_dependencies = \
+ shed_util.get_repository_readme_and_dependencies_for_display( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
+ # Since we are installing a new repository, most of the repository contents are set to None since we don't yet know what they are.
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
toolshed_base_url=tool_shed_url,
repository_name=name,
@@ -1374,7 +1308,7 @@
workflows=None,
new_install=True,
reinstalling=False )
- # We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we're retrieved the list of installed
+ # We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we've retrieved the list of installed
# and missing repository dependencies from the database (2) we're handling the scenario where an error occurred during the installation process,
# so we have a tool_shed_repository record in the database with associated repository dependency records. Since we have the repository
# dependencies in either case, we'll merge the list of missing repository dependencies into the list of installed repository dependencies since
@@ -1489,24 +1423,16 @@
repository_dependencies=repository_dependencies )
repo_info_dicts.append( repo_info_dict )
# Make sure all tool_shed_repository records exist.
- if install_repository_dependencies:
- created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
- shed_util.create_repository_dependency_objects( trans,
- tool_path,
- tool_shed_url,
- repo_info_dicts,
- reinstalling=True,
- install_repository_dependencies=install_repository_dependencies,
- no_changes_checked=no_changes_checked,
- tool_panel_section=tool_panel_section,
- new_tool_panel_section=new_tool_panel_section )
- if len( created_or_updated_tool_shed_repositories ) > 1:
- # Build repository dependency relationships.
- suc.build_repository_dependency_relationships( trans, repo_info_dicts, created_or_updated_tool_shed_repositories )
- else:
- filtered_repo_info_dicts = [ repo_info_dict for repo_info_dict in repo_info_dicts ]
- created_or_updated_tool_shed_repositories = [ tool_shed_repository ]
- tool_panel_section_keys.append( tool_panel_section_key )
+ created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
+ shed_util.create_repository_dependency_objects( trans,
+ tool_path,
+ tool_shed_url,
+ repo_info_dicts,
+ reinstalling=True,
+ install_repository_dependencies=install_repository_dependencies,
+ no_changes_checked=no_changes_checked,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
# Default the selected tool panel location for loading tools included in each newly installed required tool shed repository to the location
# selected for the repository selected for reinstallation.
for index, tps_key in enumerate( tool_panel_section_keys ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
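For orientation, a usage sketch of the new is_in_repo_info_dicts() helper added in this commit, assuming the function from the diff above is in scope; the repo_info_dict payloads are simplified placeholders (real repo_info_tuples carry the description, clone URL, changeset revision, ctx_rev, owner, and dependency dicts):

received_repo_info_dicts = [ { 'emboss_6': ( 'EMBOSS tools', 'http://toolshed/repos/test/emboss_6', '92bedb60b0c9' ) } ]
dependency_repo_info_dict = { 'emboss_datatypes': ( 'datatypes only', 'http://toolshed/repos/test/emboss_datatypes', '27df73fe48a6' ) }
# The repository the user explicitly selected is found in the received list...
assert is_in_repo_info_dicts( received_repo_info_dicts[ 0 ], received_repo_info_dicts )
# ...while a discovered dependency the user declined to install is not, so its
# database record is created but it is filtered out of the installation lists.
assert not is_in_repo_info_dicts( dependency_repo_info_dict, received_repo_info_dicts )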
commit/galaxy-central: greg: When installing a new repository from the tool shed to Galaxy or reinstalling an uninstalled repository, handle tool dependencies defined for the repository dependencies of the repository being installed.
by Bitbucket 08 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2ec609faae49/
changeset: 2ec609faae49
user: greg
date: 2013-01-08 19:50:16
summary: When installing a new repository from the tool shed to Galaxy or reinstalling an uninstalled repository, handle tool dependencies defined for the repository dependencies of the repository being installed.
affected #: 6 files
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -653,6 +653,22 @@
tool_dependencies = None
missing_tool_dependencies = None
return tool_dependencies, missing_tool_dependencies
+def get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies ):
+ """Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories being installed into Galaxy."""
+ # FIXME: this method currently populates and returns only missing tool dependencies since tool dependencies defined for complex repository dependency
+ # relationships is not currently supported. This method should be enhanced to search for installed tool dependencies defined as complex repository
+ # dependency relationships when that feature is implemented.
+ if all_tool_dependencies:
+ tool_dependencies = {}
+ missing_tool_dependencies = {}
+ for td_key, val in all_tool_dependencies.items():
+ # Since we have a new install, missing tool dependencies have never been installed.
+ val[ 'status' ] = trans.model.ToolDependency.installation_status.NEVER_INSTALLED
+ missing_tool_dependencies[ td_key ] = val
+ else:
+ tool_dependencies = None
+ missing_tool_dependencies = None
+ return tool_dependencies, missing_tool_dependencies
def get_repository_owner( cleaned_repository_url ):
items = cleaned_repository_url.split( 'repos' )
repo_path = items[ 1 ]
@@ -667,7 +683,7 @@
"""
Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through
- this methid is required to retrieve all repository dependencies.
+ this method is required to retrieve all repository dependencies.
"""
all_repo_info_dicts = []
if repo_info_dicts:
@@ -1035,7 +1051,7 @@
if k not in [ 'id', 'version', 'name' ]:
return True
return False
-def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False ):
+def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False, required_repo_info_dicts=None ):
"""
Retrieve necessary information from the received repository's metadata to populate the containers_dict for display. This method is called only
from Galaxy (not the tool shed) when displaying repository dependencies for installed repositories and when displaying them for uninstalled
@@ -1064,15 +1080,51 @@
readme_files_dict = None
# Handle repository dependencies.
installed_repository_dependencies, missing_repository_dependencies = get_installed_and_missing_repository_dependencies( trans, repository )
- # Handle tool dependencies.
- all_tool_dependencies = metadata.get( 'tool_dependencies', None )
- installed_tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
+ # Handle the current repository's tool dependencies.
+ repository_tool_dependencies = metadata.get( 'tool_dependencies', None )
+ repository_installed_tool_dependencies, repository_missing_tool_dependencies = \
+ get_installed_and_missing_tool_dependencies( trans, repository, repository_tool_dependencies )
if reinstalling:
- # All tool dependencies will be considered missing since we are reinstalling the repository.
- if installed_tool_dependencies:
- for td in installed_tool_dependencies:
- missing_tool_dependencies.append( td )
- installed_tool_dependencies = None
+ installed_tool_dependencies = None
+ missing_tool_dependencies = None
+ if repository_installed_tool_dependencies is None:
+ repository_installed_tool_dependencies = {}
+ if repository_missing_tool_dependencies is None:
+ repository_missing_tool_dependencies = {}
+ if required_repo_info_dicts:
+ # Handle the tool dependencies defined for each of the repository's repository dependencies.
+ for rid in required_repo_info_dicts:
+ for name, repo_info_tuple in rid.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if tool_dependencies:
+ tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
+ required_repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, name, repository_owner, changeset_revision )
+ if not required_repository:
+ # The required_repository may have been installed with a different changeset revision.
+ required_repository, installed_changeset_revision = repository_was_previously_installed( trans,
+ tool_shed_url,
+ name,
+ repo_info_tuple,
+ clone_dir )
+ if required_repository:
+ required_repository_installed_tool_dependencies, required_repository_missing_tool_dependencies = \
+ get_installed_and_missing_tool_dependencies( trans, required_repository, tool_dependencies )
+ if required_repository_installed_tool_dependencies:
+ for td_key, td_dict in required_repository_installed_tool_dependencies.items():
+ if td_key not in repository_installed_tool_dependencies:
+ repository_installed_tool_dependencies[ td_key ] = td_dict
+ if required_repository_missing_tool_dependencies:
+ for td_key, td_dict in required_repository_missing_tool_dependencies.items():
+ if td_key not in repository_missing_tool_dependencies:
+ repository_missing_tool_dependencies[ td_key ] = td_dict
+ if repository_installed_tool_dependencies:
+ installed_tool_dependencies = repository_installed_tool_dependencies
+ if repository_missing_tool_dependencies:
+ missing_tool_dependencies = repository_missing_tool_dependencies
+ else:
+ installed_tool_dependencies = repository_installed_tool_dependencies
+ missing_tool_dependencies = repository_missing_tool_dependencies
# Handle valid tools.
valid_tools = metadata.get( 'tools', None )
# Handle workflows.
@@ -1092,7 +1144,8 @@
tool_dependencies=installed_tool_dependencies,
valid_tools=valid_tools,
workflows=workflows,
- new_install=False )
+ new_install=False,
+ reinstalling=reinstalling )
else:
containers_dict = dict( datatypes=None,
invalid_tools=None,
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -78,7 +78,7 @@
return readme_files_dict
def build_repository_containers_for_galaxy( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision, repository, datatypes,
invalid_tools, missing_repository_dependencies, missing_tool_dependencies, readme_files_dict,
- repository_dependencies, tool_dependencies, valid_tools, workflows, new_install=False ):
+ repository_dependencies, tool_dependencies, valid_tools, workflows, new_install=False, reinstalling=False ):
"""Return a dictionary of containers for the received repository's dependencies and readme files for display during installation to Galaxy."""
containers_dict = dict( datatypes=None,
invalid_tools=None,
@@ -160,7 +160,8 @@
tool_dependencies,
label=label,
missing=False,
- new_install=new_install )
+ new_install=new_install,
+ reinstalling=reinstalling )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Missing tool dependencies container.
if missing_tool_dependencies:
@@ -170,7 +171,8 @@
missing_tool_dependencies,
label='Missing tool dependencies',
missing=True,
- new_install=new_install )
+ new_install=new_install,
+ reinstalling=reinstalling )
containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
# Valid tools container.
if valid_tools:
@@ -790,7 +792,7 @@
Return a dictionary that includes all of the information needed to install a repository into a local Galaxy instance. The dictionary will also
contain the recursive list of repository dependencies defined for the repository, as well as the defined tool dependencies.
- This method is called from Galaxy from two places:
+ This method is called from Galaxy in two places:
1. During the tool shed repository installation process (via the tool shed's get_repository_information() method)- in this case both the received
repository and repository_metadata will be objects.
2. When a tool shed repository that was uninstalled from a Galaxy instance is being re-installed - in this case, both repository and
@@ -2414,6 +2416,47 @@
removed = True
error_message = ''
return removed, error_message
+def repository_dependencies_have_tool_dependencies( trans, repository_dependencies ):
+ """
+ repository_dependencies':
+ {'http://localhost:9009__ESEP__emboss_6__ESEP__test__ESEP__92bedb60b0c9':
+ [['http://localhost:9009', 'emboss_datatypes', 'test', '27df73fe48a6']],
+ 'root_key': 'http://localhost:9009__ESEP__emboss__ESEP__test__ESEP__06d729cb3f34',
+ 'description': 'required to enable emboss 6 tools',
+ 'http://localhost:9009__ESEP__emboss__ESEP__test__ESEP__06d729cb3f34':
+ [['http://localhost:9009', 'emboss_6', 'test', '92bedb60b0c9']]}}
+ """
+ rd_tups_processed = []
+ for key, rd_tups in repository_dependencies.items():
+ if key in [ 'root_key', 'description' ]:
+ continue
+ rd_tup = container_util.get_components_from_key( key )
+ if rd_tup not in rd_tups_processed:
+ toolshed, name, owner, changeset_revision = rd_tup
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if 'tool_dependencies' in metadata:
+ return True
+ rd_tups_processed.append( rd_tup )
+ for rd_tup in rd_tups:
+ if rd_tup not in rd_tups_processed:
+ toolshed, name, owner, changeset_revision = rd_tup
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if 'tool_dependencies' in metadata:
+ return True
+ rd_tups_processed.append( rd_tup )
+ return False
def reset_all_metadata_on_installed_repository( trans, id ):
"""Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
repository = get_installed_tool_shed_repository( trans, id )
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -786,9 +786,9 @@
operation='preview_tools_in_changeset',
repository_id=repository_id )
self.valid_repository_grid.operations = [ grids.GridOperation( "Preview and install",
- url_args=url_args,
- allow_multiple=False,
- async_compatible=False ) ]
+ url_args=url_args,
+ allow_multiple=False,
+ async_compatible=False ) ]
return self.valid_repository_grid( trans, **kwd )
def __build_allow_push_select_field( self, trans, current_push_list, selected_value='none' ):
options = []
@@ -1410,7 +1410,10 @@
repo_info_dicts=repo_info_dicts )
@web.json
def get_required_repo_info_dict( self, trans, encoded_str ):
- """Retrive a list of dictionaries that each contain all of the information needed to install the list of repositories defined by encoded_str."""
+ """
+ Retrieve and return a dictionary that includes a list of dictionaries that each contain all of the information needed to install the list of
+ repositories defined by the received encoded_str.
+ """
encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str )
encoded_required_repository_tups = encoded_required_repository_str.split( encoding_util.encoding_sep2 )
decoded_required_repository_tups = []
@@ -1430,7 +1433,7 @@
return repo_info_dict
@web.expose
def get_tool_dependencies( self, trans, **kwd ):
- """Handle a request from the InstallManager of a local Galaxy instance."""
+ """Handle a request from a Galaxy instance."""
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -291,7 +291,7 @@
else:
tools_root_folder = None
return folder_id, tools_root_folder
-def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', missing=False, new_install=False ):
+def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', missing=False, new_install=False, reinstalling=False ):
"""Return a folder hierarchy containing tool dependencies."""
# The status will be displayed only if the received value for missing is True. When this is the case, we're in Galaxy (not the tool shed)
# and the tool dependencies are not installed or are in an error state, so they are considered missing. The tool dependency status will be
@@ -304,7 +304,9 @@
folder_id += 1
folder = Folder( id=folder_id, key='tool_dependencies', label=label, parent=tool_dependencies_root_folder )
if trans.webapp.name == 'galaxy':
- if missing:
+ if reinstalling:
+ folder.description = "this repository's tools require handling of these dependencies"
+ elif missing and not reinstalling:
folder.description = 'click the name to install the missing dependency'
else:
if new_install:
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1024,6 +1024,8 @@
decoded_kwd = encoding_util.tool_shed_decode( encoded_kwd )
tsr_ids = decoded_kwd[ 'tool_shed_repository_ids' ]
tool_panel_section_keys = decoded_kwd[ 'tool_panel_section_keys' ]
+ repo_info_dicts = decoded_kwd[ 'repo_info_dicts' ]
+ filtered_repo_info_dicts = []
filtered_tool_panel_section_keys = []
repositories_for_installation = []
for index, tsr_id in enumerate( tsr_ids ):
@@ -1031,8 +1033,10 @@
if repository.status in [ trans.model.ToolShedRepository.installation_status.NEW,
trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
repositories_for_installation.append( repository )
+ filtered_repo_info_dicts.append( repo_info_dicts[ index ] )
filtered_tool_panel_section_keys.append( tool_panel_section_keys[ index ] )
if repositories_for_installation:
+ decoded_kwd[ 'repo_info_dicts' ] = filtered_repo_info_dicts
decoded_kwd[ 'tool_panel_section_keys' ] = filtered_tool_panel_section_keys
self.install_tool_shed_repositories( trans, repositories_for_installation, reinstalling=reinstalling, **decoded_kwd )
else:
@@ -1152,7 +1156,6 @@
return trans.show_error_message( message )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
tool_shed_url = kwd[ 'tool_shed_url' ]
# Handle repository dependencies.
includes_repository_dependencies = util.string_as_bool( kwd.get( 'includes_repository_dependencies', False ) )
@@ -1259,7 +1262,6 @@
shed_tool_conf=shed_tool_conf,
status=status,
tool_path=tool_path,
-
tool_panel_section_keys=tool_panel_section_keys,
tool_shed_repository_ids=tsrids_list,
tool_shed_url=tool_shed_url )
@@ -1300,9 +1302,10 @@
raw_text = response.read()
response.close()
readme_files_dict = json.from_json_string( raw_text )
- # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
- # In this case, a record for the repository will exist in the database with the status of 'New'.
if repository_dependencies:
+ missing_tool_dependencies = {}
+ # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
+ # In this case, a record for the repository will exist in the database with the status of 'New'.
repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
if repository and repository.metadata:
installed_repository_dependencies, missing_repository_dependencies = \
@@ -1310,9 +1313,48 @@
else:
installed_repository_dependencies, missing_repository_dependencies = \
shed_util.get_installed_and_missing_repository_dependencies_for_new_install( trans, repository_dependencies )
+ # Discover all repository dependencies and retrieve information for installing them.
+ required_repo_info_dicts = shed_util.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ # Display tool dependencies defined for each of the repository dependencies.
+ if required_repo_info_dicts:
+ all_tool_dependencies = {}
+ for rid in required_repo_info_dicts:
+ for name, repo_info_tuple in rid.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if tool_dependencies:
+ for td_key, td_dict in tool_dependencies.items():
+ if td_key not in all_tool_dependencies:
+ all_tool_dependencies[ td_key ] = td_dict
+ if all_tool_dependencies:
+ if tool_dependencies is None:
+ tool_dependencies = {}
+ else:
+ # Move all tool dependencies to the missing_tool_dependencies container.
+ for td_key, td_dict in tool_dependencies.items():
+ if td_key not in missing_tool_dependencies:
+ missing_tool_dependencies[ td_key ] = td_dict
+ tool_dependencies = {}
+ # Discover and categorize all tool dependencies defined this this repository's repository dependencies.
+ required_tool_dependencies, required_missing_tool_dependencies = \
+ shed_util.get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies )
+ if required_tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_tool_dependencies.items():
+ if td_key not in tool_dependencies:
+ tool_dependencies[ td_key ] = td_dict
+ if required_missing_tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_missing_tool_dependencies.items():
+ if td_key not in missing_tool_dependencies:
+ missing_tool_dependencies[ td_key ] = td_dict
else:
installed_repository_dependencies = None
missing_repository_dependencies = None
+ missing_tool_dependencies = None
+ required_repo_info_dicts = None
# Since we are installing a new repository, no tool dependencies will be considered "missing". Most of the repository contents
# are set to None since we don't yet know what they are.
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
@@ -1324,13 +1366,14 @@
datatypes=None,
invalid_tools=None,
missing_repository_dependencies=missing_repository_dependencies,
- missing_tool_dependencies=None,
+ missing_tool_dependencies=missing_tool_dependencies,
readme_files_dict=readme_files_dict,
repository_dependencies=installed_repository_dependencies,
tool_dependencies=tool_dependencies,
valid_tools=None,
workflows=None,
- new_install=True )
+ new_install=True,
+ reinstalling=False )
# We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we're retrieved the list of installed
# and missing repository dependencies from the database (2) we're handling the scenario where an error occurred during the installation process,
# so we have a tool_shed_repository record in the database with associated repository dependency records. Since we have the repository
@@ -1338,6 +1381,7 @@
# each displayed repository dependency will display a status, whether installed or missing.
containers_dict = suc.merge_missing_repository_dependencies_to_installed_container( containers_dict )
else:
+ # FIXME: support the installation of repository dependencies and tool dependencies for a list of tool shed repositories being installed.
containers_dict = dict( datatypes=None,
invalid_tools=None,
readme_files_dict=None,
@@ -1395,6 +1439,8 @@
tool_panel_section_key = None
tool_panel_section_keys = []
metadata = tool_shed_repository.metadata
+ # Keep track of tool dependencies defined for the current repository or those defined for any of its repository dependencies.
+ includes_tool_dependencies = tool_shed_repository.includes_tool_dependencies
if tool_shed_repository.includes_tools:
# Handle the selected tool panel location for loading tools included in the tool shed repository.
tool_section, new_tool_panel_section, tool_panel_section_key = \
@@ -1422,8 +1468,7 @@
if isinstance( repo_info_dict, basestring ):
repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
else:
- # Entering this else block used to happen only if the tool_shed_repository does not include any valid tools. After repository dependencies
- # were introduced, it may never happen, but we'll keep the block just in case.
+ # Entering this else block occurs only if the tool_shed_repository does not include any valid tools.
if install_repository_dependencies:
repository_dependencies = self.get_repository_dependencies( trans=trans,
repository_id=repository_id,
@@ -1462,13 +1507,13 @@
filtered_repo_info_dicts = [ repo_info_dict for repo_info_dict in repo_info_dicts ]
created_or_updated_tool_shed_repositories = [ tool_shed_repository ]
tool_panel_section_keys.append( tool_panel_section_key )
- # Defaulot the selected tool panel location for loading tools included in each newly installed required tool shed repository to the location
+ # Default the selected tool panel location for loading tools included in each newly installed required tool shed repository to the location
# selected for the repository selected for reinstallation.
for index, tps_key in enumerate( tool_panel_section_keys ):
if tps_key is None:
tool_panel_section_keys[ index ] = tool_panel_section_key
encoded_repository_ids = [ trans.security.encode_id( r.id ) for r in created_or_updated_tool_shed_repositories ]
- new_kwd = dict( includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies,
+ new_kwd = dict( includes_tool_dependencies=includes_tool_dependencies,
includes_tools=tool_shed_repository.includes_tools,
install_tool_dependencies=install_tool_dependencies,
repo_info_dicts=filtered_repo_info_dicts,
@@ -1539,13 +1584,17 @@
repository_metadata=None,
metadata=metadata,
repository_dependencies=repository_dependencies )
+ if includes_repository_dependencies:
+ # Discover all repository dependencies and retrieve information for installing them.
+ required_repo_info_dicts = shed_util.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ else:
+ required_repo_info_dicts = None
# Get the location in the tool panel in which the tool was originally loaded.
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
if tool_panel_dict:
if shed_util.panel_entry_per_tool( tool_panel_dict ):
- # TODO: Fix this to handle the case where the tools are distributed across in more than 1 ToolSection. The
- # following assumes everything was loaded into 1 section (or no section) in the tool panel.
+ # The following forces everything to be loaded into 1 section (or no section) in the tool panel.
tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
tool_section_dict = tool_section_dicts[ 0 ]
original_section_name = tool_section_dict[ 'name' ]
@@ -1567,7 +1616,14 @@
message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section. "
status = 'warning'
- containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, tool_shed_repository, reinstalling=True )
+ # Populate the containers_dict from the metadata for the tool shed repository we're reinstalling, but make sure to include tool dependencies defined for
+ # all of the repository's repository dependencies.
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ repository=tool_shed_repository,
+ reinstalling=True,
+ required_repo_info_dicts=required_repo_info_dicts )
# Since we're reinstalling we'll merge the list of missing repository dependencies into the list of installed repository dependencies since each displayed
# repository dependency will display a status, whether installed or missing.
containers_dict = suc.merge_missing_repository_dependencies_to_installed_container( containers_dict )
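In outline, the change above adds a single decision point: dependency installation information is fetched from the tool shed only when the repository being reinstalled actually declares repository dependencies, and the result is threaded into the container-building call so missing tool dependencies can be displayed. A minimal standalone sketch of that flow, with invented names except for the fetch call it mirrors:

def gather_required_repo_info( includes_repository_dependencies, fetch_required, repo_info_dict ):
    # fetch_required stands in for shed_util.get_required_repo_info_dicts,
    # which contacts the tool shed; the round trip is skipped when there
    # are no declared dependencies to discover.
    if includes_repository_dependencies:
        return fetch_required( [ repo_info_dict ] )
    return None

# e.g., with a stubbed fetcher:
# gather_required_repo_info( True, lambda dicts: dicts, { 'name': 'repo' } ) -> [ { 'name': 'repo' } ]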
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -91,6 +91,7 @@
repository_dependencies_root_folder = containers_dict[ 'repository_dependencies' ]
tool_dependencies_root_folder = containers_dict[ 'tool_dependencies' ]
+ missing_tool_dependencies_root_folder = containers_dict[ 'missing_tool_dependencies' ]
env_settings_heaader_row_displayed = False
package_header_row_displayed = False
%>
@@ -122,7 +123,7 @@
<div style="clear: both"></div></div>
%endif
- %if tool_dependencies_root_folder:
+ %if tool_dependencies_root_folder or missing_tool_dependencies_root_folder:
%if install_tool_dependencies_check_box is not None:
<div class="form-row"><label>Handle tool dependencies?</label>
@@ -138,14 +139,26 @@
</div><div style="clear: both"></div>
%endif
- <div class="form-row">
- <p/>
- <% row_counter = RowCounter() %>
- <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="dependency_table">
- ${render_folder( tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
- </table>
- <div style="clear: both"></div>
- </div>
+ %if tool_dependencies_root_folder:
+ <div class="form-row">
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="dependency_table">
+ ${render_folder( tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ %if missing_tool_dependencies_root_folder:
+ <div class="form-row">
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="dependency_table">
+ ${render_folder( missing_tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ %endif
%endif
</%def>
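For orientation, the template now keys off two separate entries of containers_dict. A sketch of the shape it expects, with placeholder values rather than real Galaxy folder objects:

containers_dict = dict(
    repository_dependencies=None,      # folder of repository dependencies, or None
    tool_dependencies=None,            # folder of installed tool dependencies, or None
    missing_tool_dependencies=None,    # added by this commit: folder of uninstalled tool dependencies, or None
)

Each %if block renders its dependency table only when the corresponding folder is not None, so the installed and missing tables can appear independently of one another.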
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Fix for grouping parameters value_from_basic when ignore_errors is True.
by Bitbucket 08 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/530fb4f8204f/
changeset: 530fb4f8204f
user: dan
date: 2013-01-08 16:41:32
summary: Fix for grouping parameters value_from_basic when ignore_errors is True. Fixes issue seen in workflows and rerun where an invalid stored value (e.g. due to changing parameter types) is provided.
affected #: 1 file
diff -r 94bfcccdaf0625448e623d1d88ba9a1b7785ee78 -r 530fb4f8204f2106e11f419c381fd53d2319ce24 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -68,21 +68,25 @@
return rval
def value_from_basic( self, value, app, ignore_errors=False ):
rval = []
- for i, d in enumerate( value ):
- rval_dict = {}
- # If the special __index__ key is not set, create it (for backward
- # compatibility)
- rval_dict['__index__'] = d.get( '__index__', i )
- # Restore child inputs
- for input in self.inputs.itervalues():
- if ignore_errors and input.name not in d:
- # If we do not have a value, and are ignoring errors, we simply
- # do nothing. There will be no value for the parameter in the
- # conditional's values dictionary.
- pass
- else:
- rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors )
- rval.append( rval_dict )
+ try:
+ for i, d in enumerate( value ):
+ rval_dict = {}
+ # If the special __index__ key is not set, create it (for backward
+ # compatibility)
+ rval_dict['__index__'] = d.get( '__index__', i )
+ # Restore child inputs
+ for input in self.inputs.itervalues():
+ if ignore_errors and input.name not in d:
+ # If we do not have a value, and are ignoring errors, we simply
+ # do nothing. There will be no value for the parameter in the
+ # conditional's values dictionary.
+ pass
+ else:
+ rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors )
+ rval.append( rval_dict )
+ except Exception, e:
+ if not ignore_errors:
+ raise e
return rval
def visit_inputs( self, prefix, value, callback ):
for i, d in enumerate( value ):
@@ -441,24 +445,28 @@
return rval
def value_from_basic( self, value, app, ignore_errors=False ):
rval = dict()
- current_case = rval['__current_case__'] = value['__current_case__']
- # Test param
- if ignore_errors and self.test_param.name not in value:
- # If ignoring errors, do nothing. However this is potentially very
- # problematic since if we are missing the value of test param,
- # the entire conditional is wrong.
- pass
- else:
- rval[ self.test_param.name ] = self.test_param.value_from_basic( value[ self.test_param.name ], app, ignore_errors )
- # Inputs associated with current case
- for input in self.cases[current_case].inputs.itervalues():
- if ignore_errors and input.name not in value:
- # If we do not have a value, and are ignoring errors, we simply
- # do nothing. There will be no value for the parameter in the
- # conditional's values dictionary.
+ try:
+ current_case = rval['__current_case__'] = value['__current_case__']
+ # Test param
+ if ignore_errors and self.test_param.name not in value:
+ # If ignoring errors, do nothing. However this is potentially very
+ # problematic since if we are missing the value of test param,
+ # the entire conditional is wrong.
pass
else:
- rval[ input.name ] = input.value_from_basic( value[ input.name ], app, ignore_errors )
+ rval[ self.test_param.name ] = self.test_param.value_from_basic( value[ self.test_param.name ], app, ignore_errors )
+ # Inputs associated with current case
+ for input in self.cases[current_case].inputs.itervalues():
+ if ignore_errors and input.name not in value:
+ # If we do not have a value, and are ignoring errors, we simply
+ # do nothing. There will be no value for the parameter in the
+ # conditional's values dictionary.
+ pass
+ else:
+ rval[ input.name ] = input.value_from_basic( value[ input.name ], app, ignore_errors )
+ except Exception, e:
+ if not ignore_errors:
+ raise e
return rval
def visit_inputs( self, prefix, value, callback ):
current_case = value['__current_case__']
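Both methods gain the same guard: restoration errors are swallowed only when the caller asked to ignore them. A self-contained sketch of the idiom using a hypothetical helper, not Galaxy code:

def restore_from_basic( saved, restorers, ignore_errors=False ):
    # restorers maps parameter names to functions that rebuild a value
    # from its basic (serialized) form.
    rval = {}
    try:
        for name, restore in restorers.items():
            if ignore_errors and name not in saved:
                # Missing value while ignoring errors: leave the key out.
                continue
            rval[ name ] = restore( saved[ name ] )
    except Exception:
        # A stale stored value (e.g. a parameter whose type changed) can
        # raise here; only propagate when errors are not being ignored.
        if not ignore_errors:
            raise
    return rval

# restore_from_basic( { 'a': '1' }, { 'a': int, 'b': int }, ignore_errors=True ) -> { 'a': 1 }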
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Fix bug in bowtie2 wrapper that prevented wrapper from working with old versions of samtools.
by Bitbucket 08 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/94bfcccdaf06/
changeset: 94bfcccdaf06
user: jgoecks
date: 2013-01-08 15:25:16
summary: Fix bug in bowtie2 wrapper that prevented wrapper from working with old versions of samtools.
affected #: 1 file
diff -r 989c789fbc43b0d9960655752c6ba396ac7618af -r 94bfcccdaf0625448e623d1d88ba9a1b7785ee78 tools/sr_mapping/bowtie2_wrapper.py
--- a/tools/sr_mapping/bowtie2_wrapper.py
+++ b/tools/sr_mapping/bowtie2_wrapper.py
@@ -71,7 +71,7 @@
index_path = options.index_path
# Build bowtie command; use view and sort to create sorted bam.
- cmd = 'bowtie2 %s -x %s %s | samtools view -Su - | samtools sort -o - sorted > %s'
+ cmd = 'bowtie2 %s -x %s %s | samtools view -Su - | samtools sort -o - - > %s'
# Set up reads.
if options.single_paired == 'paired':
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: james_taylor: comment out two eggs from eggs.ini for now
by Bitbucket 07 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/989c789fbc43/
changeset: 989c789fbc43
user: james_taylor
date: 2013-01-07 22:25:54
summary: comment out two eggs from eggs.ini for now
affected #: 1 file
diff -r d8606b61644a09719a8042efd2b23b7aa7869ff2 -r 989c789fbc43b0d9960655752c6ba396ac7618af eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -29,7 +29,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
-msgpack_python = 0.2.4
+; msgpack_python = 0.2.4
[eggs:noplatform]
amqplib = 0.6.1
@@ -66,7 +66,7 @@
Babel = 0.9.4
wchartype = 0.1
Whoosh = 0.3.18
-fluent_logger = 0.3.3
+; fluent_logger = 0.3.3
; extra version information
[tags]
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: inithello: Revert fix from 8530:df20658ac499.
by Bitbucket 07 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d8606b61644a/
changeset: d8606b61644a
user: inithello
date: 2013-01-07 22:20:23
summary: Revert fix from 8530:df20658ac499.
affected #: 1 file
diff -r 09cf284087021586ad08656b7ea444959c6c49bf -r d8606b61644a09719a8042efd2b23b7aa7869ff2 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -848,7 +848,8 @@
tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
else:
tool_section = None
- repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ if isinstance( repo_info_dict, basestring ):
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
# Clone each repository to the configured location.
shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
@@ -1418,7 +1419,8 @@
repo_info_dicts = []
repo_info_dict = kwd.get( 'repo_info_dict', None )
if repo_info_dict:
- repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ if isinstance( repo_info_dict, basestring ):
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
else:
# Entering this else block used to happen only if the tool_shed_repository does not include any valid tools. After repository dependencies
# were introduced, it may never happen, but we'll keep the block just in case.
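The restored isinstance guard makes decoding idempotent. The same pattern as a minimal standalone sketch, where decode stands in for encoding_util.tool_shed_decode:

def maybe_decode( repo_info_dict, decode ):
    # Decode only while the value is still an encoded string; an already
    # decoded dict passes through untouched, so the call is safe whether
    # the caller supplies the encoded or the decoded form.
    if isinstance( repo_info_dict, basestring ):
        return decode( repo_info_dict )
    return repo_info_dict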
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
7 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/67379010fc77/
changeset: 67379010fc77
user: james_taylor
date: 2012-10-02 17:03:48
summary: tracing: first pass trace logging to fluentd
affected #: 9 files
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -29,6 +29,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
+msgpack_python = 0.2.2
[eggs:noplatform]
amqplib = 0.6.1
@@ -65,6 +66,7 @@
Babel = 0.9.4
wchartype = 0.1
Whoosh = 0.3.18
+fluent_logger = 0.3.3
; extra version information
[tags]
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -28,6 +28,7 @@
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
+ self.configure_fluent_log()
# Determine the database url
if self.config.database_connection:
db_url = self.config.database_connection
@@ -53,7 +54,8 @@
db_url,
self.config.database_engine_options,
database_query_profiling_proxy = self.config.database_query_profiling_proxy,
- object_store = self.object_store )
+ object_store = self.object_store,
+ trace_logger=self.trace_logger )
# Manage installed tool shed repositories.
self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
@@ -143,6 +145,7 @@
self.job_stop_queue = self.job_manager.job_stop_queue
# Initialize the external service types
self.external_service_types = external_service_types.ExternalServiceTypesCollection( self.config.external_service_type_config_file, self.config.external_service_type_path, self )
+
def shutdown( self ):
self.job_manager.shutdown()
self.object_store.shutdown()
@@ -155,3 +158,10 @@
os.unlink( self.datatypes_registry.integrated_datatypes_configs )
except:
pass
+
+ def configure_fluent_log( self ):
+ if self.config.fluent_log:
+ from galaxy.util.log.fluent_log import FluentTraceLogger
+ self.trace_logger = FluentTraceLogger( 'galaxy', self.config.fluent_host, self.config.fluent_port )
+ else:
+ self.trace_logger = None
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -232,6 +232,11 @@
for k, v in amqp_config:
self.amqp[k] = v
self.running_functional_tests = string_as_bool( kwargs.get( 'running_functional_tests', False ) )
+ # Logging with fluentd
+ self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
+ self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
+ self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
+
def __read_tool_job_config( self, global_conf_parser, section, key ):
try:
tool_runners_config = global_conf_parser.items( section )
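The three new settings are read straight from the application kwargs with the defaults shown. A small self-contained sketch of the parsing; the host value is an invented example and string_as_bool is a local stand-in for the helper config.py imports:

def string_as_bool( value ):
    return str( value ).lower() in ( 'true', 'yes', 'on', '1' )

kwargs = dict( fluent_log='True', fluent_host='fluentd.example.org', fluent_port='24224' )
fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )  # True
fluent_host = kwargs.get( 'fluent_host', 'localhost' )            # 'fluentd.example.org'
fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )           # 24224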
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1868,7 +1868,7 @@
# Let this go, it could possibly work with db's we don't support
log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
-def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None ):
+def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None, trace_logger=None ):
"""Connect mappings to the database"""
# Connect dataset to the file path
Dataset.file_path = file_path
@@ -1876,10 +1876,10 @@
Dataset.object_store = object_store
# Load the appropriate db module
load_egg_for_url( url )
- # Should we use the logging proxy?
- if database_query_profiling_proxy:
+ # If metlog is enabled, do micrologging
+ if trace_logger:
import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
- proxy = logging_connection_proxy.LoggingProxy()
+ proxy = logging_connection_proxy.TraceLoggerProxy( trace_logger )
else:
proxy = None
# Create the database engine
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/model/orm/logging_connection_proxy.py
--- a/lib/galaxy/model/orm/logging_connection_proxy.py
+++ b/lib/galaxy/model/orm/logging_connection_proxy.py
@@ -18,13 +18,31 @@
rval = []
for frame, fname, line, funcname, _, _ in inspect.stack()[2:]:
rval.append( "%s:%s@%d" % ( stripwd( fname ), funcname, line ) )
- return " > ".join( rval )
+ return rval
class LoggingProxy(ConnectionProxy):
+ """
+ Logs SQL statements using standard logging module
+ """
def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
start = time.clock()
rval = execute(cursor, statement, parameters, context)
duration = time.clock() - start
log.debug( "statement: %r parameters: %r executemany: %r duration: %r stack: %r",
- statement, parameters, executemany, duration, pretty_stack() )
+ statement, parameters, executemany, duration, " > ".join( pretty_stack() ) )
return rval
+
+class TraceLoggerProxy(ConnectionProxy):
+ """
+ Logs SQL statements using a metlog client
+ """
+ def __init__( self, trace_logger ):
+ self.trace_logger = trace_logger
+ def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
+ start = time.clock()
+ rval = execute(cursor, statement, parameters, context)
+ duration = time.clock() - start
+ self.trace_logger.log( "sqlalchemy_query",
+ message="Query executed", statement=statement, parameters=parameters,
+ executemany=executemany, duration=duration, stack=pretty_stack() )
+ return rval
\ No newline at end of file
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/util/log/__init__.py
--- /dev/null
+++ b/lib/galaxy/util/log/__init__.py
@@ -0,0 +1,5 @@
+class TraceLogger( object ):
+ def __init__( self, name ):
+ self.name = name
+ def log( **kwargs ):
+ raise TypeError( "Abstract Method" )
\ No newline at end of file
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/util/log/fluent_log.py
--- /dev/null
+++ b/lib/galaxy/util/log/fluent_log.py
@@ -0,0 +1,37 @@
+import time
+import threading
+
+import galaxy.eggs
+galaxy.eggs.require( "fluent-logger" )
+galaxy.eggs.require( "msgpack_python" )
+
+
+from fluent.sender import FluentSender
+
+
+class FluentTraceLogger( object ):
+ def __init__( self, name, host='localhost', port=24224 ):
+ self.lock = threading.Lock()
+ self.thread_local = threading.local()
+ self.name = name
+ self.sender = FluentSender( self.name, host=host, port=port )
+
+ def context_push( self, value ):
+ self.lock.acquire()
+ if not hasattr( self.thread_local, 'context' ):
+ self.thread_local.context = []
+ self.thread_local.context.append( value )
+ self.lock.release()
+
+ def context_pop( self ):
+ self.lock.acquire()
+ self.thread_local.context.pop()
+ self.lock.release()
+
+ def log( self, label, **kwargs ):
+ self.lock.acquire()
+ if not hasattr( self.thread_local, 'context' ):
+ self.thread_local.context = []
+ self.lock.release()
+ kwargs['log_context'] = self.thread_local.context
+ self.sender.emit_with_time( label, int(time.time()), kwargs )
\ No newline at end of file
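A brief usage sketch of the logger defined above; the label and field values are invented, and the fluent-logger and msgpack eggs must be available for the module import to succeed:

trace_logger = FluentTraceLogger( 'galaxy', host='localhost', port=24224 )
trace_logger.context_push( dict( request_id='0123abcd' ) )
trace_logger.log( 'WebApplication', message='Starting request' )  # record carries log_context
trace_logger.context_pop()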
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -8,6 +8,8 @@
import os.path
import sys
import tarfile
+import threading
+import uuid
from Cookie import SimpleCookie
@@ -68,6 +70,9 @@
self.mapper.explicit = False
self.api_mapper = routes.Mapper()
self.transaction_factory = DefaultWebTransaction
+ # Each request will have a unique id. Since we are assuming
+ # a threaded model for the moment we can store that here
+ self.request_id = threading.local()
def add_ui_controller( self, controller_name, controller ):
"""
Add a controller class to this application. A controller class has
@@ -106,12 +111,20 @@
# Create/compile the regular expressions for route mapping
self.mapper.create_regs( self.controllers.keys() )
self.api_mapper.create_regs( self.api_controllers.keys() )
+ def trace( self, **fields ):
+ if self.trace_logger:
+ self.trace_logger.log( "WebApplication", **fields )
def __call__( self, environ, start_response ):
"""
Call interface as specified by WSGI. Wraps the environment in user
friendly objects, finds the appropriate method to handle the request
and calls it.
"""
+ # Immediately create request_id which we will use for logging
+ self.request_id = request_id = uuid.uuid1().hex
+ if self.trace_logger:
+ self.trace_logger.context_push( dict( request_id = request_id ) )
+ self.trace( message= "Starting request" )
# Map url using routes
path_info = environ.get( 'PATH_INFO', '' )
map = self.mapper.match( path_info, environ )
@@ -125,6 +138,7 @@
controllers = self.controllers
if map == None:
raise httpexceptions.HTTPNotFound( "No route for " + path_info )
+ self.trace( path_info=path_info, map=map )
# Setup routes
rc = routes.request_config()
rc.mapper = mapper
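The per-request wiring above reduces to: mint an id, push it into the trace context, log. The same sequence as a hedged standalone sketch (the function name is invented):

import uuid

def begin_request_trace( trace_logger ):
    # Mirrors the start of WebApplication.__call__: a unique id is minted
    # for the request and attached to subsequent trace records through the
    # logger's thread-local context.
    request_id = uuid.uuid1().hex
    if trace_logger:
        trace_logger.context_push( dict( request_id=request_id ) )
        trace_logger.log( 'WebApplication', message='Starting request' )
    return request_id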
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -110,7 +110,13 @@
webapp.api_mapper.connect("import_workflow", "/api/workflows/upload", controller="workflows", action="import_new_workflow", conditions=dict(method=["POST"]))
webapp.api_mapper.connect("workflow_dict", '/api/workflows/download/{workflow_id}', controller='workflows', action='workflow_dict', conditions=dict(method=['GET']))
+ # Connect logger from app
+ if app.trace_logger:
+ webapp.trace_logger = app.trace_logger
+
+ # Indicate that all configuration settings have been provided
webapp.finalize_config()
+
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
webapp = wrap_in_middleware( webapp, global_conf, **kwargs )
https://bitbucket.org/galaxy/galaxy-central/commits/33d256f3121a/
changeset: 33d256f3121a
user: james_taylor
date: 2013-01-03 22:54:14
summary: merge
affected #: 9 files
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -29,6 +29,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
+msgpack_python = 0.2.2
[eggs:noplatform]
amqplib = 0.6.1
@@ -65,6 +66,7 @@
Babel = 0.9.4
wchartype = 0.1
Whoosh = 0.3.18
+fluent_logger = 0.3.3
; extra version information
[tags]
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -29,6 +29,7 @@
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
+ self.configure_fluent_log()
# Determine the database url
if self.config.database_connection:
db_url = self.config.database_connection
@@ -54,7 +55,8 @@
db_url,
self.config.database_engine_options,
database_query_profiling_proxy = self.config.database_query_profiling_proxy,
- object_store = self.object_store )
+ object_store = self.object_store,
+ trace_logger=self.trace_logger )
# Manage installed tool shed repositories.
self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
@@ -149,6 +151,7 @@
self.job_stop_queue = self.job_manager.job_stop_queue
# Initialize the external service types
self.external_service_types = external_service_types.ExternalServiceTypesCollection( self.config.external_service_type_config_file, self.config.external_service_type_path, self )
+
def shutdown( self ):
self.job_manager.shutdown()
self.object_store.shutdown()
@@ -161,3 +164,10 @@
os.unlink( self.datatypes_registry.integrated_datatypes_configs )
except:
pass
+
+ def configure_fluent_log( self ):
+ if self.config.fluent_log:
+ from galaxy.util.log.fluent_log import FluentTraceLogger
+ self.trace_logger = FluentTraceLogger( 'galaxy', self.config.fluent_host, self.config.fluent_port )
+ else:
+ self.trace_logger = None
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -261,6 +261,10 @@
self.api_folders = string_as_bool( kwargs.get( 'api_folders', False ) )
# This is for testing new library browsing capabilities.
self.new_lib_browse = string_as_bool( kwargs.get( 'new_lib_browse', False ) )
+ # Logging with fluentd
+ self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
+ self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
+ self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
def __read_tool_job_config( self, global_conf_parser, section, key ):
try:
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1950,7 +1950,7 @@
# Let this go, it could possibly work with db's we don't support
log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
-def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None ):
+def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None, trace_logger=None ):
"""Connect mappings to the database"""
# Connect dataset to the file path
Dataset.file_path = file_path
@@ -1958,10 +1958,10 @@
Dataset.object_store = object_store
# Load the appropriate db module
load_egg_for_url( url )
- # Should we use the logging proxy?
- if database_query_profiling_proxy:
+ # If metlog is enabled, do micrologging
+ if trace_logger:
import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
- proxy = logging_connection_proxy.LoggingProxy()
+ proxy = logging_connection_proxy.TraceLoggerProxy( trace_logger )
else:
proxy = None
# Create the database engine
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/model/orm/logging_connection_proxy.py
--- a/lib/galaxy/model/orm/logging_connection_proxy.py
+++ b/lib/galaxy/model/orm/logging_connection_proxy.py
@@ -18,13 +18,31 @@
rval = []
for frame, fname, line, funcname, _, _ in inspect.stack()[2:]:
rval.append( "%s:%s@%d" % ( stripwd( fname ), funcname, line ) )
- return " > ".join( rval )
+ return rval
class LoggingProxy(ConnectionProxy):
+ """
+ Logs SQL statements using standard logging module
+ """
def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
start = time.clock()
rval = execute(cursor, statement, parameters, context)
duration = time.clock() - start
log.debug( "statement: %r parameters: %r executemany: %r duration: %r stack: %r",
- statement, parameters, executemany, duration, pretty_stack() )
+ statement, parameters, executemany, duration, " > ".join( pretty_stack() ) )
return rval
+
+class TraceLoggerProxy(ConnectionProxy):
+ """
+ Logs SQL statements using a metlog client
+ """
+ def __init__( self, trace_logger ):
+ self.trace_logger = trace_logger
+ def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
+ start = time.clock()
+ rval = execute(cursor, statement, parameters, context)
+ duration = time.clock() - start
+ self.trace_logger.log( "sqlalchemy_query",
+ message="Query executed", statement=statement, parameters=parameters,
+ executemany=executemany, duration=duration, stack=pretty_stack() )
+ return rval
\ No newline at end of file
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/util/log/__init__.py
--- /dev/null
+++ b/lib/galaxy/util/log/__init__.py
@@ -0,0 +1,5 @@
+class TraceLogger( object ):
+ def __init__( self, name ):
+ self.name = name
+ def log( **kwargs ):
+ raise TypeError( "Abstract Method" )
\ No newline at end of file
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/util/log/fluent_log.py
--- /dev/null
+++ b/lib/galaxy/util/log/fluent_log.py
@@ -0,0 +1,37 @@
+import time
+import threading
+
+import galaxy.eggs
+galaxy.eggs.require( "fluent-logger" )
+galaxy.eggs.require( "msgpack_python" )
+
+
+from fluent.sender import FluentSender
+
+
+class FluentTraceLogger( object ):
+ def __init__( self, name, host='localhost', port=24224 ):
+ self.lock = threading.Lock()
+ self.thread_local = threading.local()
+ self.name = name
+ self.sender = FluentSender( self.name, host=host, port=port )
+
+ def context_push( self, value ):
+ self.lock.acquire()
+ if not hasattr( self.thread_local, 'context' ):
+ self.thread_local.context = []
+ self.thread_local.context.append( value )
+ self.lock.release()
+
+ def context_pop( self ):
+ self.lock.acquire()
+ self.thread_local.context.pop()
+ self.lock.release()
+
+ def log( self, label, **kwargs ):
+ self.lock.acquire()
+ if not hasattr( self.thread_local, 'context' ):
+ self.thread_local.context = []
+ self.lock.release()
+ kwargs['log_context'] = self.thread_local.context
+ self.sender.emit_with_time( label, int(time.time()), kwargs )
\ No newline at end of file
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -8,6 +8,8 @@
import os.path
import sys
import tarfile
+import threading
+import uuid
from Cookie import SimpleCookie
@@ -68,6 +70,9 @@
self.mapper.explicit = False
self.api_mapper = routes.Mapper()
self.transaction_factory = DefaultWebTransaction
+ # Each request will have a unique id. Since we are assuming
+ # a threaded model for the moment we can store that here
+ self.request_id = threading.local()
def add_ui_controller( self, controller_name, controller ):
"""
Add a controller class to this application. A controller class has
@@ -106,12 +111,20 @@
# Create/compile the regular expressions for route mapping
self.mapper.create_regs( self.controllers.keys() )
self.api_mapper.create_regs( self.api_controllers.keys() )
+ def trace( self, **fields ):
+ if self.trace_logger:
+ self.trace_logger.log( "WebApplication", **fields )
def __call__( self, environ, start_response ):
"""
Call interface as specified by WSGI. Wraps the environment in user
friendly objects, finds the appropriate method to handle the request
and calls it.
"""
+ # Immediately create request_id which we will use for logging
+ self.request_id = request_id = uuid.uuid1().hex
+ if self.trace_logger:
+ self.trace_logger.context_push( dict( request_id = request_id ) )
+ self.trace( message= "Starting request" )
# Map url using routes
path_info = environ.get( 'PATH_INFO', '' )
map = self.mapper.match( path_info, environ )
@@ -125,6 +138,7 @@
controllers = self.controllers
if map == None:
raise httpexceptions.HTTPNotFound( "No route for " + path_info )
+ self.trace( path_info=path_info, map=map )
# Setup routes
rc = routes.request_config()
rc.mapper = mapper
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -130,7 +130,13 @@
webapp.api_mapper.connect("import_workflow", "/api/workflows/upload", controller="workflows", action="import_new_workflow", conditions=dict(method=["POST"]))
webapp.api_mapper.connect("workflow_dict", '/api/workflows/download/{workflow_id}', controller='workflows', action='workflow_dict', conditions=dict(method=['GET']))
+ # Connect logger from app
+ if app.trace_logger:
+ webapp.trace_logger = app.trace_logger
+
+ # Indicate that all configuration settings have been provided
webapp.finalize_config()
+
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
webapp = wrap_in_middleware( webapp, global_conf, **kwargs )
https://bitbucket.org/galaxy/galaxy-central/commits/33750f347be2/
changeset: 33750f347be2
user: james_taylor
date: 2013-01-04 20:26:22
summary: update msgpack_python version for trace logging, don't log entire stack with each query
affected #: 2 files
diff -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e -r 33750f347be2aef72fe4e32d5c2197dd82f2b45a eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -29,7 +29,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
-msgpack_python = 0.2.2
+msgpack_python = 0.2.4
[eggs:noplatform]
amqplib = 0.6.1
diff -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e -r 33750f347be2aef72fe4e32d5c2197dd82f2b45a lib/galaxy/model/orm/logging_connection_proxy.py
--- a/lib/galaxy/model/orm/logging_connection_proxy.py
+++ b/lib/galaxy/model/orm/logging_connection_proxy.py
@@ -44,5 +44,5 @@
duration = time.clock() - start
self.trace_logger.log( "sqlalchemy_query",
message="Query executed", statement=statement, parameters=parameters,
- executemany=executemany, duration=duration, stack=pretty_stack() )
+ executemany=executemany, duration=duration )
return rval
\ No newline at end of file
https://bitbucket.org/galaxy/galaxy-central/commits/8331f2af6a90/
changeset: 8331f2af6a90
user: james_taylor
date: 2013-01-04 22:48:35
summary: trace logging: flatten context
affected #: 2 files
diff -r 33750f347be2aef72fe4e32d5c2197dd82f2b45a -r 8331f2af6a9092f4528fb01ca7baf3d7999838a0 lib/galaxy/util/log/fluent_log.py
--- a/lib/galaxy/util/log/fluent_log.py
+++ b/lib/galaxy/util/log/fluent_log.py
@@ -1,3 +1,7 @@
+"""
+Provides a `TraceLogger` implementation that logs to a fluentd collector
+"""
+
import time
import threading
@@ -5,33 +9,31 @@
galaxy.eggs.require( "fluent-logger" )
galaxy.eggs.require( "msgpack_python" )
-
from fluent.sender import FluentSender
class FluentTraceLogger( object ):
- def __init__( self, name, host='localhost', port=24224 ):
- self.lock = threading.Lock()
- self.thread_local = threading.local()
- self.name = name
- self.sender = FluentSender( self.name, host=host, port=port )
+ def __init__( self, name, host='localhost', port=24224 ):
+ self.lock = threading.Lock()
+ self.thread_local = threading.local()
+ self.name = name
+ self.sender = FluentSender( self.name, host=host, port=port )
- def context_push( self, value ):
- self.lock.acquire()
- if not hasattr( self.thread_local, 'context' ):
- self.thread_local.context = []
- self.thread_local.context.append( value )
- self.lock.release()
+ def context_set( self, key, value ):
+ self.lock.acquire()
+ if not hasattr( self.thread_local, 'context' ):
+ self.thread_local.context = {}
+ self.thread_local.context[key] = value
+ self.lock.release()
- def context_pop( self ):
- self.lock.acquire()
- self.thread_local.context.pop()
- self.lock.release()
+ def context_remove( self, key ):
+ self.lock.acquire()
+ del self.thread_local.context[key]
+ self.lock.release()
- def log( self, label, **kwargs ):
- self.lock.acquire()
- if not hasattr( self.thread_local, 'context' ):
- self.thread_local.context = []
- self.lock.release()
- kwargs['log_context'] = self.thread_local.context
- self.sender.emit_with_time( label, int(time.time()), kwargs )
\ No newline at end of file
+ def log( self, label, **kwargs ):
+ self.lock.acquire()
+ if hasattr( self.thread_local, 'context' ):
+ kwargs.update( self.thread_local.context )
+ self.lock.release()
+ self.sender.emit_with_time( label, int(time.time()), kwargs )
\ No newline at end of file
diff -r 33750f347be2aef72fe4e32d5c2197dd82f2b45a -r 8331f2af6a9092f4528fb01ca7baf3d7999838a0 lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -123,8 +123,16 @@
# Immediately create request_id which we will use for logging
self.request_id = request_id = uuid.uuid1().hex
if self.trace_logger:
- self.trace_logger.context_push( dict( request_id = request_id ) )
- self.trace( message= "Starting request" )
+ self.trace_logger.context_set( "request_id", request_id )
+ self.trace( message="Starting request" )
+ try:
+ return self.handle_request( environ, start_response )
+ finally:
+ self.trace( message="Handle request finished" )
+ if self.trace_logger:
+ self.trace_logger.context_remove( "request_id" )
+
+ def handle_request( self, environ, start_response ):
# Map url using routes
path_info = environ.get( 'PATH_INFO', '' )
map = self.mapper.match( path_info, environ )
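After this change the request id is scoped with try/finally around the handler. A compact sketch of the pattern, with handle_request standing in for the extracted method:

import uuid

def traced_request( trace_logger, handle_request ):
    # Set the id in the (now flat) context, handle the request, then remove
    # the id in a finally block so the thread-local never leaks across requests.
    request_id = uuid.uuid1().hex
    if trace_logger:
        trace_logger.context_set( 'request_id', request_id )
    try:
        return handle_request()
    finally:
        if trace_logger:
            trace_logger.context_remove( 'request_id' )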
https://bitbucket.org/galaxy/galaxy-central/commits/26f38d9bd3ee/
changeset: 26f38d9bd3ee
user: james_taylor
date: 2013-01-07 21:07:16
summary: Restore support for logging connection proxy
affected #: 1 file
diff -r 8331f2af6a9092f4528fb01ca7baf3d7999838a0 -r 26f38d9bd3ee9902aa5661d046604f6fb2bb96b3 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1958,8 +1958,12 @@
Dataset.object_store = object_store
# Load the appropriate db module
load_egg_for_url( url )
+ # Should we use the logging proxy?
+ if database_query_profiling_proxy:
+ import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
+ proxy = logging_connection_proxy.LoggingProxy()
# If metlog is enabled, do micrologging
- if trace_logger:
+ elif trace_logger:
import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
proxy = logging_connection_proxy.TraceLoggerProxy( trace_logger )
else:
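The restored precedence in init(), as a minimal sketch; the string results stand in for the proxy classes:

def choose_connection_proxy( database_query_profiling_proxy, trace_logger ):
    # Explicit query profiling wins, otherwise trace logging is used when
    # configured, otherwise no connection proxy is installed.
    if database_query_profiling_proxy:
        return 'LoggingProxy'
    elif trace_logger:
        return 'TraceLoggerProxy'
    return None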
https://bitbucket.org/galaxy/galaxy-central/commits/5622f8127e9d/
changeset: 5622f8127e9d
user: james_taylor
date: 2013-01-07 21:12:18
summary: tracing: fix for when trace logger is disabled
affected #: 1 file
diff -r 26f38d9bd3ee9902aa5661d046604f6fb2bb96b3 -r 5622f8127e9ddf52baba4ad901e32deacd378f7c lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -73,6 +73,8 @@
# Each request will have a unique id. Since we are assuming
# a threaded model for the moment we can store that here
self.request_id = threading.local()
+ # Set if trace logging is enabled
+ self.trace_logger = None
def add_ui_controller( self, controller_name, controller ):
"""
Add a controller class to this application. A controller class has
@@ -114,6 +116,7 @@
def trace( self, **fields ):
if self.trace_logger:
self.trace_logger.log( "WebApplication", **fields )
+
def __call__( self, environ, start_response ):
"""
Call interface as specified by WSGI. Wraps the environment in user
https://bitbucket.org/galaxy/galaxy-central/commits/09cf28408702/
changeset: 09cf28408702
user: james_taylor
date: 2013-01-07 21:12:47
summary: Automated merge with ssh://bitbucket.org/galaxy/galaxy-central
affected #: 9 files
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -29,6 +29,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
+msgpack_python = 0.2.4
[eggs:noplatform]
amqplib = 0.6.1
@@ -65,6 +66,7 @@
Babel = 0.9.4
wchartype = 0.1
Whoosh = 0.3.18
+fluent_logger = 0.3.3
; extra version information
[tags]
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -29,6 +29,7 @@
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
+ self.configure_fluent_log()
# Determine the database url
if self.config.database_connection:
db_url = self.config.database_connection
@@ -54,7 +55,8 @@
db_url,
self.config.database_engine_options,
database_query_profiling_proxy = self.config.database_query_profiling_proxy,
- object_store = self.object_store )
+ object_store = self.object_store,
+ trace_logger=self.trace_logger )
# Manage installed tool shed repositories.
self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
@@ -149,6 +151,7 @@
self.job_stop_queue = self.job_manager.job_stop_queue
# Initialize the external service types
self.external_service_types = external_service_types.ExternalServiceTypesCollection( self.config.external_service_type_config_file, self.config.external_service_type_path, self )
+
def shutdown( self ):
self.job_manager.shutdown()
self.object_store.shutdown()
@@ -161,3 +164,10 @@
os.unlink( self.datatypes_registry.integrated_datatypes_configs )
except:
pass
+
+ def configure_fluent_log( self ):
+ if self.config.fluent_log:
+ from galaxy.util.log.fluent_log import FluentTraceLogger
+ self.trace_logger = FluentTraceLogger( 'galaxy', self.config.fluent_host, self.config.fluent_port )
+ else:
+ self.trace_logger = None
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -261,6 +261,10 @@
self.api_folders = string_as_bool( kwargs.get( 'api_folders', False ) )
# This is for testing new library browsing capabilities.
self.new_lib_browse = string_as_bool( kwargs.get( 'new_lib_browse', False ) )
+ # Logging with fluentd
+ self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
+ self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
+ self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
def __read_tool_job_config( self, global_conf_parser, section, key ):
try:
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1950,7 +1950,7 @@
# Let this go, it could possibly work with db's we don't support
log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
-def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None ):
+def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None, trace_logger=None ):
"""Connect mappings to the database"""
# Connect dataset to the file path
Dataset.file_path = file_path
@@ -1962,6 +1962,10 @@
if database_query_profiling_proxy:
import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
proxy = logging_connection_proxy.LoggingProxy()
+ # If metlog is enabled, do micrologging
+ elif trace_logger:
+ import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
+ proxy = logging_connection_proxy.TraceLoggerProxy( trace_logger )
else:
proxy = None
# Create the database engine
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/model/orm/logging_connection_proxy.py
--- a/lib/galaxy/model/orm/logging_connection_proxy.py
+++ b/lib/galaxy/model/orm/logging_connection_proxy.py
@@ -18,13 +18,31 @@
rval = []
for frame, fname, line, funcname, _, _ in inspect.stack()[2:]:
rval.append( "%s:%s@%d" % ( stripwd( fname ), funcname, line ) )
- return " > ".join( rval )
+ return rval
class LoggingProxy(ConnectionProxy):
+ """
+ Logs SQL statements using standard logging module
+ """
def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
start = time.clock()
rval = execute(cursor, statement, parameters, context)
duration = time.clock() - start
log.debug( "statement: %r parameters: %r executemany: %r duration: %r stack: %r",
- statement, parameters, executemany, duration, pretty_stack() )
+ statement, parameters, executemany, duration, " > ".join( pretty_stack() ) )
return rval
+
+class TraceLoggerProxy(ConnectionProxy):
+ """
+ Logs SQL statements using a metlog client
+ """
+ def __init__( self, trace_logger ):
+ self.trace_logger = trace_logger
+ def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
+ start = time.clock()
+ rval = execute(cursor, statement, parameters, context)
+ duration = time.clock() - start
+ self.trace_logger.log( "sqlalchemy_query",
+ message="Query executed", statement=statement, parameters=parameters,
+ executemany=executemany, duration=duration )
+ return rval
\ No newline at end of file
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/util/log/__init__.py
--- /dev/null
+++ b/lib/galaxy/util/log/__init__.py
@@ -0,0 +1,5 @@
+class TraceLogger( object ):
+ def __init__( self, name ):
+ self.name = name
+ def log( **kwargs ):
+ raise TypeError( "Abstract Method" )
\ No newline at end of file
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/util/log/fluent_log.py
--- /dev/null
+++ b/lib/galaxy/util/log/fluent_log.py
@@ -0,0 +1,39 @@
+"""
+Provides a `TraceLogger` implementation that logs to a fluentd collector
+"""
+
+import time
+import threading
+
+import galaxy.eggs
+galaxy.eggs.require( "fluent-logger" )
+galaxy.eggs.require( "msgpack_python" )
+
+from fluent.sender import FluentSender
+
+
+class FluentTraceLogger( object ):
+ def __init__( self, name, host='localhost', port=24224 ):
+ self.lock = threading.Lock()
+ self.thread_local = threading.local()
+ self.name = name
+ self.sender = FluentSender( self.name, host=host, port=port )
+
+ def context_set( self, key, value ):
+ self.lock.acquire()
+ if not hasattr( self.thread_local, 'context' ):
+ self.thread_local.context = {}
+ self.thread_local.context[key] = value
+ self.lock.release()
+
+ def context_remove( self, key ):
+ self.lock.acquire()
+ del self.thread_local.context[key]
+ self.lock.release()
+
+ def log( self, label, **kwargs ):
+ self.lock.acquire()
+ if hasattr( self.thread_local, 'context' ):
+ kwargs.update( self.thread_local.context )
+ self.lock.release()
+ self.sender.emit_with_time( label, int(time.time()), kwargs )
\ No newline at end of file
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -8,6 +8,8 @@
import os.path
import sys
import tarfile
+import threading
+import uuid
from Cookie import SimpleCookie
@@ -68,6 +70,11 @@
self.mapper.explicit = False
self.api_mapper = routes.Mapper()
self.transaction_factory = DefaultWebTransaction
+ # Each request will have a unique id. Since we are assuming
+ # a threaded model for the moment we can store that here
+ self.request_id = threading.local()
+ # Set if trace logging is enabled
+ self.trace_logger = None
def add_ui_controller( self, controller_name, controller ):
"""
Add a controller class to this application. A controller class has
@@ -106,12 +113,29 @@
# Create/compile the regular expressions for route mapping
self.mapper.create_regs( self.controllers.keys() )
self.api_mapper.create_regs( self.api_controllers.keys() )
+ def trace( self, **fields ):
+ if self.trace_logger:
+ self.trace_logger.log( "WebApplication", **fields )
+
def __call__( self, environ, start_response ):
"""
Call interface as specified by WSGI. Wraps the environment in user
friendly objects, finds the appropriate method to handle the request
and calls it.
"""
+ # Immediately create request_id which we will use for logging
+ self.request_id = request_id = uuid.uuid1().hex
+ if self.trace_logger:
+ self.trace_logger.context_set( "request_id", request_id )
+ self.trace( message="Starting request" )
+ try:
+ return self.handle_request( environ, start_response )
+ finally:
+ self.trace( message="Handle request finished" )
+ if self.trace_logger:
+ self.trace_logger.context_remove( "request_id" )
+
+ def handle_request( self, environ, start_response ):
# Map url using routes
path_info = environ.get( 'PATH_INFO', '' )
map = self.mapper.match( path_info, environ )
@@ -125,6 +149,7 @@
controllers = self.controllers
if map == None:
raise httpexceptions.HTTPNotFound( "No route for " + path_info )
+ self.trace( path_info=path_info, map=map )
# Setup routes
rc = routes.request_config()
rc.mapper = mapper
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -130,7 +130,13 @@
webapp.api_mapper.connect("import_workflow", "/api/workflows/upload", controller="workflows", action="import_new_workflow", conditions=dict(method=["POST"]))
webapp.api_mapper.connect("workflow_dict", '/api/workflows/download/{workflow_id}', controller='workflows', action='workflow_dict', conditions=dict(method=['GET']))
+ # Connect logger from app
+ if app.trace_logger:
+ webapp.trace_logger = app.trace_logger
+
+ # Indicate that all configuration settings have been provided
webapp.finalize_config()
+
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
webapp = wrap_in_middleware( webapp, global_conf, **kwargs )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/df20658ac499/
changeset: df20658ac499
user: inithello
date: 2013-01-07 18:19:00
summary: Fix for encoded repo_info_dict being passed instead of decoded.
affected #: 1 file
diff -r ce62bf5a91f86d0c53764bda3df4975486d7512e -r df20658ac4991d970bfd9d63feae73850db84bb7 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -848,8 +848,7 @@
tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
else:
tool_section = None
- if isinstance( repo_info_dict, basestring ):
- repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
# Clone each repository to the configured location.
shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
@@ -1419,8 +1418,7 @@
repo_info_dicts = []
repo_info_dict = kwd.get( 'repo_info_dict', None )
if repo_info_dict:
- if isinstance( repo_info_dict, basestring ):
- repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
else:
# Entering this else block used to happen only if the tool_shed_repository does not include any valid tools. After repository dependencies
# were introduced, it may never happen, but we'll keep the block just in case.
@@ -1442,12 +1440,9 @@
repository_metadata=None,
metadata=metadata,
repository_dependencies=repository_dependencies )
- repo_info_dict = encoding_util.tool_shed_encode( repo_info_dict )
repo_info_dicts.append( repo_info_dict )
# Make sure all tool_shed_repository records exist.
if install_repository_dependencies:
- # This is a bit screwy because filtered_repo_info_dicts in this block is a list of tool_shed_encoded dictionaries, but
- # in the associated else block, it is a list of unencoded dictionaries - not sure if this should be corrected...
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
shed_util.create_repository_dependency_objects( trans,
tool_path,
https://bitbucket.org/galaxy/galaxy-central/commits/395bfeb484ae/
changeset: 395bfeb484ae
user: inithello
date: 2013-01-07 18:21:46
summary: Tool shed functional test enhancements - more circular repository dependency tests.
affected #: 7 files
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/base/test_db_util.py
--- a/test/tool_shed/base/test_db_util.py
+++ b/test/tool_shed/base/test_db_util.py
@@ -43,6 +43,10 @@
galaxy.model.ToolShedRepository.table.c.owner == owner,
galaxy.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
.first()
+def get_installed_repository_by_id( repository_id ):
+ return ga_session.query( galaxy.model.ToolShedRepository ) \
+ .filter( galaxy.model.ToolShedRepository.table.c.id == repository_id ) \
+ .first()
def get_installed_repository_by_name_owner( repository_name, owner ):
return ga_session.query( galaxy.model.ToolShedRepository ) \
.filter( and_( galaxy.model.ToolShedRepository.table.c.name == repository_name,
@@ -53,6 +57,10 @@
if role.name == user.email and role.description == 'Private Role for %s' % user.email:
return role
raise AssertionError( "Private role not found for user '%s'" % user.email )
+def get_repository_by_id( repository_id ):
+ return sa_session.query( model.Repository ) \
+ .filter( model.Repository.table.c.id == repository_id ) \
+ .first()
def get_user( email ):
return sa_session.query( model.User ) \
.filter( model.User.table.c.email==email ) \
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -1,14 +1,16 @@
import galaxy.webapps.community.util.hgweb_config
import galaxy.model as galaxy_model
-import common, string, os, re, test_db_util, simplejson
+import common, string, os, re, test_db_util, simplejson, logging, time
import galaxy.util as util
from base.twilltestcase import tc, from_json_string, TwillTestCase, security, urllib
-from galaxy.tool_shed.encoding_util import tool_shed_encode
+from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
from galaxy import eggs
eggs.require('mercurial')
from mercurial import hg, ui
+log = logging.getLogger( __name__ )
+
class ShedTwillTestCase( TwillTestCase ):
def setUp( self ):
# Security helper
@@ -429,7 +431,7 @@
if workflow_name not in strings_displayed:
strings_displayed.append( workflow_name )
self.check_for_strings( strings_displayed, strings_not_displayed )
- def initiate_installation_process( self ):
+ def initiate_installation_process( self, install_tool_dependencies=False, install_repository_dependencies=True ):
html = self.last_page()
# Since the installation process is by necessity asynchronous, we have to get the parameters to 'manually' initiate the
# installation process. This regex will return the tool shed repository IDs in group(1), the encoded_kwd parameter in
@@ -438,11 +440,20 @@
install_parameters = re.search( 'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \);', html )
if install_parameters:
iri_ids = install_parameters.group(1)
- encoded_kwd = install_parameters.group(2)
+ # In some cases, the returned iri_ids are of the form: "[u'<encoded id>', u'<encoded id>']"
+ # This ensures that non-hex characters are stripped out of the list, so that util.listify/decode_id will handle them correctly.
+ repository_ids = str( iri_ids )
+ repository_ids = re.sub( '[^a-fA-F0-9,]+', '', repository_ids )
+ decoded_kwd = tool_shed_decode( install_parameters.group(2) )
+ if 'install_tool_dependencies' in decoded_kwd:
+ decoded_kwd[ 'install_tool_dependencies' ] = install_tool_dependencies
+ if 'install_repository_dependencies' in decoded_kwd:
+ decoded_kwd[ 'install_repository_dependencies' ] = install_repository_dependencies
reinstalling = install_parameters.group(3)
url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
- ( iri_ids, encoded_kwd, reinstalling )
+ ( ','.join( util.listify( repository_ids ) ), tool_shed_encode( decoded_kwd ), reinstalling )
self.visit_galaxy_url( url )
+ return util.listify( repository_ids )
def install_repository( self, name, owner, category_name, install_tool_dependencies=False,
changeset_revision=None, strings_displayed=[], strings_not_displayed=[],
preview_strings_displayed=[], post_submit_strings_displayed=[], **kwd ):
@@ -469,8 +480,8 @@
kwd[ 'shed_tool_conf' ] = self.shed_tool_conf
self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
- self.initiate_installation_process()
- self.wait_for_repository_installation( repository, changeset_revision )
+ repository_ids = self.initiate_installation_process( install_tool_dependencies )
+ self.wait_for_repository_installation( repository_ids )
def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/load_invalid_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
( self.security.encode_id( repository.id ), tool_xml, changeset_revision )
@@ -510,12 +521,17 @@
self.visit_galaxy_url( url )
strings_displayed = [ installed_repository.name, 'repository has been activated' ]
self.check_for_strings( strings_displayed, [] )
- def reinstall_repository( self, installed_repository ):
- url = '/admin_toolshed/reinstall_repository?id=%s' % self.security.encode_id( installed_repository.id )
+ def reinstall_repository( self,
+ installed_repository,
+ install_repository_dependencies='true',
+ install_tool_dependencies='false' ):
+ url = '/admin_toolshed/reselect_tool_panel_section?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
- self.initiate_installation_process()
- tool_shed_repository = test_db_util.get_repository_by_name_and_owner( installed_repository.name, installed_repository.owner )
- self.wait_for_repository_installation( tool_shed_repository, installed_repository.installed_changeset_revision )
+        url = '/admin_toolshed/reinstall_repository?id=%s&install_repository_dependencies=%s&install_tool_dependencies=%s' % \
+              ( self.security.encode_id( installed_repository.id ), install_repository_dependencies, install_tool_dependencies )
+ self.visit_galaxy_url( url )
+ repository_ids = self.initiate_installation_process( install_tool_dependencies, install_repository_dependencies )
+ self.wait_for_repository_installation( repository_ids )
def repository_is_new( self, repository ):
repo = hg.repository( ui.ui(), self.get_repo_path( repository ) )
tip_ctx = repo.changectx( repo.changelog.tip() )
@@ -636,22 +652,18 @@
def visit_galaxy_url( self, url ):
url = '%s%s' % ( self.galaxy_url, url )
self.visit_url( url )
- def wait_for_repository_installation( self, repository, changeset_revision ):
+ def wait_for_repository_installation( self, repository_ids ):
final_states = [ galaxy_model.ToolShedRepository.installation_status.ERROR,
- galaxy_model.ToolShedRepository.installation_status.INSTALLED,
- galaxy_model.ToolShedRepository.installation_status.UNINSTALLED,
- galaxy_model.ToolShedRepository.installation_status.DEACTIVATED ]
- repository_name = repository.name
- owner = repository.user.username
- if changeset_revision is None:
- changeset_revision = self.get_repository_tip( repository )
- galaxy_repository = test_db_util.get_galaxy_repository_by_name_owner_changeset_revision( repository_name, owner, changeset_revision )
- timeout_counter = 0
- while galaxy_repository.status not in final_states:
- ga_refresh( galaxy_repository )
- timeout_counter = timeout_counter + 1
- if timeout_counter > common.repository_installation_timeout:
- raise AssertionError( 'Repository installation timed out, %d seconds elapsed, repository state is %s.' % \
- ( timeout_counter, repository.status ) )
- break
- time.sleep( 1 )
+ galaxy_model.ToolShedRepository.installation_status.INSTALLED ]
+ if repository_ids:
+ for repository_id in repository_ids:
+ galaxy_repository = test_db_util.get_installed_repository_by_id( self.security.decode_id( repository_id ) )
+ timeout_counter = 0
+ while galaxy_repository.status not in final_states:
+ test_db_util.ga_refresh( galaxy_repository )
+ timeout_counter = timeout_counter + 1
+ if timeout_counter > common.repository_installation_timeout:
+ raise AssertionError( 'Repository installation timed out, %d seconds elapsed, repository state is %s.' % \
+                        ( timeout_counter, galaxy_repository.status ) )
+ break
+ time.sleep( 1 )
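
For reference, the id-sanitizing step added to initiate_installation_process() above reduces to a single regex substitution. A minimal, self-contained sketch follows; the sample iri_ids value is hypothetical, and split( ',' ) stands in for util.listify():

    import re

    # Hypothetical stringified list in the form described by the comment above.
    iri_ids = "[u'fa8c1b7e9d2a4c31', u'0b3d5e7f9a1c2e4d']"

    # Keep only hex digits and commas so that util.listify()/decode_id()
    # receive clean encoded repository ids.
    repository_ids = re.sub( '[^a-fA-F0-9,]+', '', str( iri_ids ) )

    assert repository_ids == 'fa8c1b7e9d2a4c31,0b3d5e7f9a1c2e4d'
    assert repository_ids.split( ',' ) == [ 'fa8c1b7e9d2a4c31', '0b3d5e7f9a1c2e4d' ]
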
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- a/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -2,8 +2,8 @@
import tool_shed.base.test_db_util as test_db_util
freebayes_repository_name = 'freebayes_0040'
-freebayes_repository_name_description = "Galaxy's freebayes tool"
-freebayes_repository_name_long_description = "Long description of Galaxy's freebayes tool"
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
filtering_repository_name = 'filtering_0040'
filtering_repository_description = "Galaxy's filtering tool"
@@ -31,8 +31,8 @@
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
repository = self.get_or_create_repository( name=freebayes_repository_name,
- description=freebayes_repository_name_description,
- long_description=freebayes_repository_name_long_description,
+ description=freebayes_repository_description,
+ long_description=freebayes_repository_long_description,
owner=common.test_user_1_name,
categories=[ 'test_0040_repository_circular_dependencies' ],
strings_displayed=[] )
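
The encoded_kwd handling added to initiate_installation_process() in the twilltestcase.py diff above follows a decode, override, re-encode pattern. A minimal sketch, using JSON as a stand-in for the wire format of tool_shed_encode/tool_shed_decode (the real helpers live in galaxy.tool_shed.encoding_util and are not reproduced here):

    import json

    # JSON stand-ins for tool_shed_encode / tool_shed_decode.
    def encode( d ):
        return json.dumps( d )

    def decode( s ):
        return json.loads( s )

    # Hypothetical kwd dict as the install controller might have encoded it.
    encoded_kwd = encode( { 'install_tool_dependencies': True,
                            'install_repository_dependencies': False,
                            'tool_panel_section': 'test_section' } )

    # Decode, override only the flags the test wants to control, re-encode.
    decoded_kwd = decode( encoded_kwd )
    if 'install_tool_dependencies' in decoded_kwd:
        decoded_kwd[ 'install_tool_dependencies' ] = False
    if 'install_repository_dependencies' in decoded_kwd:
        decoded_kwd[ 'install_repository_dependencies' ] = True
    encoded_kwd = encode( decoded_kwd )
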
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/functional/test_0080_advanced_circular_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0080_advanced_circular_dependencies.py
@@ -0,0 +1,89 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+column_repository_name = 'column_maker_0080'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0080'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 0080 Advanced Circular Dependencies'
+category_description = 'Test circular dependency features'
+
+class TestRepositoryCircularDependencies( ShedTwillTestCase ):
+ '''Verify that the code correctly handles circular dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_initiate_category_repositories( self ):
+ """Create a category for this test suite and add repositories to it."""
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=column_repository_name,
+ description=column_repository_description,
+ long_description=column_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository,
+ 'column_maker/column_maker.tar',
+ strings_displayed=[],
+ commit_message='Uploaded column_maker.tar.' )
+ repository = self.get_or_create_repository( name=convert_repository_name,
+ description=convert_repository_description,
+ long_description=convert_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository,
+ 'convert_chars/convert_chars.tar',
+ strings_displayed=[],
+ commit_message='Uploaded convert_chars.tar.' )
+ def test_0020_create_repository_dependencies( self ):
+        '''Upload a repository_dependencies.xml file that specifies the current revision of convert_chars_0080 to the column_maker_0080 repository.'''
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0080', additional_paths=[ 'convert' ] )
+ self.generate_repository_dependency_xml( [ convert_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Column maker depends on the convert repository.' )
+ self.upload_file( column_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on convert' )
+    def test_0025_create_dependency_on_column_maker( self ):
+        '''Upload a repository_dependencies.xml file that specifies the current revision of column_maker_0080 to the convert_chars_0080 repository.'''
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0080', additional_paths=[ 'convert' ] )
+ self.generate_repository_dependency_xml( [ column_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Convert chars depends on the column_maker repository.' )
+ self.upload_file( convert_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on column' )
+ def test_0030_verify_repository_dependencies( self ):
+ '''Verify that each repository can depend on the other without causing an infinite loop.'''
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ self.check_repository_dependency( convert_repository, column_repository, self.get_repository_tip( column_repository ) )
+ self.check_repository_dependency( column_repository, convert_repository, self.get_repository_tip( convert_repository ) )
+ def test_0035_verify_repository_metadata( self ):
+ '''Verify that resetting the metadata does not change it.'''
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ for repository in [ column_repository, convert_repository ]:
+ self.verify_unchanged_repository_metadata( repository )
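
The repository_dependencies.xml files generated by the tests above each declare one repository's dependency on the other, which is what produces the circular relationship. A tool shed repository_dependencies.xml is roughly of the following shape; every attribute value below is a placeholder:

    <?xml version="1.0"?>
    <repositories description="Column maker depends on the convert repository.">
        <!-- toolshed, owner, and changeset_revision values are placeholders -->
        <repository toolshed="http://localhost:9009" name="convert_chars_0080" owner="user1" changeset_revision="0123456789ab" />
    </repositories>
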
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
@@ -0,0 +1,316 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+column_repository_name = 'column_maker_0080'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0080'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 0080 Advanced Circular Dependencies'
+category_description = 'Test circular dependency features'
+
+class TestRepositoryDependencies( ShedTwillTestCase ):
+    '''Test installation, deactivation, and reinstallation of repositories with circular repository dependencies.'''
+ def test_0000_create_or_login_admin_user( self ):
+ """Create necessary user accounts and login as an admin user."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ galaxy_admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ galaxy_admin_user_private_role = test_db_util.get_galaxy_private_role( galaxy_admin_user )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_initiate_test_data( self ):
+ """Create a category for this test suite and add repositories to it."""
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=column_repository_name,
+ description=column_repository_description,
+ long_description=column_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ self.upload_file( repository,
+ 'column_maker/column_maker.tar',
+ strings_displayed=[],
+ commit_message='Uploaded column_maker.tar.' )
+ repository = self.get_or_create_repository( name=convert_repository_name,
+ description=convert_repository_description,
+ long_description=convert_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository,
+ 'convert_chars/convert_chars.tar',
+ strings_displayed=[],
+ commit_message='Uploaded convert_chars.tar.' )
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_1080', additional_paths=[ 'convert' ] )
+ self.generate_repository_dependency_xml( [ convert_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Column maker depends on the convert repository.' )
+ self.upload_file( column_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on convert' )
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_1080', additional_paths=[ 'convert' ] )
+ self.generate_repository_dependency_xml( [ column_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Convert chars depends on the column_maker repository.' )
+ self.upload_file( convert_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on column' )
+ def test_0010_install_repositories( self ):
+        '''Install convert_chars with the install repository dependencies check box checked - this should install both convert_chars and column_maker.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.install_repository( convert_repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_tool_dependencies=False,
+ install_repository_dependencies='Yes',
+ new_tool_panel_section='test_1080' )
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ browse_strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision ]
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Installed' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ strings_displayed.append( 'Installed repository dependencies' )
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0015_deactivate_convert_repository( self ):
+ '''Deactivate convert_chars - this should display column_maker as installed but missing repository dependencies'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_convert_repository, remove_from_disk=False )
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Missing repository dependencies',
+ 'Deactivated' ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ def test_0020_reactivate_convert_repository( self ):
+ '''Activate convert_chars - this should display both convert_chars and column_maker as installed with a green box'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.reactivate_repository( installed_convert_repository )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+                              'Installed repository dependencies' ]
+        self.display_installed_repository_manage_page( installed_convert_repository,
+                                                       strings_displayed=strings_displayed )
+ def test_0025_deactivate_column_repository( self ):
+ '''Deactivate column_maker - this should display convert_chars installed but missing repository dependencies'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_column_repository, remove_from_disk=False )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Missing repository dependencies',
+ 'Deactivated' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0030_deactivate_convert_repository( self ):
+ '''Deactivate convert_chars - both convert_chars and column_maker are deactivated'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_convert_repository, remove_from_disk=False )
+ strings_not_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+ def test_0035_reactivate_column_repository( self ):
+ '''Activate column_maker - this should not automatically activate convert_chars, so column_maker should be displayed as installed but missing repository dependencies'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.reactivate_repository( installed_column_repository )
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Missing repository dependencies',
+ 'Deactivated' ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ def test_0040_reactivate_convert_repository( self ):
+ '''Activate convert_chars - this should display both convert_chars and column_maker as installed with a green box'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.reactivate_repository( installed_convert_repository )
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0045_uninstall_column_repository( self ):
+ '''Uninstall column_maker - this should display convert_chars installed but missing repository dependencies'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_column_repository, remove_from_disk=True )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Missing repository dependencies',
+ 'Uninstalled' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0050_reinstall_column_repository( self ):
+ '''Reinstall column_maker without repository dependencies, verify both convert_chars and column_maker are installed.'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.reinstall_repository( installed_column_repository, install_repository_dependencies=False )
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0055_uninstall_convert_repository( self ):
+ '''Uninstall convert_chars, verify column_maker installed but missing repository dependencies'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_convert_repository, remove_from_disk=True )
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Missing repository dependencies',
+ 'Uninstalled' ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ def test_0060_uninstall_column_repository( self ):
+ '''Uninstall column_maker - both convert_chars and column_maker are uninstalled'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_column_repository, remove_from_disk=True )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Missing repository dependencies',
+ 'Activate or reinstall repository',
+ 'Uninstalled' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0065_reinstall_convert_repository( self ):
+        '''Reinstall convert_chars with the install repository dependencies check box checked - this should install both convert_chars and column_maker.'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.reinstall_repository( installed_convert_repository, install_repository_dependencies=True )
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
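
Every install and reinstall test above ultimately blocks in the reworked wait_for_repository_installation() from the twilltestcase.py diff, which is a bounded one-second polling loop over the repository's installation status. Stripped of the Galaxy models, the pattern looks like the sketch below; fetch_status and the state strings are hypothetical stand-ins for the test_db_util lookups:

    import time

    def wait_until_final( fetch_status, final_states, timeout=180 ):
        # fetch_status() is a hypothetical stand-in for refreshing and
        # re-reading the repository's status from the database.
        elapsed = 0
        status = fetch_status()
        while status not in final_states:
            if elapsed > timeout:
                raise AssertionError( 'Repository installation timed out, %d seconds elapsed, repository state is %s.' % ( elapsed, status ) )
            time.sleep( 1 )
            elapsed += 1
            status = fetch_status()
        return status

    # Usage sketch: wait_until_final( lambda: repository.status, [ 'Installed', 'Error' ] )
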
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/test_data/column_maker/column_maker.tar
Binary file test/tool_shed/test_data/column_maker/column_maker.tar has changed
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/test_data/convert_chars/convert_chars.tar
Binary file test/tool_shed/test_data/convert_chars/convert_chars.tar has changed
Repository URL: https://bitbucket.org/galaxy/galaxy-central/