galaxy-commits
January 2013
- 1 participant
- 160 discussions
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/be29d1334c08/
changeset: be29d1334c08
user: jgoecks
date: 2013-01-09 15:07:58
summary: Add missing import.
affected #: 1 file
diff -r 66bbee3c9c02b343c4ab621d39efa261b261f237 -r be29d1334c08ec3e32290feab9a6da7bda9862ec lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -5,6 +5,7 @@
from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings
from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesVisualizationMixin, VALID_SLUG_RE
from galaxy.web.framework.helpers import time_ago, grids, iff
+from galaxy import util
from galaxy.util.json import from_json_string
from galaxy.util.sanitize_html import sanitize_html
from galaxy.visualization.genomes import decode_dbkey
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
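For context, a minimal self-contained sketch of the bug class this one-line commit fixes: a function that references a module name that was never imported fails with NameError only when it is called, not when the file is loaded. The util stand-in below is hypothetical; the real fix simply adds "from galaxy import util".

def to_list(value):
    return util.listify(value)  # 'util' was never imported -> NameError at call time

try:
    to_list("a,b")
except NameError as err:
    print(err)  # name 'util' is not defined

import types
util = types.SimpleNamespace(listify=lambda v: v.split(","))  # hypothetical stand-in for galaxy.util
print(to_list("a,b"))  # ['a', 'b'] once the module name resolves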
commit/galaxy-central: greg: Handle repository dependency objects appropriately if the user elected to not install them.
by Bitbucket, 08 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/66bbee3c9c02/
changeset: 66bbee3c9c02
user: greg
date: 2013-01-08 22:30:14
summary: Handle repository dependency objects appropriately if the user elected to not install them.
affected #: 2 files
diff -r 2ec609faae49b4ab32190403669849f1af0b68f6 -r 66bbee3c9c02b343c4ab621d39efa261b261f237 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -2,7 +2,7 @@
from galaxy.datatypes import checkers
from galaxy.web import url_for
from galaxy import util
-from galaxy.util.json import from_json_string, to_json_string
+from galaxy.util import json
from galaxy.webapps.community.util import container_util
import shed_util_common as suc
import galaxy.tools
@@ -184,19 +184,20 @@
instance and when uninstalled repositories are being reinstalled.
"""
message = ''
- # There will be a one-to-one mapping between items in created_or_updated_tool_shed_repositories and tool_panel_section_keys.
+ # The following list will be maintained within this method to contain all created or updated tool shed repositories, including repository
+ # dependencies that may not be installed.
+ all_created_or_updated_tool_shed_repositories = []
+ # There will be a one-to-one mapping between items in created_or_updated_tool_shed_repositories and tool_panel_section_keys. The following
+ # list will filter out repository dependencies that are not to be installed.
created_or_updated_tool_shed_repositories = []
tool_panel_section_keys = []
# Repositories will be filtered (e.g., if already installed, etc), so filter the associated repo_info_dicts accordingly.
filtered_repo_info_dicts = []
- if install_repository_dependencies:
- # Discover all repository dependencies and retrieve information for installing them.
- all_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
- if not all_repo_info_dicts:
- # No repository dependencies were discovered so process the received repositories.
- all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
- else:
- # The user chose to not install repository dependencies, so process the received repositories.
+ # Discover all repository dependencies and retrieve information for installing them. Even if the user elected to not install repository dependencies,
+ # we have to make sure all repository dependency objects exist so that the appropriate repository dependency relationships can be built.
+ all_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
+ if not all_repo_info_dicts:
+ # No repository dependencies were discovered so process the received repositories.
all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
for repo_info_dict in all_repo_info_dicts:
for name, repo_info_tuple in repo_info_dict.items():
@@ -214,16 +215,19 @@
tool_panel_section=tool_panel_section,
new_tool_panel_section=new_tool_panel_section )
if reinstalling or install_repository_dependencies:
- if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
- trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
- can_update = True
- name = installed_tool_shed_repository.name
- description = installed_tool_shed_repository.description
- installed_changeset_revision = installed_tool_shed_repository.installed_changeset_revision
- metadata_dict = installed_tool_shed_repository.metadata
- dist_to_shed = installed_tool_shed_repository.dist_to_shed
+ if is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ) or install_repository_dependencies:
+ if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
+ trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ can_update = True
+ name = installed_tool_shed_repository.name
+ description = installed_tool_shed_repository.description
+ installed_changeset_revision = installed_tool_shed_repository.installed_changeset_revision
+ metadata_dict = installed_tool_shed_repository.metadata
+ dist_to_shed = installed_tool_shed_repository.dist_to_shed
+ else:
+ # There is a repository already installed which is a dependency of the repository being reinstalled.
+ can_update = False
else:
- # There is a repository already installed which is a dependency of the repository being reinstalled.
can_update = False
else:
if len( all_repo_info_dicts ) == 1:
@@ -243,14 +247,16 @@
created_or_updated_tool_shed_repositories.append( installed_tool_shed_repository )
tool_panel_section_keys.append( tool_panel_section_key )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts, message
+ elif is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ can_update = True
else:
- can_update = True
+ can_update = False
else:
# A tool shed repository is being installed into a Galaxy instance for the first time. We may have the case where a repository
# is being reinstalled, because the repository being newly installed here may be a dependency of the repository being reinstalled.
can_update = True
installed_changeset_revision = changeset_revision
- metadata_dict={}
+ metadata_dict = {}
dist_to_shed = False
if can_update:
if reinstalling or install_repository_dependencies:
@@ -294,9 +300,14 @@
current_changeset_revision=changeset_revision,
owner=repository_owner,
dist_to_shed=False )
- created_or_updated_tool_shed_repositories.append( tool_shed_repository )
- tool_panel_section_keys.append( tool_panel_section_key )
- filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
+ all_created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+ # Only append the tool_shed_repository to the list of created_or_updated_tool_shed_repositories if it is supposed to be installed.
+ if install_repository_dependencies or is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+ tool_panel_section_keys.append( tool_panel_section_key )
+ filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
+ # Build repository dependency relationships even if the user chose to not install repository dependencies.
+ suc.build_repository_dependency_relationships( trans, all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts, message
def create_repository_dict_for_proprietary_datatypes( tool_shed, name, owner, installed_changeset_revision, tool_dicts, converter_path=None, display_path=None ):
return dict( tool_shed=tool_shed,
@@ -679,6 +690,73 @@
tmp_url = suc.clean_repository_clone_url( repository_clone_url )
tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
return get_repository_owner( tmp_url )
+def get_repository_readme_and_dependencies_for_display( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies ):
+ # If we're installing a single repository, see if it contains a readme or dependencies that we can display.
+ name = repo_info_dict.keys()[ 0 ]
+ repo_info_tuple = repo_info_dict[ name ]
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ # Handle README files.
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
+ ( name, repository_owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ readme_files_dict = json.from_json_string( raw_text )
+ if repository_dependencies:
+ missing_td = {}
+ # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
+ # In this case, a record for the repository will exist in the database with the status of 'New'.
+ repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
+ if repository and repository.metadata:
+ installed_rd, missing_rd = \
+ get_installed_and_missing_repository_dependencies( trans, repository )
+ else:
+ installed_rd, missing_rd = \
+ get_installed_and_missing_repository_dependencies_for_new_install( trans, repository_dependencies )
+ # Discover all repository dependencies and retrieve information for installing them.
+ required_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ # Display tool dependencies defined for each of the repository dependencies.
+ if required_repo_info_dicts:
+ all_tool_dependencies = {}
+ for rid in required_repo_info_dicts:
+ for name, repo_info_tuple in rid.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, rid_installed_td = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if rid_installed_td:
+ for td_key, td_dict in rid_installed_td.items():
+ if td_key not in all_tool_dependencies:
+ all_tool_dependencies[ td_key ] = td_dict
+ if all_tool_dependencies:
+ if installed_td is None:
+ installed_td = {}
+ else:
+ # Move all tool dependencies to the missing_tool_dependencies container.
+ for td_key, td_dict in installed_td.items():
+ if td_key not in missing_td:
+ missing_td[ td_key ] = td_dict
+ installed_td = {}
+ # Discover and categorize all tool dependencies defined for this repository's repository dependencies.
+ required_tool_dependencies, required_missing_tool_dependencies = \
+ get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies )
+ if required_tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_tool_dependencies.items():
+ if td_key not in installed_td:
+ installed_td[ td_key ] = td_dict
+ if required_missing_tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_missing_tool_dependencies.items():
+ if td_key not in missing_td:
+ missing_td[ td_key ] = td_dict
+ else:
+ installed_rd = None
+ missing_rd = None
+ missing_td = None
+ return name, repository_owner, changeset_revision, readme_files_dict, includes_tool_dependencies, installed_rd, missing_rd, installed_td, missing_td
def get_required_repo_info_dicts( tool_shed_url, repo_info_dicts ):
"""
Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
@@ -716,7 +794,7 @@
text = response.read()
response.close()
if text:
- required_repo_info_dict = from_json_string( text )
+ required_repo_info_dict = json.from_json_string( text )
required_repo_info_dicts = []
encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
for encoded_dict_str in encoded_dict_strings:
@@ -1015,6 +1093,18 @@
return False
# Default to copying the file if none of the above are true.
return True
+def is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ """Return True if the received repo_info_dict is contained in the list of received repo_info_dicts."""
+ for name, repo_info_tuple in repo_info_dict.items():
+ for rid in repo_info_dicts:
+ for rid_name, rid_repo_info_tuple in rid.items():
+ if rid_name == name:
+ if len( rid_repo_info_tuple ) == len( repo_info_tuple ):
+ for item in rid_repo_info_tuple:
+ if item not in repo_info_tuple:
+ return False
+ return True
+ return False
def load_installed_datatype_converters( app, installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype converters
app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=installed_repository_dict, deactivate=deactivate )
@@ -1073,7 +1163,7 @@
response = urllib2.urlopen( url )
raw_text = response.read()
response.close()
- readme_files_dict = from_json_string( raw_text )
+ readme_files_dict = json.from_json_string( raw_text )
else:
readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
else:
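The new get_repository_readme_and_dependencies_for_display helper above centralizes the README fetch that the controller diff below removes. A minimal sketch of that fetch-and-parse step, using Python 3's urllib in place of the original urllib2 (the URL shape mirrors the code above; the example values are illustrative):

import json
from urllib.request import urlopen
from urllib.parse import urlencode

def get_readme_files_dict(tool_shed_url, name, owner, changeset_revision):
    # Build the same get_readme_files query the helper builds, then parse the JSON payload.
    query = urlencode({"name": name, "owner": owner, "changeset_revision": changeset_revision})
    url = "%s/repository/get_readme_files?%s" % (tool_shed_url.rstrip("/"), query)
    with urlopen(url) as response:  # the original reads and closes the response explicitly
        return json.loads(response.read())

# Hypothetical call:
# get_readme_files_dict("https://toolshed.g2.bx.psu.edu", "fastqc", "devteam", "abc123")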
diff -r 2ec609faae49b4ab32190403669849f1af0b68f6 -r 66bbee3c9c02b343c4ab621d39efa261b261f237 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1220,9 +1220,6 @@
message=message,
status='error' ) )
if created_or_updated_tool_shed_repositories:
- if install_repository_dependencies:
- # Build repository dependency relationships.
- suc.build_repository_dependency_relationships( trans, repo_info_dicts, created_or_updated_tool_shed_repositories )
# Handle contained tools.
if includes_tools and ( new_tool_panel_section or tool_panel_section ):
if new_tool_panel_section:
@@ -1290,73 +1287,10 @@
if len( repo_info_dicts ) == 1:
# If we're installing a single repository, see if it contains a readme or dependencies that we can display.
repo_info_dict = repo_info_dicts[ 0 ]
- name = repo_info_dict.keys()[ 0 ]
- repo_info_tuple = repo_info_dict[ name ]
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- # Handle README files.
- url = suc.url_join( tool_shed_url,
- 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( name, repository_owner, changeset_revision ) )
- response = urllib2.urlopen( url )
- raw_text = response.read()
- response.close()
- readme_files_dict = json.from_json_string( raw_text )
- if repository_dependencies:
- missing_tool_dependencies = {}
- # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
- # In this case, a record for the repository will exist in the database with the status of 'New'.
- repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
- if repository and repository.metadata:
- installed_repository_dependencies, missing_repository_dependencies = \
- shed_util.get_installed_and_missing_repository_dependencies( trans, repository )
- else:
- installed_repository_dependencies, missing_repository_dependencies = \
- shed_util.get_installed_and_missing_repository_dependencies_for_new_install( trans, repository_dependencies )
- # Discover all repository dependencies and retrieve information for installing them.
- required_repo_info_dicts = shed_util.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
- # Display tool dependencies defined for each of the repository dependencies.
- if required_repo_info_dicts:
- all_tool_dependencies = {}
- for rid in required_repo_info_dicts:
- for name, repo_info_tuple in rid.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- if tool_dependencies:
- for td_key, td_dict in tool_dependencies.items():
- if td_key not in all_tool_dependencies:
- all_tool_dependencies[ td_key ] = td_dict
- if all_tool_dependencies:
- if tool_dependencies is None:
- tool_dependencies = {}
- else:
- # Move all tool dependencies to the missing_tool_dependencies container.
- for td_key, td_dict in tool_dependencies.items():
- if td_key not in missing_tool_dependencies:
- missing_tool_dependencies[ td_key ] = td_dict
- tool_dependencies = {}
- # Discover and categorize all tool dependencies defined for this repository's repository dependencies.
- required_tool_dependencies, required_missing_tool_dependencies = \
- shed_util.get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies )
- if required_tool_dependencies:
- if not includes_tool_dependencies:
- includes_tool_dependencies = True
- for td_key, td_dict in required_tool_dependencies.items():
- if td_key not in tool_dependencies:
- tool_dependencies[ td_key ] = td_dict
- if required_missing_tool_dependencies:
- if not includes_tool_dependencies:
- includes_tool_dependencies = True
- for td_key, td_dict in required_missing_tool_dependencies.items():
- if td_key not in missing_tool_dependencies:
- missing_tool_dependencies[ td_key ] = td_dict
- else:
- installed_repository_dependencies = None
- missing_repository_dependencies = None
- missing_tool_dependencies = None
- required_repo_info_dicts = None
- # Since we are installing a new repository, no tool dependencies will be considered "missing". Most of the repository contents
- # are set to None since we don't yet know what they are.
+ name, repository_owner, changeset_revision, readme_files_dict, includes_tool_dependencies, \
+ installed_repository_dependencies, missing_repository_dependencies, tool_dependencies, missing_tool_dependencies = \
+ shed_util.get_repository_readme_and_dependencies_for_display( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
+ # Since we are installing a new repository, most of the repository contents are set to None since we don't yet know what they are.
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
toolshed_base_url=tool_shed_url,
repository_name=name,
@@ -1374,7 +1308,7 @@
workflows=None,
new_install=True,
reinstalling=False )
- # We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we're retrieved the list of installed
+ # We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we've retrieved the list of installed
# and missing repository dependencies from the database (2) we're handling the scenario where an error occurred during the installation process,
# so we have a tool_shed_repository record in the database with associated repository dependency records. Since we have the repository
# dependencies in either case, we'll merge the list of missing repository dependencies into the list of installed repository dependencies since
@@ -1489,24 +1423,16 @@
repository_dependencies=repository_dependencies )
repo_info_dicts.append( repo_info_dict )
# Make sure all tool_shed_repository records exist.
- if install_repository_dependencies:
- created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
- shed_util.create_repository_dependency_objects( trans,
- tool_path,
- tool_shed_url,
- repo_info_dicts,
- reinstalling=True,
- install_repository_dependencies=install_repository_dependencies,
- no_changes_checked=no_changes_checked,
- tool_panel_section=tool_panel_section,
- new_tool_panel_section=new_tool_panel_section )
- if len( created_or_updated_tool_shed_repositories ) > 1:
- # Build repository dependency relationships.
- suc.build_repository_dependency_relationships( trans, repo_info_dicts, created_or_updated_tool_shed_repositories )
- else:
- filtered_repo_info_dicts = [ repo_info_dict for repo_info_dict in repo_info_dicts ]
- created_or_updated_tool_shed_repositories = [ tool_shed_repository ]
- tool_panel_section_keys.append( tool_panel_section_key )
+ created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
+ shed_util.create_repository_dependency_objects( trans,
+ tool_path,
+ tool_shed_url,
+ repo_info_dicts,
+ reinstalling=True,
+ install_repository_dependencies=install_repository_dependencies,
+ no_changes_checked=no_changes_checked,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
# Default the selected tool panel location for loading tools included in each newly installed required tool shed repository to the location
# selected for the repository selected for reinstallation.
for index, tps_key in enumerate( tool_panel_section_keys ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
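The heart of this commit is keeping two lists: every discovered repository record, so build_repository_dependency_relationships can still link them all, and only the records the user actually chose to install. A self-contained sketch of that filtering pattern, with illustrative data shapes and a simplified membership test:

def is_in_repo_info_dicts(repo_info_dict, repo_info_dicts):
    # Simplified version of the helper added above: match by name, then by contents.
    for name, repo_info_tuple in repo_info_dict.items():
        for rid in repo_info_dicts:
            for rid_name, rid_tuple in rid.items():
                if rid_name == name:
                    return set(rid_tuple) == set(repo_info_tuple)
    return False

selected = [{"fastqc": ("desc", "http://shed/repos/o/fastqc", "rev1")}]
discovered = selected + [{"samtools": ("dep", "http://shed/repos/o/samtools", "rev9")}]

install_repository_dependencies = False
to_install = [rid for rid in discovered
              if install_repository_dependencies or is_in_repo_info_dicts(rid, selected)]
# Relationships are built over all of 'discovered'; installation only touches 'to_install'.
print([list(d)[0] for d in to_install])  # ['fastqc']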
08 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2ec609faae49/
changeset: 2ec609faae49
user: greg
date: 2013-01-08 19:50:16
summary: When installing a new repository from the tool shed to Galaxy or reinstalling an uninstalled repository, handle tool dependencies defined for the repository dependencies of the repository being installed.
affected #: 6 files
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -653,6 +653,22 @@
tool_dependencies = None
missing_tool_dependencies = None
return tool_dependencies, missing_tool_dependencies
+def get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies ):
+ """Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories being installed into Galaxy."""
+ # FIXME: this method currently populates and returns only missing tool dependencies since tool dependencies defined for complex repository dependency
+ # relationships is not currently supported. This method should be enhanced to search for installed tool dependencies defined as complex repository
+ # dependency relationships when that feature is implemented.
+ if all_tool_dependencies:
+ tool_dependencies = {}
+ missing_tool_dependencies = {}
+ for td_key, val in all_tool_dependencies.items():
+ # Since we have a new install, missing tool dependencies have never been installed.
+ val[ 'status' ] = trans.model.ToolDependency.installation_status.NEVER_INSTALLED
+ missing_tool_dependencies[ td_key ] = val
+ else:
+ tool_dependencies = None
+ missing_tool_dependencies = None
+ return tool_dependencies, missing_tool_dependencies
def get_repository_owner( cleaned_repository_url ):
items = cleaned_repository_url.split( 'repos' )
repo_path = items[ 1 ]
@@ -667,7 +683,7 @@
"""
Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through
- this methid is required to retrieve all repository dependencies.
+ this method is required to retrieve all repository dependencies.
"""
all_repo_info_dicts = []
if repo_info_dicts:
@@ -1035,7 +1051,7 @@
if k not in [ 'id', 'version', 'name' ]:
return True
return False
-def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False ):
+def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False, required_repo_info_dicts=None ):
"""
Retrieve necessary information from the received repository's metadata to populate the containers_dict for display. This method is called only
from Galaxy (not the tool shed) when displaying repository dependencies for installed repositories and when displaying them for uninstalled
@@ -1064,15 +1080,51 @@
readme_files_dict = None
# Handle repository dependencies.
installed_repository_dependencies, missing_repository_dependencies = get_installed_and_missing_repository_dependencies( trans, repository )
- # Handle tool dependencies.
- all_tool_dependencies = metadata.get( 'tool_dependencies', None )
- installed_tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
+ # Handle the current repository's tool dependencies.
+ repository_tool_dependencies = metadata.get( 'tool_dependencies', None )
+ repository_installed_tool_dependencies, repository_missing_tool_dependencies = \
+ get_installed_and_missing_tool_dependencies( trans, repository, repository_tool_dependencies )
if reinstalling:
- # All tool dependencies will be considered missing since we are reinstalling the repository.
- if installed_tool_dependencies:
- for td in installed_tool_dependencies:
- missing_tool_dependencies.append( td )
- installed_tool_dependencies = None
+ installed_tool_dependencies = None
+ missing_tool_dependencies = None
+ if repository_installed_tool_dependencies is None:
+ repository_installed_tool_dependencies = {}
+ if repository_missing_tool_dependencies is None:
+ repository_missing_tool_dependencies = {}
+ if required_repo_info_dicts:
+ # Handle the tool dependencies defined for each of the repository's repository dependencies.
+ for rid in required_repo_info_dicts:
+ for name, repo_info_tuple in rid.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if tool_dependencies:
+ tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
+ required_repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, name, repository_owner, changeset_revision )
+ if not required_repository:
+ # The required_repository may have been installed with a different changeset revision.
+ required_repository, installed_changeset_revision = repository_was_previously_installed( trans,
+ tool_shed_url,
+ name,
+ repo_info_tuple,
+ clone_dir )
+ if required_repository:
+ required_repository_installed_tool_dependencies, required_repository_missing_tool_dependencies = \
+ get_installed_and_missing_tool_dependencies( trans, required_repository, tool_dependencies )
+ if required_repository_installed_tool_dependencies:
+ for td_key, td_dict in required_repository_installed_tool_dependencies.items():
+ if td_key not in repository_installed_tool_dependencies:
+ repository_installed_tool_dependencies[ td_key ] = td_dict
+ if required_repository_missing_tool_dependencies:
+ for td_key, td_dict in required_repository_missing_tool_dependencies.items():
+ if td_key not in repository_missing_tool_dependencies:
+ repository_missing_tool_dependencies[ td_key ] = td_dict
+ if repository_installed_tool_dependencies:
+ installed_tool_dependencies = repository_installed_tool_dependencies
+ if repository_missing_tool_dependencies:
+ missing_tool_dependencies = repository_missing_tool_dependencies
+ else:
+ installed_tool_dependencies = repository_installed_tool_dependencies
+ missing_tool_dependencies = repository_missing_tool_dependencies
# Handle valid tools.
valid_tools = metadata.get( 'tools', None )
# Handle workflows.
@@ -1092,7 +1144,8 @@
tool_dependencies=installed_tool_dependencies,
valid_tools=valid_tools,
workflows=workflows,
- new_install=False )
+ new_install=False,
+ reinstalling=reinstalling )
else:
containers_dict = dict( datatypes=None,
invalid_tools=None,
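Per the FIXME in the new get_installed_and_missing_tool_dependencies_for_new_install helper above, a brand-new install reports every tool dependency as missing. A minimal sketch of that behavior; the status string stands in for trans.model.ToolDependency.installation_status.NEVER_INSTALLED:

NEVER_INSTALLED = "Never installed"  # illustrative stand-in for the model constant

def tool_dependencies_for_new_install(all_tool_dependencies):
    if not all_tool_dependencies:
        return None, None
    missing = {}
    for td_key, val in all_tool_dependencies.items():
        val["status"] = NEVER_INSTALLED  # nothing has been installed yet on a fresh install
        missing[td_key] = val
    return {}, missing  # installed is empty; everything is categorized as missing

print(tool_dependencies_for_new_install({"bwa/0.5.9": {"type": "package"}}))
# ({}, {'bwa/0.5.9': {'type': 'package', 'status': 'Never installed'}})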
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -78,7 +78,7 @@
return readme_files_dict
def build_repository_containers_for_galaxy( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision, repository, datatypes,
invalid_tools, missing_repository_dependencies, missing_tool_dependencies, readme_files_dict,
- repository_dependencies, tool_dependencies, valid_tools, workflows, new_install=False ):
+ repository_dependencies, tool_dependencies, valid_tools, workflows, new_install=False, reinstalling=False ):
"""Return a dictionary of containers for the received repository's dependencies and readme files for display during installation to Galaxy."""
containers_dict = dict( datatypes=None,
invalid_tools=None,
@@ -160,7 +160,8 @@
tool_dependencies,
label=label,
missing=False,
- new_install=new_install )
+ new_install=new_install,
+ reinstalling=reinstalling )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Missing tool dependencies container.
if missing_tool_dependencies:
@@ -170,7 +171,8 @@
missing_tool_dependencies,
label='Missing tool dependencies',
missing=True,
- new_install=new_install )
+ new_install=new_install,
+ reinstalling=reinstalling )
containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
# Valid tools container.
if valid_tools:
@@ -790,7 +792,7 @@
Return a dictionary that includes all of the information needed to install a repository into a local Galaxy instance. The dictionary will also
contain the recursive list of repository dependencies defined for the repository, as well as the defined tool dependencies.
- This method is called from Galaxy from two places:
+ This method is called from Galaxy in two places:
1. During the tool shed repository installation process (via the tool shed's get_repository_information() method)- in this case both the received
repository and repository_metadata will be objects.
2. When a tool shed repository that was uninstalled from a Galaxy instance is being re-installed - in this case, both repository and
@@ -2414,6 +2416,47 @@
removed = True
error_message = ''
return removed, error_message
+def repository_dependencies_have_tool_dependencies( trans, repository_dependencies ):
+ """
+ repository_dependencies':
+ {'http://localhost:9009__ESEP__emboss_6__ESEP__test__ESEP__92bedb60b0c9':
+ [['http://localhost:9009', 'emboss_datatypes', 'test', '27df73fe48a6']],
+ 'root_key': 'http://localhost:9009__ESEP__emboss__ESEP__test__ESEP__06d729cb3f34',
+ 'description': 'required to enable emboss 6 tools',
+ 'http://localhost:9009__ESEP__emboss__ESEP__test__ESEP__06d729cb3f34':
+ [['http://localhost:9009', 'emboss_6', 'test', '92bedb60b0c9']]}}
+ """
+ rd_tups_processed = []
+ for key, rd_tups in repository_dependencies.items():
+ if key in [ 'root_key', 'description' ]:
+ continue
+ rd_tup = container_util.get_components_from_key( key )
+ if rd_tup not in rd_tups_processed:
+ toolshed, name, owner, changeset_revision = rd_tup
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if 'tool_dependencies' in metadata:
+ return True
+ rd_tups_processed.append( rd_tup )
+ for rd_tup in rd_tups:
+ if rd_tup not in rd_tups_processed:
+ toolshed, name, owner, changeset_revision = rd_tup
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if 'tool_dependencies' in metadata:
+ return True
+ rd_tups_processed.append( rd_tup )
+ return False
def reset_all_metadata_on_installed_repository( trans, id ):
"""Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
repository = get_installed_tool_shed_repository( trans, id )
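The repository_dependencies dict walked by repository_dependencies_have_tool_dependencies above mixes bookkeeping keys ('root_key', 'description') with dependency entries, as its docstring shows. A condensed sketch of the traversal, with a hypothetical metadata_for lookup standing in for the database queries:

def have_tool_dependencies(repository_dependencies, metadata_for):
    # metadata_for(toolshed, name, owner, changeset) -> metadata dict or None (hypothetical)
    seen = []
    for key, rd_tups in repository_dependencies.items():
        if key in ("root_key", "description"):
            continue  # bookkeeping entries, not dependency tuples
        tups = [tuple(key.split("__ESEP__"))] + [tuple(t) for t in rd_tups]
        for tup in tups:
            if tup in seen:
                continue
            metadata = metadata_for(*tup)
            if metadata and "tool_dependencies" in metadata:
                return True
            seen.append(tup)
    return False

deps = {"root_key": "http://localhost:9009__ESEP__emboss__ESEP__test__ESEP__06d729cb3f34",
        "description": "required to enable emboss 6 tools",
        "http://localhost:9009__ESEP__emboss_6__ESEP__test__ESEP__92bedb60b0c9":
            [["http://localhost:9009", "emboss_datatypes", "test", "27df73fe48a6"]]}
lookup = lambda shed, name, owner, rev: {"tool_dependencies": {}} if name == "emboss_6" else {}
print(have_tool_dependencies(deps, lookup))  # True, found via the emboss_6 key itself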
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -786,9 +786,9 @@
operation='preview_tools_in_changeset',
repository_id=repository_id )
self.valid_repository_grid.operations = [ grids.GridOperation( "Preview and install",
- url_args=url_args,
- allow_multiple=False,
- async_compatible=False ) ]
+ url_args=url_args,
+ allow_multiple=False,
+ async_compatible=False ) ]
return self.valid_repository_grid( trans, **kwd )
def __build_allow_push_select_field( self, trans, current_push_list, selected_value='none' ):
options = []
@@ -1410,7 +1410,10 @@
repo_info_dicts=repo_info_dicts )
@web.json
def get_required_repo_info_dict( self, trans, encoded_str ):
- """Retrive a list of dictionaries that each contain all of the information needed to install the list of repositories defined by encoded_str."""
+ """
+ Retrieve and return a dictionary that includes a list of dictionaries that each contain all of the information needed to install the list of
+ repositories defined by the received encoded_str.
+ """
encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str )
encoded_required_repository_tups = encoded_required_repository_str.split( encoding_util.encoding_sep2 )
decoded_required_repository_tups = []
@@ -1430,7 +1433,7 @@
return repo_info_dict
@web.expose
def get_tool_dependencies( self, trans, **kwd ):
- """Handle a request from the InstallManager of a local Galaxy instance."""
+ """Handle a request from a Galaxy instance."""
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -291,7 +291,7 @@
else:
tools_root_folder = None
return folder_id, tools_root_folder
-def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', missing=False, new_install=False ):
+def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', missing=False, new_install=False, reinstalling=False ):
"""Return a folder hierarchy containing tool dependencies."""
# The status will be displayed only if the received value for missing is True. When this is the case, we're in Galaxy (not the tool shed)
# and the tool dependencies are not installed or are in an error state, so they are considered missing. The tool dependency status will be
@@ -304,7 +304,9 @@
folder_id += 1
folder = Folder( id=folder_id, key='tool_dependencies', label=label, parent=tool_dependencies_root_folder )
if trans.webapp.name == 'galaxy':
- if missing:
+ if reinstalling:
+ folder.description = "this repository's tools require handling of these dependencies"
+ elif missing and not reinstalling:
folder.description = 'click the name to install the missing dependency'
else:
if new_install:
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1024,6 +1024,8 @@
decoded_kwd = encoding_util.tool_shed_decode( encoded_kwd )
tsr_ids = decoded_kwd[ 'tool_shed_repository_ids' ]
tool_panel_section_keys = decoded_kwd[ 'tool_panel_section_keys' ]
+ repo_info_dicts = decoded_kwd[ 'repo_info_dicts' ]
+ filtered_repo_info_dicts = []
filtered_tool_panel_section_keys = []
repositories_for_installation = []
for index, tsr_id in enumerate( tsr_ids ):
@@ -1031,8 +1033,10 @@
if repository.status in [ trans.model.ToolShedRepository.installation_status.NEW,
trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
repositories_for_installation.append( repository )
+ filtered_repo_info_dicts.append( repo_info_dicts[ index ] )
filtered_tool_panel_section_keys.append( tool_panel_section_keys[ index ] )
if repositories_for_installation:
+ decoded_kwd[ 'repo_info_dicts' ] = filtered_repo_info_dicts
decoded_kwd[ 'tool_panel_section_keys' ] = filtered_tool_panel_section_keys
self.install_tool_shed_repositories( trans, repositories_for_installation, reinstalling=reinstalling, **decoded_kwd )
else:
@@ -1152,7 +1156,6 @@
return trans.show_error_message( message )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
tool_shed_url = kwd[ 'tool_shed_url' ]
# Handle repository dependencies.
includes_repository_dependencies = util.string_as_bool( kwd.get( 'includes_repository_dependencies', False ) )
@@ -1259,7 +1262,6 @@
shed_tool_conf=shed_tool_conf,
status=status,
tool_path=tool_path,
-
tool_panel_section_keys=tool_panel_section_keys,
tool_shed_repository_ids=tsrids_list,
tool_shed_url=tool_shed_url )
@@ -1300,9 +1302,10 @@
raw_text = response.read()
response.close()
readme_files_dict = json.from_json_string( raw_text )
- # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
- # In this case, a record for the repository will exist in the database with the status of 'New'.
if repository_dependencies:
+ missing_tool_dependencies = {}
+ # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
+ # In this case, a record for the repository will exist in the database with the status of 'New'.
repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
if repository and repository.metadata:
installed_repository_dependencies, missing_repository_dependencies = \
@@ -1310,9 +1313,48 @@
else:
installed_repository_dependencies, missing_repository_dependencies = \
shed_util.get_installed_and_missing_repository_dependencies_for_new_install( trans, repository_dependencies )
+ # Discover all repository dependencies and retrieve information for installing them.
+ required_repo_info_dicts = shed_util.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ # Display tool dependencies defined for each of the repository dependencies.
+ if required_repo_info_dicts:
+ all_tool_dependencies = {}
+ for rid in required_repo_info_dicts:
+ for name, repo_info_tuple in rid.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if tool_dependencies:
+ for td_key, td_dict in tool_dependencies.items():
+ if td_key not in all_tool_dependencies:
+ all_tool_dependencies[ td_key ] = td_dict
+ if all_tool_dependencies:
+ if tool_dependencies is None:
+ tool_dependencies = {}
+ else:
+ # Move all tool dependencies to the missing_tool_dependencies container.
+ for td_key, td_dict in tool_dependencies.items():
+ if td_key not in missing_tool_dependencies:
+ missing_tool_dependencies[ td_key ] = td_dict
+ tool_dependencies = {}
+ # Discover and categorize all tool dependencies defined for this repository's repository dependencies.
+ required_tool_dependencies, required_missing_tool_dependencies = \
+ shed_util.get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies )
+ if required_tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_tool_dependencies.items():
+ if td_key not in tool_dependencies:
+ tool_dependencies[ td_key ] = td_dict
+ if required_missing_tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_missing_tool_dependencies.items():
+ if td_key not in missing_tool_dependencies:
+ missing_tool_dependencies[ td_key ] = td_dict
else:
installed_repository_dependencies = None
missing_repository_dependencies = None
+ missing_tool_dependencies = None
+ required_repo_info_dicts = None
# Since we are installing a new repository, no tool dependencies will be considered "missing". Most of the repository contents
# are set to None since we don't yet know what they are.
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
@@ -1324,13 +1366,14 @@
datatypes=None,
invalid_tools=None,
missing_repository_dependencies=missing_repository_dependencies,
- missing_tool_dependencies=None,
+ missing_tool_dependencies=missing_tool_dependencies,
readme_files_dict=readme_files_dict,
repository_dependencies=installed_repository_dependencies,
tool_dependencies=tool_dependencies,
valid_tools=None,
workflows=None,
- new_install=True )
+ new_install=True,
+ reinstalling=False )
# We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we're retrieved the list of installed
# and missing repository dependencies from the database (2) we're handling the scenario where an error occurred during the installation process,
# so we have a tool_shed_repository record in the database with associated repository dependency records. Since we have the repository
@@ -1338,6 +1381,7 @@
# each displayed repository dependency will display a status, whether installed or missing.
containers_dict = suc.merge_missing_repository_dependencies_to_installed_container( containers_dict )
else:
+ # FIXME: support the installation of repository dependencies and tool dependencies for a list of tool shed repositories being installed.
containers_dict = dict( datatypes=None,
invalid_tools=None,
readme_files_dict=None,
@@ -1395,6 +1439,8 @@
tool_panel_section_key = None
tool_panel_section_keys = []
metadata = tool_shed_repository.metadata
+ # Keep track of tool dependencies defined for the current repository or those defined for any of its repository dependencies.
+ includes_tool_dependencies = tool_shed_repository.includes_tool_dependencies
if tool_shed_repository.includes_tools:
# Handle the selected tool panel location for loading tools included in the tool shed repository.
tool_section, new_tool_panel_section, tool_panel_section_key = \
@@ -1422,8 +1468,7 @@
if isinstance( repo_info_dict, basestring ):
repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
else:
- # Entering this else block used to happen only if the tool_shed_repository does not include any valid tools. After repository dependencies
- # were introduced, it may never happen, but we'll keep the block just in case.
+ # Entering this else block occurs only if the tool_shed_repository does not include any valid tools.
if install_repository_dependencies:
repository_dependencies = self.get_repository_dependencies( trans=trans,
repository_id=repository_id,
@@ -1462,13 +1507,13 @@
filtered_repo_info_dicts = [ repo_info_dict for repo_info_dict in repo_info_dicts ]
created_or_updated_tool_shed_repositories = [ tool_shed_repository ]
tool_panel_section_keys.append( tool_panel_section_key )
- # Defaulot the selected tool panel location for loading tools included in each newly installed required tool shed repository to the location
+ # Default the selected tool panel location for loading tools included in each newly installed required tool shed repository to the location
# selected for the repository selected for reinstallation.
for index, tps_key in enumerate( tool_panel_section_keys ):
if tps_key is None:
tool_panel_section_keys[ index ] = tool_panel_section_key
encoded_repository_ids = [ trans.security.encode_id( r.id ) for r in created_or_updated_tool_shed_repositories ]
- new_kwd = dict( includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies,
+ new_kwd = dict( includes_tool_dependencies=includes_tool_dependencies,
includes_tools=tool_shed_repository.includes_tools,
install_tool_dependencies=install_tool_dependencies,
repo_info_dicts=filtered_repo_info_dicts,
@@ -1539,13 +1584,17 @@
repository_metadata=None,
metadata=metadata,
repository_dependencies=repository_dependencies )
+ if includes_repository_dependencies:
+ # Discover all repository dependencies and retrieve information for installing them.
+ required_repo_info_dicts = shed_util.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ else:
+ required_repo_info_dicts = None
# Get the location in the tool panel in which the tool was originally loaded.
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
if tool_panel_dict:
if shed_util.panel_entry_per_tool( tool_panel_dict ):
- # TODO: Fix this to handle the case where the tools are distributed across in more than 1 ToolSection. The
- # following assumes everything was loaded into 1 section (or no section) in the tool panel.
+ # The following forces everything to be loaded into 1 section (or no section) in the tool panel.
tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
tool_section_dict = tool_section_dicts[ 0 ]
original_section_name = tool_section_dict[ 'name' ]
@@ -1567,7 +1616,14 @@
message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section. "
status = 'warning'
- containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, tool_shed_repository, reinstalling=True )
+ # Populate the containers_dict from the metadata for the tool shed repository we're reinstalling, but make sure to include tool dependencies defined for
+ # all of the repository's repository dependencies.
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ repository=tool_shed_repository,
+ reinstalling=True,
+ required_repo_info_dicts=required_repo_info_dicts )
# Since we're reinstalling we'll merge the list of missing repository dependencies into the list of installed repository dependencies since each displayed
# repository dependency will display a status, whether installed or missing.
containers_dict = suc.merge_missing_repository_dependencies_to_installed_container( containers_dict )
diff -r 530fb4f8204f2106e11f419c381fd53d2319ce24 -r 2ec609faae49b4ab32190403669849f1af0b68f6 templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -91,6 +91,7 @@
repository_dependencies_root_folder = containers_dict[ 'repository_dependencies' ]
tool_dependencies_root_folder = containers_dict[ 'tool_dependencies' ]
+ missing_tool_dependencies_root_folder = containers_dict[ 'missing_tool_dependencies' ]
env_settings_heaader_row_displayed = False
package_header_row_displayed = False
%>
@@ -122,7 +123,7 @@
<div style="clear: both"></div></div>
%endif
- %if tool_dependencies_root_folder:
+ %if tool_dependencies_root_folder or missing_tool_dependencies_root_folder:
%if install_tool_dependencies_check_box is not None:
<div class="form-row"><label>Handle tool dependencies?</label>
@@ -138,14 +139,26 @@
</div><div style="clear: both"></div>
%endif
- <div class="form-row">
- <p/>
- <% row_counter = RowCounter() %>
- <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="dependency_table">
- ${render_folder( tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
- </table>
- <div style="clear: both"></div>
- </div>
+ %if tool_dependencies_root_folder:
+ <div class="form-row">
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="dependency_table">
+ ${render_folder( tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ %if missing_tool_dependencies_root_folder:
+ <div class="form-row">
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="dependency_table">
+ ${render_folder( missing_tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ %endif
%endif
</%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
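A step this commit repeats in several places is folding tool dependencies contributed by each repository dependency into the installing repository's installed/missing containers without clobbering keys already present. A minimal sketch of that merge, with illustrative keys and status values:

def merge_tool_dependencies(target, contributions):
    # Add each contributed dependency only if the target does not already track it.
    for td_key, td_dict in contributions.items():
        if td_key not in target:
            target[td_key] = td_dict
    return target

installed_td = {"samtools/0.1.18": {"status": "Installed"}}
required_td = {"bowtie2/2.0.2": {"status": "Never installed"},
               "samtools/0.1.18": {"status": "Never installed"}}  # duplicate key is kept as-is

print(merge_tool_dependencies(installed_td, required_td))
# {'samtools/0.1.18': {'status': 'Installed'}, 'bowtie2/2.0.2': {'status': 'Never installed'}}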
08 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/530fb4f8204f/
changeset: 530fb4f8204f
user: dan
date: 2013-01-08 16:41:32
summary: Fix for grouping parameters value_from_basic when ignore_errors is True. Fixes issue seen in workflows and rerun where an invalid stored value (e.g. due to changing parameter types) is provided.
affected #: 1 file
diff -r 94bfcccdaf0625448e623d1d88ba9a1b7785ee78 -r 530fb4f8204f2106e11f419c381fd53d2319ce24 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -68,21 +68,25 @@
return rval
def value_from_basic( self, value, app, ignore_errors=False ):
rval = []
- for i, d in enumerate( value ):
- rval_dict = {}
- # If the special __index__ key is not set, create it (for backward
- # compatibility)
- rval_dict['__index__'] = d.get( '__index__', i )
- # Restore child inputs
- for input in self.inputs.itervalues():
- if ignore_errors and input.name not in d:
- # If we do not have a value, and are ignoring errors, we simply
- # do nothing. There will be no value for the parameter in the
- # conditional's values dictionary.
- pass
- else:
- rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors )
- rval.append( rval_dict )
+ try:
+ for i, d in enumerate( value ):
+ rval_dict = {}
+ # If the special __index__ key is not set, create it (for backward
+ # compatibility)
+ rval_dict['__index__'] = d.get( '__index__', i )
+ # Restore child inputs
+ for input in self.inputs.itervalues():
+ if ignore_errors and input.name not in d:
+ # If we do not have a value, and are ignoring errors, we simply
+ # do nothing. There will be no value for the parameter in the
+ # conditional's values dictionary.
+ pass
+ else:
+ rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors )
+ rval.append( rval_dict )
+ except Exception, e:
+ if not ignore_errors:
+ raise e
return rval
def visit_inputs( self, prefix, value, callback ):
for i, d in enumerate( value ):
@@ -441,24 +445,28 @@
return rval
def value_from_basic( self, value, app, ignore_errors=False ):
rval = dict()
- current_case = rval['__current_case__'] = value['__current_case__']
- # Test param
- if ignore_errors and self.test_param.name not in value:
- # If ignoring errors, do nothing. However this is potentially very
- # problematic since if we are missing the value of test param,
- # the entire conditional is wrong.
- pass
- else:
- rval[ self.test_param.name ] = self.test_param.value_from_basic( value[ self.test_param.name ], app, ignore_errors )
- # Inputs associated with current case
- for input in self.cases[current_case].inputs.itervalues():
- if ignore_errors and input.name not in value:
- # If we do not have a value, and are ignoring errors, we simply
- # do nothing. There will be no value for the parameter in the
- # conditional's values dictionary.
+ try:
+ current_case = rval['__current_case__'] = value['__current_case__']
+ # Test param
+ if ignore_errors and self.test_param.name not in value:
+ # If ignoring errors, do nothing. However this is potentially very
+ # problematic since if we are missing the value of test param,
+ # the entire conditional is wrong.
pass
else:
- rval[ input.name ] = input.value_from_basic( value[ input.name ], app, ignore_errors )
+ rval[ self.test_param.name ] = self.test_param.value_from_basic( value[ self.test_param.name ], app, ignore_errors )
+ # Inputs associated with current case
+ for input in self.cases[current_case].inputs.itervalues():
+ if ignore_errors and input.name not in value:
+ # If we do not have a value, and are ignoring errors, we simply
+ # do nothing. There will be no value for the parameter in the
+ # conditional's values dictionary.
+ pass
+ else:
+ rval[ input.name ] = input.value_from_basic( value[ input.name ], app, ignore_errors )
+ except Exception, e:
+ if not ignore_errors:
+ raise e
return rval
def visit_inputs( self, prefix, value, callback ):
current_case = value['__current_case__']
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
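The fix wraps the whole restore loop in try/except so a stale or type-changed stored value is tolerated when ignore_errors is True and re-raised otherwise. A condensed, self-contained sketch of the pattern; restore_one stands in for input.value_from_basic:

def value_from_basic(stored_values, restore_one, ignore_errors=False):
    rval = []
    try:
        for i, d in enumerate(stored_values):
            entry = {"__index__": d.get("__index__", i)}  # backward-compatible index
            for name, raw in d.items():
                if name != "__index__":
                    entry[name] = restore_one(raw)  # may raise on an invalid stored value
            rval.append(entry)
    except Exception:
        if not ignore_errors:
            raise  # preserve the old behavior when errors are not ignored
    return rval

print(value_from_basic([{"n": "1"}, {"n": "oops"}], int, ignore_errors=True))
# [{'__index__': 0, 'n': 1}] -- the invalid second entry is dropped instead of failing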
commit/galaxy-central: jgoecks: Fix bug in bowtie2 wrapper that prevented wrapper from working with old versions of samtools.
by Bitbucket, 08 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/94bfcccdaf06/
changeset: 94bfcccdaf06
user: jgoecks
date: 2013-01-08 15:25:16
summary: Fix bug in bowtie2 wrapper that prevented wrapper from working with old versions of samtools.
affected #: 1 file
diff -r 989c789fbc43b0d9960655752c6ba396ac7618af -r 94bfcccdaf0625448e623d1d88ba9a1b7785ee78 tools/sr_mapping/bowtie2_wrapper.py
--- a/tools/sr_mapping/bowtie2_wrapper.py
+++ b/tools/sr_mapping/bowtie2_wrapper.py
@@ -71,7 +71,7 @@
index_path = options.index_path
# Build bowtie command; use view and sort to create sorted bam.
- cmd = 'bowtie2 %s -x %s %s | samtools view -Su - | samtools sort -o - sorted > %s'
+ cmd = 'bowtie2 %s -x %s %s | samtools view -Su - | samtools sort -o - - > %s'
# Set up reads.
if options.single_paired == 'paired':
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
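The one-character change passes '-' instead of an output prefix to samtools sort, so the sorted BAM goes to stdout under the older samtools argument style. A hedged sketch of how such a pipeline string can be assembled and inspected (paths and options are illustrative; run it only where bowtie2 and samtools exist):

import shlex

def build_sorted_bam_cmd(bowtie2_opts, index_path, reads, output_bam):
    # Align, convert SAM to uncompressed BAM, sort, and redirect the sorted BAM to a file.
    return "bowtie2 %s -x %s %s | samtools view -Su - | samtools sort -o - - > %s" % (
        bowtie2_opts, index_path, reads, shlex.quote(output_bam))

cmd = build_sorted_bam_cmd("--sensitive", "/refs/hg19", "-1 r1.fq -2 r2.fq", "out.bam")
print(cmd)
# subprocess.check_call(cmd, shell=True)  # only on a host with bowtie2/samtools installed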
commit/galaxy-central: james_taylor: comment out two eggs from eggs.ini for now
by Bitbucket 07 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/989c789fbc43/
changeset: 989c789fbc43
user: james_taylor
date: 2013-01-07 22:25:54
summary: comment out two eggs from eggs.ini for now
affected #: 1 file
diff -r d8606b61644a09719a8042efd2b23b7aa7869ff2 -r 989c789fbc43b0d9960655752c6ba396ac7618af eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -29,7 +29,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
-msgpack_python = 0.2.4
+; msgpack_python = 0.2.4
[eggs:noplatform]
amqplib = 0.6.1
@@ -66,7 +66,7 @@
Babel = 0.9.4
wchartype = 0.1
Whoosh = 0.3.18
-fluent_logger = 0.3.3
+; fluent_logger = 0.3.3
; extra version information
[tags]
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d8606b61644a/
changeset: d8606b61644a
user: inithello
date: 2013-01-07 22:20:23
summary: Revert fix from 8530:df20658ac499.
affected #: 1 file
diff -r 09cf284087021586ad08656b7ea444959c6c49bf -r d8606b61644a09719a8042efd2b23b7aa7869ff2 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -848,7 +848,8 @@
tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
else:
tool_section = None
- repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ if isinstance( repo_info_dict, basestring ):
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
# Clone each repository to the configured location.
shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
@@ -1418,7 +1419,8 @@
repo_info_dicts = []
repo_info_dict = kwd.get( 'repo_info_dict', None )
if repo_info_dict:
- repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ if isinstance( repo_info_dict, basestring ):
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
else:
# Entering this else block used to happen only if the tool_shed_repository does not include any valid tools. After repository dependencies
# were introduced, it may never happen, but we'll keep the block just in case.
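The restored guard makes decoding idempotent: repo_info_dict can arrive either as a tool-shed-encoded string or as an already-decoded dictionary, and it is decoded only while it is still a string. A minimal sketch of the pattern (Python 2 basestring, as in the diff; 'decode' is a stand-in for encoding_util.tool_shed_decode):

    # Sketch of the decode-only-if-string guard used above.
    def ensure_decoded(repo_info_dict, decode):
        if isinstance(repo_info_dict, basestring):  # still encoded
            repo_info_dict = decode(repo_info_dict)
        return repo_info_dict  # safe to call any number of times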
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
7 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/67379010fc77/
changeset: 67379010fc77
user: james_taylor
date: 2012-10-02 17:03:48
summary: tracing: first pass trace logging to fluentd
affected #: 9 files
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -29,6 +29,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
+msgpack_python = 0.2.2
[eggs:noplatform]
amqplib = 0.6.1
@@ -65,6 +66,7 @@
Babel = 0.9.4
wchartype = 0.1
Whoosh = 0.3.18
+fluent_logger = 0.3.3
; extra version information
[tags]
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -28,6 +28,7 @@
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
+ self.configure_fluent_log()
# Determine the database url
if self.config.database_connection:
db_url = self.config.database_connection
@@ -53,7 +54,8 @@
db_url,
self.config.database_engine_options,
database_query_profiling_proxy = self.config.database_query_profiling_proxy,
- object_store = self.object_store )
+ object_store = self.object_store,
+ trace_logger=self.trace_logger )
# Manage installed tool shed repositories.
self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
@@ -143,6 +145,7 @@
self.job_stop_queue = self.job_manager.job_stop_queue
# Initialize the external service types
self.external_service_types = external_service_types.ExternalServiceTypesCollection( self.config.external_service_type_config_file, self.config.external_service_type_path, self )
+
def shutdown( self ):
self.job_manager.shutdown()
self.object_store.shutdown()
@@ -155,3 +158,10 @@
os.unlink( self.datatypes_registry.integrated_datatypes_configs )
except:
pass
+
+ def configure_fluent_log( self ):
+ if self.config.fluent_log:
+ from galaxy.util.log.fluent_log import FluentTraceLogger
+ self.trace_logger = FluentTraceLogger( 'galaxy', self.config.fluent_host, self.config.fluent_port )
+ else:
+ self.trace_logger = None
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -232,6 +232,11 @@
for k, v in amqp_config:
self.amqp[k] = v
self.running_functional_tests = string_as_bool( kwargs.get( 'running_functional_tests', False ) )
+ # Logging with fluentd
+ self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
+ self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
+ self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
+
def __read_tool_job_config( self, global_conf_parser, section, key ):
try:
tool_runners_config = global_conf_parser.items( section )
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1868,7 +1868,7 @@
# Let this go, it could possibly work with db's we don't support
log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
-def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None ):
+def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None, trace_logger=None ):
"""Connect mappings to the database"""
# Connect dataset to the file path
Dataset.file_path = file_path
@@ -1876,10 +1876,10 @@
Dataset.object_store = object_store
# Load the appropriate db module
load_egg_for_url( url )
- # Should we use the logging proxy?
- if database_query_profiling_proxy:
+ # If metlog is enabled, do micrologging
+ if trace_logger:
import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
- proxy = logging_connection_proxy.LoggingProxy()
+ proxy = logging_connection_proxy.TraceLoggerProxy( trace_logger )
else:
proxy = None
# Create the database engine
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/model/orm/logging_connection_proxy.py
--- a/lib/galaxy/model/orm/logging_connection_proxy.py
+++ b/lib/galaxy/model/orm/logging_connection_proxy.py
@@ -18,13 +18,31 @@
rval = []
for frame, fname, line, funcname, _, _ in inspect.stack()[2:]:
rval.append( "%s:%s@%d" % ( stripwd( fname ), funcname, line ) )
- return " > ".join( rval )
+ return rval
class LoggingProxy(ConnectionProxy):
+ """
+ Logs SQL statements using standard logging module
+ """
def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
start = time.clock()
rval = execute(cursor, statement, parameters, context)
duration = time.clock() - start
log.debug( "statement: %r parameters: %r executemany: %r duration: %r stack: %r",
- statement, parameters, executemany, duration, pretty_stack() )
+ statement, parameters, executemany, duration, " > ".join( pretty_stack() ) )
return rval
+
+class TraceLoggerProxy(ConnectionProxy):
+ """
+ Logs SQL statements using a metlog client
+ """
+ def __init__( self, trace_logger ):
+ self.trace_logger = trace_logger
+ def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
+ start = time.clock()
+ rval = execute(cursor, statement, parameters, context)
+ duration = time.clock() - start
+ self.trace_logger.log( "sqlalchemy_query",
+ message="Query executed", statement=statement, parameters=parameters,
+ executemany=executemany, duration=duration, stack=pretty_stack() )
+ return rval
\ No newline at end of file
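Both proxy classes hook SQLAlchemy's cursor execution via ConnectionProxy, which the engine invokes around every statement. A minimal sketch, assuming the legacy SQLAlchemy 0.x API (sqlalchemy.interfaces.ConnectionProxy and the proxy= keyword to create_engine) that Galaxy used at the time:

    import time
    from sqlalchemy import create_engine
    from sqlalchemy.interfaces import ConnectionProxy

    class TimingProxy(ConnectionProxy):
        # Called by the engine for every cursor execution.
        def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
            start = time.clock()
            rval = execute(cursor, statement, parameters, context)
            print('%0.6fs %s' % (time.clock() - start, statement))
            return rval

    engine = create_engine('sqlite://', proxy=TimingProxy())
    engine.execute('SELECT 1')  # prints the timing line before returning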
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/util/log/__init__.py
--- /dev/null
+++ b/lib/galaxy/util/log/__init__.py
@@ -0,0 +1,5 @@
+class TraceLogger( object ):
+ def __init__( self, name ):
+ self.name = name
+ def log( **kwargs ):
+ raise TypeError( "Abstract Method" )
\ No newline at end of file
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/util/log/fluent_log.py
--- /dev/null
+++ b/lib/galaxy/util/log/fluent_log.py
@@ -0,0 +1,37 @@
+import time
+import threading
+
+import galaxy.eggs
+galaxy.eggs.require( "fluent-logger" )
+galaxy.eggs.require( "msgpack_python" )
+
+
+from fluent.sender import FluentSender
+
+
+class FluentTraceLogger( object ):
+ def __init__( self, name, host='localhost', port=24224 ):
+ self.lock = threading.Lock()
+ self.thread_local = threading.local()
+ self.name = name
+ self.sender = FluentSender( self.name, host=host, port=port )
+
+ def context_push( self, value ):
+ self.lock.acquire()
+ if not hasattr( self.thread_local, 'context' ):
+ self.thread_local.context = []
+ self.thread_local.context.append( value )
+ self.lock.release()
+
+ def context_pop( self ):
+ self.lock.acquire()
+ self.thread_local.context.pop()
+ self.lock.release()
+
+ def log( self, label, **kwargs ):
+ self.lock.acquire()
+ if not hasattr( self.thread_local, 'context' ):
+ self.thread_local.context = []
+ self.lock.release()
+ kwargs['log_context'] = self.thread_local.context
+ self.sender.emit_with_time( label, int(time.time()), kwargs )
\ No newline at end of file
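A hypothetical usage of the FluentTraceLogger defined above, assuming a fluentd collector is listening on localhost:24224 (the request id is made up for the example):

    from galaxy.util.log.fluent_log import FluentTraceLogger

    trace_logger = FluentTraceLogger('galaxy')            # tag prefix 'galaxy'
    trace_logger.context_push(dict(request_id='abc123'))  # attached to later events
    trace_logger.log('WebApplication', message='Starting request')
    trace_logger.context_pop()                            # detach when done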
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -8,6 +8,8 @@
import os.path
import sys
import tarfile
+import threading
+import uuid
from Cookie import SimpleCookie
@@ -68,6 +70,9 @@
self.mapper.explicit = False
self.api_mapper = routes.Mapper()
self.transaction_factory = DefaultWebTransaction
+ # Each request will have a unique id. Since we are assuming
+ # a threaded model for the moment we can store that here
+ self.request_id = threading.local()
def add_ui_controller( self, controller_name, controller ):
"""
Add a controller class to this application. A controller class has
@@ -106,12 +111,20 @@
# Create/compile the regular expressions for route mapping
self.mapper.create_regs( self.controllers.keys() )
self.api_mapper.create_regs( self.api_controllers.keys() )
+ def trace( self, **fields ):
+ if self.trace_logger:
+ self.trace_logger.log( "WebApplication", **fields )
def __call__( self, environ, start_response ):
"""
Call interface as specified by WSGI. Wraps the environment in user
friendly objects, finds the appropriate method to handle the request
and calls it.
"""
+ # Immediately create request_id which we will use for logging
+ self.request_id = request_id = uuid.uuid1().hex
+ if self.trace_logger:
+ self.trace_logger.context_push( dict( request_id = request_id ) )
+ self.trace( message= "Starting request" )
# Map url using routes
path_info = environ.get( 'PATH_INFO', '' )
map = self.mapper.match( path_info, environ )
@@ -125,6 +138,7 @@
controllers = self.controllers
if map == None:
raise httpexceptions.HTTPNotFound( "No route for " + path_info )
+ self.trace( path_info=path_info, map=map )
# Setup routes
rc = routes.request_config()
rc.mapper = mapper
diff -r 0445cd851094b8bad61d2a96f399538f74e5db03 -r 67379010fc77b462178e3d580e660df220d20634 lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -110,7 +110,13 @@
webapp.api_mapper.connect("import_workflow", "/api/workflows/upload", controller="workflows", action="import_new_workflow", conditions=dict(method=["POST"]))
webapp.api_mapper.connect("workflow_dict", '/api/workflows/download/{workflow_id}', controller='workflows', action='workflow_dict', conditions=dict(method=['GET']))
+ # Connect logger from app
+ if app.trace_logger:
+ webapp.trace_logger = app.trace_logger
+
+ # Indicate that all configuration settings have been provided
webapp.finalize_config()
+
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
webapp = wrap_in_middleware( webapp, global_conf, **kwargs )
https://bitbucket.org/galaxy/galaxy-central/commits/33d256f3121a/
changeset: 33d256f3121a
user: james_taylor
date: 2013-01-03 22:54:14
summary: merge
affected #: 9 files
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -29,6 +29,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
+msgpack_python = 0.2.2
[eggs:noplatform]
amqplib = 0.6.1
@@ -65,6 +66,7 @@
Babel = 0.9.4
wchartype = 0.1
Whoosh = 0.3.18
+fluent_logger = 0.3.3
; extra version information
[tags]
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -29,6 +29,7 @@
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
+ self.configure_fluent_log()
# Determine the database url
if self.config.database_connection:
db_url = self.config.database_connection
@@ -54,7 +55,8 @@
db_url,
self.config.database_engine_options,
database_query_profiling_proxy = self.config.database_query_profiling_proxy,
- object_store = self.object_store )
+ object_store = self.object_store,
+ trace_logger=self.trace_logger )
# Manage installed tool shed repositories.
self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
@@ -149,6 +151,7 @@
self.job_stop_queue = self.job_manager.job_stop_queue
# Initialize the external service types
self.external_service_types = external_service_types.ExternalServiceTypesCollection( self.config.external_service_type_config_file, self.config.external_service_type_path, self )
+
def shutdown( self ):
self.job_manager.shutdown()
self.object_store.shutdown()
@@ -161,3 +164,10 @@
os.unlink( self.datatypes_registry.integrated_datatypes_configs )
except:
pass
+
+ def configure_fluent_log( self ):
+ if self.config.fluent_log:
+ from galaxy.util.log.fluent_log import FluentTraceLogger
+ self.trace_logger = FluentTraceLogger( 'galaxy', self.config.fluent_host, self.config.fluent_port )
+ else:
+ self.trace_logger = None
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -261,6 +261,10 @@
self.api_folders = string_as_bool( kwargs.get( 'api_folders', False ) )
# This is for testing new library browsing capabilities.
self.new_lib_browse = string_as_bool( kwargs.get( 'new_lib_browse', False ) )
+ # Logging with fluentd
+ self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
+ self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
+ self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
def __read_tool_job_config( self, global_conf_parser, section, key ):
try:
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1950,7 +1950,7 @@
# Let this go, it could possibly work with db's we don't support
log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
-def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None ):
+def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None, trace_logger=None ):
"""Connect mappings to the database"""
# Connect dataset to the file path
Dataset.file_path = file_path
@@ -1958,10 +1958,10 @@
Dataset.object_store = object_store
# Load the appropriate db module
load_egg_for_url( url )
- # Should we use the logging proxy?
- if database_query_profiling_proxy:
+ # If metlog is enabled, do micrologging
+ if trace_logger:
import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
- proxy = logging_connection_proxy.LoggingProxy()
+ proxy = logging_connection_proxy.TraceLoggerProxy( trace_logger )
else:
proxy = None
# Create the database engine
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/model/orm/logging_connection_proxy.py
--- a/lib/galaxy/model/orm/logging_connection_proxy.py
+++ b/lib/galaxy/model/orm/logging_connection_proxy.py
@@ -18,13 +18,31 @@
rval = []
for frame, fname, line, funcname, _, _ in inspect.stack()[2:]:
rval.append( "%s:%s@%d" % ( stripwd( fname ), funcname, line ) )
- return " > ".join( rval )
+ return rval
class LoggingProxy(ConnectionProxy):
+ """
+ Logs SQL statements using standard logging module
+ """
def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
start = time.clock()
rval = execute(cursor, statement, parameters, context)
duration = time.clock() - start
log.debug( "statement: %r parameters: %r executemany: %r duration: %r stack: %r",
- statement, parameters, executemany, duration, pretty_stack() )
+ statement, parameters, executemany, duration, " > ".join( pretty_stack() ) )
return rval
+
+class TraceLoggerProxy(ConnectionProxy):
+ """
+ Logs SQL statements using a metlog client
+ """
+ def __init__( self, trace_logger ):
+ self.trace_logger = trace_logger
+ def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
+ start = time.clock()
+ rval = execute(cursor, statement, parameters, context)
+ duration = time.clock() - start
+ self.trace_logger.log( "sqlalchemy_query",
+ message="Query executed", statement=statement, parameters=parameters,
+ executemany=executemany, duration=duration, stack=pretty_stack() )
+ return rval
\ No newline at end of file
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/util/log/__init__.py
--- /dev/null
+++ b/lib/galaxy/util/log/__init__.py
@@ -0,0 +1,5 @@
+class TraceLogger( object ):
+ def __init__( self, name ):
+ self.name = name
+ def log( **kwargs ):
+ raise TypeError( "Abstract Method" )
\ No newline at end of file
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/util/log/fluent_log.py
--- /dev/null
+++ b/lib/galaxy/util/log/fluent_log.py
@@ -0,0 +1,37 @@
+import time
+import threading
+
+import galaxy.eggs
+galaxy.eggs.require( "fluent-logger" )
+galaxy.eggs.require( "msgpack_python" )
+
+
+from fluent.sender import FluentSender
+
+
+class FluentTraceLogger( object ):
+ def __init__( self, name, host='localhost', port=24224 ):
+ self.lock = threading.Lock()
+ self.thread_local = threading.local()
+ self.name = name
+ self.sender = FluentSender( self.name, host=host, port=port )
+
+ def context_push( self, value ):
+ self.lock.acquire()
+ if not hasattr( self.thread_local, 'context' ):
+ self.thread_local.context = []
+ self.thread_local.context.append( value )
+ self.lock.release()
+
+ def context_pop( self ):
+ self.lock.acquire()
+ self.thread_local.context.pop()
+ self.lock.release()
+
+ def log( self, label, **kwargs ):
+ self.lock.acquire()
+ if not hasattr( self.thread_local, 'context' ):
+ self.thread_local.context = []
+ self.lock.release()
+ kwargs['log_context'] = self.thread_local.context
+ self.sender.emit_with_time( label, int(time.time()), kwargs )
\ No newline at end of file
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -8,6 +8,8 @@
import os.path
import sys
import tarfile
+import threading
+import uuid
from Cookie import SimpleCookie
@@ -68,6 +70,9 @@
self.mapper.explicit = False
self.api_mapper = routes.Mapper()
self.transaction_factory = DefaultWebTransaction
+ # Each request will have a unique id. Since we are assuming
+ # a threaded model for the moment we can store that here
+ self.request_id = threading.local()
def add_ui_controller( self, controller_name, controller ):
"""
Add a controller class to this application. A controller class has
@@ -106,12 +111,20 @@
# Create/compile the regular expressions for route mapping
self.mapper.create_regs( self.controllers.keys() )
self.api_mapper.create_regs( self.api_controllers.keys() )
+ def trace( self, **fields ):
+ if self.trace_logger:
+ self.trace_logger.log( "WebApplication", **fields )
def __call__( self, environ, start_response ):
"""
Call interface as specified by WSGI. Wraps the environment in user
friendly objects, finds the appropriate method to handle the request
and calls it.
"""
+ # Immediately create request_id which we will use for logging
+ self.request_id = request_id = uuid.uuid1().hex
+ if self.trace_logger:
+ self.trace_logger.context_push( dict( request_id = request_id ) )
+ self.trace( message= "Starting request" )
# Map url using routes
path_info = environ.get( 'PATH_INFO', '' )
map = self.mapper.match( path_info, environ )
@@ -125,6 +138,7 @@
controllers = self.controllers
if map == None:
raise httpexceptions.HTTPNotFound( "No route for " + path_info )
+ self.trace( path_info=path_info, map=map )
# Setup routes
rc = routes.request_config()
rc.mapper = mapper
diff -r d7475647cbb6a1c70218049fadc24ed1651d845a -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -130,7 +130,13 @@
webapp.api_mapper.connect("import_workflow", "/api/workflows/upload", controller="workflows", action="import_new_workflow", conditions=dict(method=["POST"]))
webapp.api_mapper.connect("workflow_dict", '/api/workflows/download/{workflow_id}', controller='workflows', action='workflow_dict', conditions=dict(method=['GET']))
+ # Connect logger from app
+ if app.trace_logger:
+ webapp.trace_logger = app.trace_logger
+
+ # Indicate that all configuration settings have been provided
webapp.finalize_config()
+
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
webapp = wrap_in_middleware( webapp, global_conf, **kwargs )
https://bitbucket.org/galaxy/galaxy-central/commits/33750f347be2/
changeset: 33750f347be2
user: james_taylor
date: 2013-01-04 20:26:22
summary: update msgpack_python version for trace logging, don't log entire stack with each query
affected #: 2 files
diff -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e -r 33750f347be2aef72fe4e32d5c2197dd82f2b45a eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -29,7 +29,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
-msgpack_python = 0.2.2
+msgpack_python = 0.2.4
[eggs:noplatform]
amqplib = 0.6.1
diff -r 33d256f3121a7f32aaa87b4e34a110fcea57a36e -r 33750f347be2aef72fe4e32d5c2197dd82f2b45a lib/galaxy/model/orm/logging_connection_proxy.py
--- a/lib/galaxy/model/orm/logging_connection_proxy.py
+++ b/lib/galaxy/model/orm/logging_connection_proxy.py
@@ -44,5 +44,5 @@
duration = time.clock() - start
self.trace_logger.log( "sqlalchemy_query",
message="Query executed", statement=statement, parameters=parameters,
- executemany=executemany, duration=duration, stack=pretty_stack() )
+ executemany=executemany, duration=duration )
return rval
\ No newline at end of file
https://bitbucket.org/galaxy/galaxy-central/commits/8331f2af6a90/
changeset: 8331f2af6a90
user: james_taylor
date: 2013-01-04 22:48:35
summary: trace logging: flatten context
affected #: 2 files
diff -r 33750f347be2aef72fe4e32d5c2197dd82f2b45a -r 8331f2af6a9092f4528fb01ca7baf3d7999838a0 lib/galaxy/util/log/fluent_log.py
--- a/lib/galaxy/util/log/fluent_log.py
+++ b/lib/galaxy/util/log/fluent_log.py
@@ -1,3 +1,7 @@
+"""
+Provides a `TraceLogger` implementation that logs to a fluentd collector
+"""
+
import time
import threading
@@ -5,33 +9,31 @@
galaxy.eggs.require( "fluent-logger" )
galaxy.eggs.require( "msgpack_python" )
-
from fluent.sender import FluentSender
class FluentTraceLogger( object ):
- def __init__( self, name, host='localhost', port=24224 ):
- self.lock = threading.Lock()
- self.thread_local = threading.local()
- self.name = name
- self.sender = FluentSender( self.name, host=host, port=port )
+ def __init__( self, name, host='localhost', port=24224 ):
+ self.lock = threading.Lock()
+ self.thread_local = threading.local()
+ self.name = name
+ self.sender = FluentSender( self.name, host=host, port=port )
- def context_push( self, value ):
- self.lock.acquire()
- if not hasattr( self.thread_local, 'context' ):
- self.thread_local.context = []
- self.thread_local.context.append( value )
- self.lock.release()
+ def context_set( self, key, value ):
+ self.lock.acquire()
+ if not hasattr( self.thread_local, 'context' ):
+ self.thread_local.context = {}
+ self.thread_local.context[key] = value
+ self.lock.release()
- def context_pop( self ):
- self.lock.acquire()
- self.thread_local.context.pop()
- self.lock.release()
+ def context_remove( self, key ):
+ self.lock.acquire()
+ del self.thread_local.context[key]
+ self.lock.release()
- def log( self, label, **kwargs ):
- self.lock.acquire()
- if not hasattr( self.thread_local, 'context' ):
- self.thread_local.context = []
- self.lock.release()
- kwargs['log_context'] = self.thread_local.context
- self.sender.emit_with_time( label, int(time.time()), kwargs )
\ No newline at end of file
+ def log( self, label, **kwargs ):
+ self.lock.acquire()
+ if hasattr( self.thread_local, 'context' ):
+ kwargs.update( self.thread_local.context )
+ self.lock.release()
+ self.sender.emit_with_time( label, int(time.time()), kwargs )
\ No newline at end of file
diff -r 33750f347be2aef72fe4e32d5c2197dd82f2b45a -r 8331f2af6a9092f4528fb01ca7baf3d7999838a0 lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -123,8 +123,16 @@
# Immediately create request_id which we will use for logging
self.request_id = request_id = uuid.uuid1().hex
if self.trace_logger:
- self.trace_logger.context_push( dict( request_id = request_id ) )
- self.trace( message= "Starting request" )
+ self.trace_logger.context_set( "request_id", request_id )
+ self.trace( message="Starting request" )
+ try:
+ return self.handle_request( environ, start_response )
+ finally:
+ self.trace( message="Handle request finished" )
+ if self.trace_logger:
+ self.trace_logger.context_remove( "request_id" )
+
+ def handle_request( self, environ, start_response ):
# Map url using routes
path_info = environ.get( 'PATH_INFO', '' )
map = self.mapper.match( path_info, environ )
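With the context flattened to a dictionary, per-request correlation becomes a set/remove pair around the request body, and the try/finally above guarantees the key is always removed. A sketch of the lifecycle using the revised API (the logged fields are illustrative):

    import uuid
    from galaxy.util.log.fluent_log import FluentTraceLogger

    trace_logger = FluentTraceLogger('galaxy')  # assumes fluentd on localhost:24224
    request_id = uuid.uuid1().hex               # same id scheme as the diff
    trace_logger.context_set('request_id', request_id)
    try:
        trace_logger.log('WebApplication', message='Starting request')
        # ... route and handle the request ...
    finally:
        trace_logger.log('WebApplication', message='Handle request finished')
        trace_logger.context_remove('request_id')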
https://bitbucket.org/galaxy/galaxy-central/commits/26f38d9bd3ee/
changeset: 26f38d9bd3ee
user: james_taylor
date: 2013-01-07 21:07:16
summary: Restore support for logging connection proxy
affected #: 1 file
diff -r 8331f2af6a9092f4528fb01ca7baf3d7999838a0 -r 26f38d9bd3ee9902aa5661d046604f6fb2bb96b3 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1958,8 +1958,12 @@
Dataset.object_store = object_store
# Load the appropriate db module
load_egg_for_url( url )
+ # Should we use the logging proxy?
+ if database_query_profiling_proxy:
+ import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
+ proxy = logging_connection_proxy.LoggingProxy()
# If metlog is enabled, do micrologging
- if trace_logger:
+ elif trace_logger:
import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
proxy = logging_connection_proxy.TraceLoggerProxy( trace_logger )
else:
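Taken with the config options added earlier in this series, proxy selection is now driven entirely by settings, with the profiling proxy winning when both are enabled. A hypothetical universe_wsgi.ini excerpt (keys match those read in config.py and mapping.py above; these are not shipped defaults):

    ; Hypothetical config excerpt enabling query logging.
    database_query_profiling_proxy = True
    fluent_log = True
    fluent_host = localhost
    fluent_port = 24224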
https://bitbucket.org/galaxy/galaxy-central/commits/5622f8127e9d/
changeset: 5622f8127e9d
user: james_taylor
date: 2013-01-07 21:12:18
summary: tracing: fix for when trace logger is disabled
affected #: 1 file
diff -r 26f38d9bd3ee9902aa5661d046604f6fb2bb96b3 -r 5622f8127e9ddf52baba4ad901e32deacd378f7c lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -73,6 +73,8 @@
# Each request will have a unique id. Since we are assuming
# a threaded model for the moment we can store that here
self.request_id = threading.local()
+ # Set if trace logging is enabled
+ self.trace_logger = None
def add_ui_controller( self, controller_name, controller ):
"""
Add a controller class to this application. A controller class has
@@ -114,6 +116,7 @@
def trace( self, **fields ):
if self.trace_logger:
self.trace_logger.log( "WebApplication", **fields )
+
def __call__( self, environ, start_response ):
"""
Call interface as specified by WSGI. Wraps the environment in user
https://bitbucket.org/galaxy/galaxy-central/commits/09cf28408702/
changeset: 09cf28408702
user: james_taylor
date: 2013-01-07 21:12:47
summary: Automated merge with ssh://bitbucket.org/galaxy/galaxy-central
affected #: 9 files
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -29,6 +29,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
+msgpack_python = 0.2.4
[eggs:noplatform]
amqplib = 0.6.1
@@ -65,6 +66,7 @@
Babel = 0.9.4
wchartype = 0.1
Whoosh = 0.3.18
+fluent_logger = 0.3.3
; extra version information
[tags]
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -29,6 +29,7 @@
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
+ self.configure_fluent_log()
# Determine the database url
if self.config.database_connection:
db_url = self.config.database_connection
@@ -54,7 +55,8 @@
db_url,
self.config.database_engine_options,
database_query_profiling_proxy = self.config.database_query_profiling_proxy,
- object_store = self.object_store )
+ object_store = self.object_store,
+ trace_logger=self.trace_logger )
# Manage installed tool shed repositories.
self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
@@ -149,6 +151,7 @@
self.job_stop_queue = self.job_manager.job_stop_queue
# Initialize the external service types
self.external_service_types = external_service_types.ExternalServiceTypesCollection( self.config.external_service_type_config_file, self.config.external_service_type_path, self )
+
def shutdown( self ):
self.job_manager.shutdown()
self.object_store.shutdown()
@@ -161,3 +164,10 @@
os.unlink( self.datatypes_registry.integrated_datatypes_configs )
except:
pass
+
+ def configure_fluent_log( self ):
+ if self.config.fluent_log:
+ from galaxy.util.log.fluent_log import FluentTraceLogger
+ self.trace_logger = FluentTraceLogger( 'galaxy', self.config.fluent_host, self.config.fluent_port )
+ else:
+ self.trace_logger = None
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -261,6 +261,10 @@
self.api_folders = string_as_bool( kwargs.get( 'api_folders', False ) )
# This is for testing new library browsing capabilities.
self.new_lib_browse = string_as_bool( kwargs.get( 'new_lib_browse', False ) )
+ # Logging with fluentd
+ self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
+ self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
+ self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
def __read_tool_job_config( self, global_conf_parser, section, key ):
try:
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1950,7 +1950,7 @@
# Let this go, it could possibly work with db's we don't support
log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
-def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None ):
+def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None, trace_logger=None ):
"""Connect mappings to the database"""
# Connect dataset to the file path
Dataset.file_path = file_path
@@ -1962,6 +1962,10 @@
if database_query_profiling_proxy:
import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
proxy = logging_connection_proxy.LoggingProxy()
+ # If metlog is enabled, do micrologging
+ elif trace_logger:
+ import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
+ proxy = logging_connection_proxy.TraceLoggerProxy( trace_logger )
else:
proxy = None
# Create the database engine
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/model/orm/logging_connection_proxy.py
--- a/lib/galaxy/model/orm/logging_connection_proxy.py
+++ b/lib/galaxy/model/orm/logging_connection_proxy.py
@@ -18,13 +18,31 @@
rval = []
for frame, fname, line, funcname, _, _ in inspect.stack()[2:]:
rval.append( "%s:%s@%d" % ( stripwd( fname ), funcname, line ) )
- return " > ".join( rval )
+ return rval
class LoggingProxy(ConnectionProxy):
+ """
+ Logs SQL statements using standard logging module
+ """
def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
start = time.clock()
rval = execute(cursor, statement, parameters, context)
duration = time.clock() - start
log.debug( "statement: %r parameters: %r executemany: %r duration: %r stack: %r",
- statement, parameters, executemany, duration, pretty_stack() )
+ statement, parameters, executemany, duration, " > ".join( pretty_stack() ) )
return rval
+
+class TraceLoggerProxy(ConnectionProxy):
+ """
+ Logs SQL statements using a metlog client
+ """
+ def __init__( self, trace_logger ):
+ self.trace_logger = trace_logger
+ def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
+ start = time.clock()
+ rval = execute(cursor, statement, parameters, context)
+ duration = time.clock() - start
+ self.trace_logger.log( "sqlalchemy_query",
+ message="Query executed", statement=statement, parameters=parameters,
+ executemany=executemany, duration=duration )
+ return rval
\ No newline at end of file
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/util/log/__init__.py
--- /dev/null
+++ b/lib/galaxy/util/log/__init__.py
@@ -0,0 +1,5 @@
+class TraceLogger( object ):
+ def __init__( self, name ):
+ self.name = name
+ def log( **kwargs ):
+ raise TypeError( "Abstract Method" )
\ No newline at end of file
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/util/log/fluent_log.py
--- /dev/null
+++ b/lib/galaxy/util/log/fluent_log.py
@@ -0,0 +1,39 @@
+"""
+Provides a `TraceLogger` implementation that logs to a fluentd collector
+"""
+
+import time
+import threading
+
+import galaxy.eggs
+galaxy.eggs.require( "fluent-logger" )
+galaxy.eggs.require( "msgpack_python" )
+
+from fluent.sender import FluentSender
+
+
+class FluentTraceLogger( object ):
+ def __init__( self, name, host='localhost', port=24224 ):
+ self.lock = threading.Lock()
+ self.thread_local = threading.local()
+ self.name = name
+ self.sender = FluentSender( self.name, host=host, port=port )
+
+ def context_set( self, key, value ):
+ self.lock.acquire()
+ if not hasattr( self.thread_local, 'context' ):
+ self.thread_local.context = {}
+ self.thread_local.context[key] = value
+ self.lock.release()
+
+ def context_remove( self, key ):
+ self.lock.acquire()
+ del self.thread_local.context[key]
+ self.lock.release()
+
+ def log( self, label, **kwargs ):
+ self.lock.acquire()
+ if hasattr( self.thread_local, 'context' ):
+ kwargs.update( self.thread_local.context )
+ self.lock.release()
+ self.sender.emit_with_time( label, int(time.time()), kwargs )
\ No newline at end of file
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -8,6 +8,8 @@
import os.path
import sys
import tarfile
+import threading
+import uuid
from Cookie import SimpleCookie
@@ -68,6 +70,11 @@
self.mapper.explicit = False
self.api_mapper = routes.Mapper()
self.transaction_factory = DefaultWebTransaction
+ # Each request will have a unique id. Since we are assuming
+ # a threaded model for the moment we can store that here
+ self.request_id = threading.local()
+ # Set if trace logging is enabled
+ self.trace_logger = None
def add_ui_controller( self, controller_name, controller ):
"""
Add a controller class to this application. A controller class has
@@ -106,12 +113,29 @@
# Create/compile the regular expressions for route mapping
self.mapper.create_regs( self.controllers.keys() )
self.api_mapper.create_regs( self.api_controllers.keys() )
+ def trace( self, **fields ):
+ if self.trace_logger:
+ self.trace_logger.log( "WebApplication", **fields )
+
def __call__( self, environ, start_response ):
"""
Call interface as specified by WSGI. Wraps the environment in user
friendly objects, finds the appropriate method to handle the request
and calls it.
"""
+ # Immediately create request_id which we will use for logging
+ self.request_id = request_id = uuid.uuid1().hex
+ if self.trace_logger:
+ self.trace_logger.context_set( "request_id", request_id )
+ self.trace( message="Starting request" )
+ try:
+ return self.handle_request( environ, start_response )
+ finally:
+ self.trace( message="Handle request finished" )
+ if self.trace_logger:
+ self.trace_logger.context_remove( "request_id" )
+
+ def handle_request( self, environ, start_response ):
# Map url using routes
path_info = environ.get( 'PATH_INFO', '' )
map = self.mapper.match( path_info, environ )
@@ -125,6 +149,7 @@
controllers = self.controllers
if map == None:
raise httpexceptions.HTTPNotFound( "No route for " + path_info )
+ self.trace( path_info=path_info, map=map )
# Setup routes
rc = routes.request_config()
rc.mapper = mapper
diff -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 -r 09cf284087021586ad08656b7ea444959c6c49bf lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -130,7 +130,13 @@
webapp.api_mapper.connect("import_workflow", "/api/workflows/upload", controller="workflows", action="import_new_workflow", conditions=dict(method=["POST"]))
webapp.api_mapper.connect("workflow_dict", '/api/workflows/download/{workflow_id}', controller='workflows', action='workflow_dict', conditions=dict(method=['GET']))
+ # Connect logger from app
+ if app.trace_logger:
+ webapp.trace_logger = app.trace_logger
+
+ # Indicate that all configuration settings have been provided
webapp.finalize_config()
+
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
webapp = wrap_in_middleware( webapp, global_conf, **kwargs )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/df20658ac499/
changeset: df20658ac499
user: inithello
date: 2013-01-07 18:19:00
summary: Fix for encoded repo_info_dict being passed instead of decoded.
affected #: 1 file
diff -r ce62bf5a91f86d0c53764bda3df4975486d7512e -r df20658ac4991d970bfd9d63feae73850db84bb7 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -848,8 +848,7 @@
tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
else:
tool_section = None
- if isinstance( repo_info_dict, basestring ):
- repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
# Clone each repository to the configured location.
shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
@@ -1419,8 +1418,7 @@
repo_info_dicts = []
repo_info_dict = kwd.get( 'repo_info_dict', None )
if repo_info_dict:
- if isinstance( repo_info_dict, basestring ):
- repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
else:
# Entering this else block used to happen only if the tool_shed_repository does not include any valid tools. After repository dependencies
# were introduced, it may never happen, but we'll keep the block just in case.
@@ -1442,12 +1440,9 @@
repository_metadata=None,
metadata=metadata,
repository_dependencies=repository_dependencies )
- repo_info_dict = encoding_util.tool_shed_encode( repo_info_dict )
repo_info_dicts.append( repo_info_dict )
# Make sure all tool_shed_repository records exist.
if install_repository_dependencies:
- # This is a bit screwy because filtered_repo_info_dicts in this block is a list of tool_shed_encoded dictionaries, but
- # in the associated else block, it is a list of unencoded dictionaries - not sure if this should be corrected...
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
shed_util.create_repository_dependency_objects( trans,
tool_path,
https://bitbucket.org/galaxy/galaxy-central/commits/395bfeb484ae/
changeset: 395bfeb484ae
user: inithello
date: 2013-01-07 18:21:46
summary: Tool shed functional test enhancements - more circular repository dependency tests.
affected #: 7 files
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/base/test_db_util.py
--- a/test/tool_shed/base/test_db_util.py
+++ b/test/tool_shed/base/test_db_util.py
@@ -43,6 +43,10 @@
galaxy.model.ToolShedRepository.table.c.owner == owner,
galaxy.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
.first()
+def get_installed_repository_by_id( repository_id ):
+ return ga_session.query( galaxy.model.ToolShedRepository ) \
+ .filter( galaxy.model.ToolShedRepository.table.c.id == repository_id ) \
+ .first()
def get_installed_repository_by_name_owner( repository_name, owner ):
return ga_session.query( galaxy.model.ToolShedRepository ) \
.filter( and_( galaxy.model.ToolShedRepository.table.c.name == repository_name,
@@ -53,6 +57,10 @@
if role.name == user.email and role.description == 'Private Role for %s' % user.email:
return role
raise AssertionError( "Private role not found for user '%s'" % user.email )
+def get_repository_by_id( repository_id ):
+ return sa_session.query( model.Repository ) \
+ .filter( model.Repository.table.c.id == repository_id ) \
+ .first()
def get_user( email ):
return sa_session.query( model.User ) \
.filter( model.User.table.c.email==email ) \
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -1,14 +1,16 @@
import galaxy.webapps.community.util.hgweb_config
import galaxy.model as galaxy_model
-import common, string, os, re, test_db_util, simplejson
+import common, string, os, re, test_db_util, simplejson, logging, time
import galaxy.util as util
from base.twilltestcase import tc, from_json_string, TwillTestCase, security, urllib
-from galaxy.tool_shed.encoding_util import tool_shed_encode
+from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
from galaxy import eggs
eggs.require('mercurial')
from mercurial import hg, ui
+log = logging.getLogger( __name__ )
+
class ShedTwillTestCase( TwillTestCase ):
def setUp( self ):
# Security helper
@@ -429,7 +431,7 @@
if workflow_name not in strings_displayed:
strings_displayed.append( workflow_name )
self.check_for_strings( strings_displayed, strings_not_displayed )
- def initiate_installation_process( self ):
+ def initiate_installation_process( self, install_tool_dependencies=False, install_repository_dependencies=True ):
html = self.last_page()
# Since the installation process is by necessity asynchronous, we have to get the parameters to 'manually' initiate the
# installation process. This regex will return the tool shed repository IDs in group(1), the encoded_kwd parameter in
@@ -438,11 +440,20 @@
install_parameters = re.search( 'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \);', html )
if install_parameters:
iri_ids = install_parameters.group(1)
- encoded_kwd = install_parameters.group(2)
+ # In some cases, the returned iri_ids are of the form: "[u'<encoded id>', u'<encoded id>']"
+ # This ensures that non-hex characters are stripped out of the list, so that util.listify/decode_id will handle them correctly.
+ repository_ids = str( iri_ids )
+ repository_ids = re.sub( '[^a-fA-F0-9,]+', '', repository_ids )
+ decoded_kwd = tool_shed_decode( install_parameters.group(2) )
+ if 'install_tool_dependencies' in decoded_kwd:
+ decoded_kwd[ 'install_tool_dependencies' ] = install_tool_dependencies
+ if 'install_repository_dependencies' in decoded_kwd:
+ decoded_kwd[ 'install_repository_dependencies' ] = install_repository_dependencies
reinstalling = install_parameters.group(3)
url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
- ( iri_ids, encoded_kwd, reinstalling )
+ ( ','.join( util.listify( repository_ids ) ), tool_shed_encode( decoded_kwd ), reinstalling )
self.visit_galaxy_url( url )
+ return util.listify( repository_ids )
def install_repository( self, name, owner, category_name, install_tool_dependencies=False,
changeset_revision=None, strings_displayed=[], strings_not_displayed=[],
preview_strings_displayed=[], post_submit_strings_displayed=[], **kwd ):
@@ -469,8 +480,8 @@
kwd[ 'shed_tool_conf' ] = self.shed_tool_conf
self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
- self.initiate_installation_process()
- self.wait_for_repository_installation( repository, changeset_revision )
+ repository_ids = self.initiate_installation_process( install_tool_dependencies )
+ self.wait_for_repository_installation( repository_ids )
def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/load_invalid_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
( self.security.encode_id( repository.id ), tool_xml, changeset_revision )
@@ -510,12 +521,17 @@
self.visit_galaxy_url( url )
strings_displayed = [ installed_repository.name, 'repository has been activated' ]
self.check_for_strings( strings_displayed, [] )
- def reinstall_repository( self, installed_repository ):
- url = '/admin_toolshed/reinstall_repository?id=%s' % self.security.encode_id( installed_repository.id )
+ def reinstall_repository( self,
+ installed_repository,
+ install_repository_dependencies='true',
+ install_tool_dependencies='false' ):
+ url = '/admin_toolshed/reselect_tool_panel_section?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
- self.initiate_installation_process()
- tool_shed_repository = test_db_util.get_repository_by_name_and_owner( installed_repository.name, installed_repository.owner )
- self.wait_for_repository_installation( tool_shed_repository, installed_repository.installed_changeset_revision )
+ url = '/admin_toolshed/reinstall_repository?id=%s&install_repository_dependencies=%s&install_tool_dependencies=%s' % \
+ ( self.security.encode_id( installed_repository.id ), install_repository_dependencies, install_tool_dependencies )
+ self.visit_galaxy_url( url )
+ repository_ids = self.initiate_installation_process( install_tool_dependencies, install_repository_dependencies )
+ self.wait_for_repository_installation( repository_ids )
def repository_is_new( self, repository ):
repo = hg.repository( ui.ui(), self.get_repo_path( repository ) )
tip_ctx = repo.changectx( repo.changelog.tip() )
@@ -636,22 +652,18 @@
def visit_galaxy_url( self, url ):
url = '%s%s' % ( self.galaxy_url, url )
self.visit_url( url )
- def wait_for_repository_installation( self, repository, changeset_revision ):
+ def wait_for_repository_installation( self, repository_ids ):
final_states = [ galaxy_model.ToolShedRepository.installation_status.ERROR,
- galaxy_model.ToolShedRepository.installation_status.INSTALLED,
- galaxy_model.ToolShedRepository.installation_status.UNINSTALLED,
- galaxy_model.ToolShedRepository.installation_status.DEACTIVATED ]
- repository_name = repository.name
- owner = repository.user.username
- if changeset_revision is None:
- changeset_revision = self.get_repository_tip( repository )
- galaxy_repository = test_db_util.get_galaxy_repository_by_name_owner_changeset_revision( repository_name, owner, changeset_revision )
- timeout_counter = 0
- while galaxy_repository.status not in final_states:
- ga_refresh( galaxy_repository )
- timeout_counter = timeout_counter + 1
- if timeout_counter > common.repository_installation_timeout:
- raise AssertionError( 'Repository installation timed out, %d seconds elapsed, repository state is %s.' % \
- ( timeout_counter, repository.status ) )
- break
- time.sleep( 1 )
+ galaxy_model.ToolShedRepository.installation_status.INSTALLED ]
+ if repository_ids:
+ for repository_id in repository_ids:
+ galaxy_repository = test_db_util.get_installed_repository_by_id( self.security.decode_id( repository_id ) )
+ timeout_counter = 0
+ while galaxy_repository.status not in final_states:
+ test_db_util.ga_refresh( galaxy_repository )
+ timeout_counter = timeout_counter + 1
+ if timeout_counter > common.repository_installation_timeout:
+ raise AssertionError( 'Repository installation timed out, %d seconds elapsed, repository state is %s.' % \
+ ( timeout_counter, galaxy_repository.status ) )
+ break
+ time.sleep( 1 )
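The scraping in initiate_installation_process above is dense; here is a sketch of the two regular expressions at work on a fabricated HTML fragment shaped like the page's initiate_repository_installation call (only the regexes come from the test code):

    import re

    html = 'initiate_repository_installation( "[u\'abc123\']", "<encoded_kwd>", "false" );'
    m = re.search(r'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \);', html)
    # Strip the u'...' list wrapping so only hex id characters and commas remain.
    repository_ids = re.sub('[^a-fA-F0-9,]+', '', str(m.group(1)))
    print(repository_ids)  # -> abc123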
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- a/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -2,8 +2,8 @@
import tool_shed.base.test_db_util as test_db_util
freebayes_repository_name = 'freebayes_0040'
-freebayes_repository_name_description = "Galaxy's freebayes tool"
-freebayes_repository_name_long_description = "Long description of Galaxy's freebayes tool"
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
filtering_repository_name = 'filtering_0040'
filtering_repository_description = "Galaxy's filtering tool"
@@ -31,8 +31,8 @@
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
repository = self.get_or_create_repository( name=freebayes_repository_name,
- description=freebayes_repository_name_description,
- long_description=freebayes_repository_name_long_description,
+ description=freebayes_repository_description,
+ long_description=freebayes_repository_long_description,
owner=common.test_user_1_name,
categories=[ 'test_0040_repository_circular_dependencies' ],
strings_displayed=[] )
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/functional/test_0080_advanced_circular_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0080_advanced_circular_dependencies.py
@@ -0,0 +1,89 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+column_repository_name = 'column_maker_0080'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0080'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 0080 Advanced Circular Dependencies'
+category_description = 'Test circular dependency features'
+
+class TestRepositoryCircularDependencies( ShedTwillTestCase ):
+ '''Verify that the code correctly handles circular dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_initiate_category_repositories( self ):
+ """Create a category for this test suite and add repositories to it."""
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=column_repository_name,
+ description=column_repository_description,
+ long_description=column_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository,
+ 'column_maker/column_maker.tar',
+ strings_displayed=[],
+ commit_message='Uploaded column_maker.tar.' )
+ repository = self.get_or_create_repository( name=convert_repository_name,
+ description=convert_repository_description,
+ long_description=convert_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository,
+ 'convert_chars/convert_chars.tar',
+ strings_displayed=[],
+ commit_message='Uploaded convert_chars.tar.' )
+ def test_0020_create_repository_dependencies( self ):
+ '''Upload a repository_dependencies.xml file that specifies the current revision of convert_chars_0080 to the column_maker_0080 repository.'''
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0080', additional_paths=[ 'convert' ] )
+ self.generate_repository_dependency_xml( [ convert_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Column maker depends on the convert repository.' )
+ self.upload_file( column_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on convert' )
+ def test_0025_create_dependency_on_filtering( self ):
+ '''Upload a repository_dependencies.xml file that specifies the current revision of column_maker_0080 to the convert_chars_0080 repository.'''
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0080', additional_paths=[ 'convert' ] )
+ self.generate_repository_dependency_xml( [ column_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Convert chars depends on the column_maker repository.' )
+ self.upload_file( convert_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on column' )
+ def test_0030_verify_repository_dependencies( self ):
+ '''Verify that each repository can depend on the other without causing an infinite loop.'''
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ self.check_repository_dependency( convert_repository, column_repository, self.get_repository_tip( column_repository ) )
+ self.check_repository_dependency( column_repository, convert_repository, self.get_repository_tip( convert_repository ) )
+ def test_0035_verify_repository_metadata( self ):
+ '''Verify that resetting the metadata does not change it.'''
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ for repository in [ column_repository, convert_repository ]:
+ self.verify_unchanged_repository_metadata( repository )
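The mutual dependency set up in test_0020 and test_0025 is safe to traverse only if the dependency walker remembers which repositories it has already visited. A minimal sketch of such a cycle-safe walk follows; it is illustrative only, not Galaxy's actual traversal code, and the dict of names is a stand-in for real dependency metadata:

# Illustrative sketch: a cycle-safe dependency walk. The visited set is
# what keeps column_maker_0080 <-> convert_chars_0080 from recursing forever.
def walk_dependencies( repository, dependencies, visited=None ):
    if visited is None:
        visited = set()
    if repository in visited:
        return visited
    visited.add( repository )
    for dependency in dependencies.get( repository, [] ):
        walk_dependencies( dependency, dependencies, visited )
    return visited

dependencies = { 'column_maker_0080': [ 'convert_chars_0080' ],
                 'convert_chars_0080': [ 'column_maker_0080' ] }
assert walk_dependencies( 'column_maker_0080', dependencies ) == set( [ 'column_maker_0080', 'convert_chars_0080' ] )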
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
@@ -0,0 +1,319 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+column_repository_name = 'column_maker_0080'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0080'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 0080 Advanced Circular Dependencies'
+category_description = 'Test circular dependency features'
+
+class TestRepositoryDependencies( ShedTwillTestCase ):
+ '''Test installation, deactivation, reactivation, and uninstallation of repositories with circular repository dependencies.'''
+ def test_0000_create_or_login_admin_user( self ):
+ """Create necessary user accounts and login as an admin user."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ galaxy_admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ galaxy_admin_user_private_role = test_db_util.get_galaxy_private_role( galaxy_admin_user )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_initiate_test_data( self ):
+ """Create a category for this test suite and add repositories to it."""
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=column_repository_name,
+ description=column_repository_description,
+ long_description=column_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ self.upload_file( repository,
+ 'column_maker/column_maker.tar',
+ strings_displayed=[],
+ commit_message='Uploaded column_maker.tar.' )
+ repository = self.get_or_create_repository( name=convert_repository_name,
+ description=convert_repository_description,
+ long_description=convert_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository,
+ 'convert_chars/convert_chars.tar',
+ strings_displayed=[],
+ commit_message='Uploaded convert_chars.tar.' )
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_1080', additional_paths=[ 'convert' ] )
+ self.generate_repository_dependency_xml( [ convert_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Column maker depends on the convert repository.' )
+ self.upload_file( column_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on convert' )
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_1080', additional_paths=[ 'convert' ] )
+ self.generate_repository_dependency_xml( [ column_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Convert chars depends on the column_maker repository.' )
+ self.upload_file( convert_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on column' )
+ def test_0010_install_repositories( self ):
+ '''Install convert_chars with the repository dependencies check box checked - this should install both convert_chars and column_maker.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.install_repository( convert_repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_tool_dependencies=False,
+ install_repository_dependencies='Yes',
+ new_tool_panel_section='test_1080' )
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ browse_strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision ]
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Installed' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ strings_displayed.append( 'Installed repository dependencies' )
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0015_deactivate_convert_repository( self ):
+ '''Deactivate convert_chars - this should display column_maker as installed but missing repository dependencies'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_convert_repository, remove_from_disk=False )
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Missing repository dependencies',
+ 'Deactivated' ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ def test_0020_reactivate_convert_repository( self ):
+ '''Activate convert_chars - this should display both convert_chars and column_maker as installed with a green box'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.reactivate_repository( installed_convert_repository )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0025_deactivate_column_repository( self ):
+ '''Deactivate column_maker - this should display convert_chars installed but missing repository dependencies'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_column_repository, remove_from_disk=False )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Missing repository dependencies',
+ 'Deactivated' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0030_deactivate_convert_repository( self ):
+ '''Deactivate convert_chars - both convert_chars and column_maker are deactivated'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_convert_repository, remove_from_disk=False )
+ strings_not_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+ def test_0035_reactivate_column_repository( self ):
+ '''Activate column_maker - this should not automatically activate convert_chars, so column_maker should be displayed as installed but missing repository dependencies'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.reactivate_repository( installed_column_repository )
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Missing repository dependencies',
+ 'Deactivated' ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ def test_0040_reactivate_convert_repository( self ):
+ '''Activate convert_chars - this should display both convert_chars and column_maker as installed with a green box'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.reactivate_repository( installed_convert_repository )
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0045_uninstall_column_repository( self ):
+ '''Uninstall column_maker - this should display convert_chars installed but missing repository dependencies'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_column_repository, remove_from_disk=True )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Missing repository dependencies',
+ 'Uninstalled' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0050_reinstall_column_repository( self ):
+ '''Reinstall column_maker without repository dependencies, verify both convert_chars and column_maker are installed.'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.reinstall_repository( installed_column_repository, install_repository_dependencies=False )
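+ # Convert_chars was never uninstalled in this test, so even with
+ # install_repository_dependencies=False both repositories should still
+ # display as installed, with their dependency links intact.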
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0055_uninstall_convert_repository( self ):
+ '''Uninstall convert_chars, verify column_maker installed but missing repository dependencies'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_convert_repository, remove_from_disk=True )
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Missing repository dependencies',
+ 'Uninstalled' ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ def test_0060_uninstall_column_repository( self ):
+ '''Uninstall column_maker - both convert_chars and column_maker are uninstalled'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_column_repository, remove_from_disk=True )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Missing repository dependencies',
+ 'Activate or reinstall repository',
+ 'Uninstalled' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0065_reinstall_convert_repository( self ):
+ '''Reinstall convert_chars with the install repository dependencies check box checked - this should install both convert_chars and column_maker.'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.reinstall_repository( installed_convert_repository, install_repository_dependencies=True )
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
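For reference, the repository_dependencies.xml files generated and uploaded by these tests follow the Tool Shed's dependency definition format. A minimal sketch, assuming the standard format; the toolshed URL, owner, and changeset revision below are illustrative placeholders, not values taken from this commit:

# Sketch of the kind of repository_dependencies.xml that
# generate_repository_dependency_xml() produces in the tests above.
# All attribute values here are placeholders.
dependency_xml = '''<?xml version="1.0"?>
<repositories description="Convert chars depends on the column_maker repository.">
    <repository toolshed="http://localhost:9009" name="column_maker_0080" owner="user1" changeset_revision="0123456789ab" />
</repositories>
'''
with open( 'repository_dependencies.xml', 'w' ) as handle:
    handle.write( dependency_xml )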
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/test_data/column_maker/column_maker.tar
Binary file test/tool_shed/test_data/column_maker/column_maker.tar has changed
diff -r df20658ac4991d970bfd9d63feae73850db84bb7 -r 395bfeb484ae0f69bdd7be2115ed007b4c01dd90 test/tool_shed/test_data/convert_chars/convert_chars.tar
Binary file test/tool_shed/test_data/convert_chars/convert_chars.tar has changed
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2f51be0814ca/
changeset: 2f51be0814ca
user: dan
date: 2013-01-07 17:45:26
summary: Fix for the workflow editor when a missing tool is connected to a valid tool. A JavaScript error would cause the 'Loading workflow editor...' modal window to spin indefinitely.
affected #: 1 file
diff -r eb4dac39eed53cc0ef1550ee63486358fd53ad59 -r 2f51be0814cafc106de5ef39878afd3333c90a35 static/scripts/galaxy.workflow_editor.canvas.js
--- a/static/scripts/galaxy.workflow_editor.canvas.js
+++ b/static/scripts/galaxy.workflow_editor.canvas.js
@@ -80,9 +80,15 @@
$.extend( Connector.prototype, {
connect: function ( t1, t2 ) {
this.handle1 = t1;
- this.handle1.connect( this );
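+ // A step for a missing tool can leave a terminal undefined; guard both
+ // handles so connect() no longer throws and hangs the loading modal.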
+ if ( this.handle1 ) {
+ this.handle1.connect( this );
+ }
this.handle2 = t2;
- this.handle2.connect( this );
+ if ( this.handle2 ) {
+ this.handle2.connect( this );
+ }
},
destroy : function () {
if ( this.handle1 ) {
https://bitbucket.org/galaxy/galaxy-central/commits/ce62bf5a91f8/
changeset: ce62bf5a91f8
user: dan
date: 2013-01-07 17:45:30
summary: Pack scripts.
affected #: 1 file
diff -r 2f51be0814cafc106de5ef39878afd3333c90a35 -r ce62bf5a91f86d0c53764bda3df4975486d7512e static/scripts/packed/galaxy.workflow_editor.canvas.js
--- a/static/scripts/packed/galaxy.workflow_editor.canvas.js
+++ b/static/scripts/packed/galaxy.workflow_editor.canvas.js
@@ -1,1 +1,1 @@
-function Terminal(a){this.element=a;this.connectors=[]}$.extend(Terminal.prototype,{connect:function(a){this.connectors.push(a);if(this.node){this.node.changed()}},disconnect:function(a){this.connectors.splice($.inArray(a,this.connectors),1);if(this.node){this.node.changed()}},redraw:function(){$.each(this.connectors,function(a,b){b.redraw()})},destroy:function(){$.each(this.connectors.slice(),function(a,b){b.destroy()})}});function OutputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}OutputTerminal.prototype=new Terminal();function InputTerminal(b,c,a){Terminal.call(this,b);this.datatypes=c;this.multiple=a}InputTerminal.prototype=new Terminal();$.extend(InputTerminal.prototype,{can_accept:function(a){if(this.connectors.length<1||this.multiple){for(var c in this.datatypes){var f=new Array();f=f.concat(a.datatypes);if(a.node.post_job_actions){for(var d in a.node.post_job_actions){var g=a.node.post_job_actions[d];if(g.action_type=="ChangeDatatypeAction"&&(g.output_name==""||g.output_name==a.name)&&g.action_arguments){f.push(g.action_arguments.newtype)}}}for(var b in f){if(f[b]=="input"||issubtype(f[b],this.datatypes[c])){return true}}}}return false}});function Connector(b,a){this.canvas=null;this.dragging=false;this.inner_color="#FFFFFF";this.outer_color="#D8B365";if(b&&a){this.connect(b,a)}}$.extend(Connector.prototype,{connect:function(b,a){this.handle1=b;this.handle1.connect(this);this.handle2=a;this.handle2.connect(this)},destroy:function(){if(this.handle1){this.handle1.disconnect(this)}if(this.handle2){this.handle2.disconnect(this)}$(this.canvas).remove()},redraw:function(){var d=$("#canvas-container");if(!this.canvas){this.canvas=document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(this.canvas)}d.append($(this.canvas));if(this.dragging){this.canvas.style.zIndex="300"}}var n=function(c){return $(c).offset().left-d.offset().left};var i=function(c){return $(c).offset().top-d.offset().top};if(!this.handle1||!this.handle2){return}var h=n(this.handle1.element)+5;var g=i(this.handle1.element)+5;var p=n(this.handle2.element)+5;var m=i(this.handle2.element)+5;var f=100;var k=Math.min(h,p);var a=Math.max(h,p);var j=Math.min(g,m);var t=Math.max(g,m);var b=Math.min(Math.max(Math.abs(t-j)/2,100),300);var o=k-f;var s=j-f;var q=a-k+2*f;var l=t-j+2*f;this.canvas.style.left=o+"px";this.canvas.style.top=s+"px";this.canvas.setAttribute("width",q);this.canvas.setAttribute("height",l);h-=o;g-=s;p-=o;m-=s;var r=this.canvas.getContext("2d");r.lineCap="round";r.strokeStyle=this.outer_color;r.lineWidth=7;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke();r.strokeStyle=this.inner_color;r.lineWidth=5;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke()}});function Node(a){this.element=a;this.input_terminals={};this.output_terminals={};this.tool_errors={}}$.extend(Node.prototype,{enable_input_terminal:function(f,b,c,a){var d=this;$(f).each(function(){var g=this.terminal=new InputTerminal(this,c,a);g.node=d;g.name=b;$(this).bind("dropinit",function(h,i){return $(i.drag).hasClass("output-terminal")&&g.can_accept(i.drag.terminal)}).bind("dropstart",function(h,i){if(i.proxy.terminal){i.proxy.terminal.connectors[0].inner_color="#BBFFBB"}}).bind("dropend",function(h,i){if(i.proxy.terminal){i.proxy.terminal.connectors[0].inner_color="#FFFFFF"}}).bind("drop",function(h,i){(new Connector(i.drag.terminal,g)).redraw()}).bind("hover",function(){if(g.connectors.length>0){var h=$("<div 
class='callout'></div>").css({display:"none"}).appendTo("body").append($("<div class='buttons'></div>").append($("<img/>").attr("src",galaxy_paths.attributes.image_path+"/delete_icon.png").click(function(){$.each(g.connectors,function(j,i){if(i){i.destroy()}});h.remove()}))).bind("mouseleave",function(){$(this).remove()});h.css({top:$(this).offset().top-2,left:$(this).offset().left-h.width(),"padding-right":$(this).width()}).show()}});d.input_terminals[b]=g})},enable_output_terminal:function(d,a,b){var c=this;$(d).each(function(){var g=this;var f=this.terminal=new OutputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dragstart",function(j,k){$(k.available).addClass("input-terminal-active");workflow.check_changes_in_active_form();var i=$('<div class="drag-terminal" style="position: absolute;"></div>').appendTo("#canvas-container").get(0);i.terminal=new OutputTerminal(i);var l=new Connector();l.dragging=true;l.connect(this.terminal,i.terminal);return i}).bind("drag",function(i,j){var h=function(){var l=$(j.proxy).offsetParent().offset(),k=j.offsetX-l.left,m=j.offsetY-l.top;$(j.proxy).css({left:k,top:m});j.proxy.terminal.redraw();canvas_manager.update_viewport_overlay()};h();$("#canvas-container").get(0).scroll_panel.test(i,h)}).bind("dragend",function(h,i){i.proxy.terminal.connectors[0].destroy();$(i.proxy).remove();$(i.available).removeClass("input-terminal-active");$("#canvas-container").get(0).scroll_panel.stop()});c.output_terminals[a]=f})},redraw:function(){$.each(this.input_terminals,function(a,b){b.redraw()});$.each(this.output_terminals,function(a,b){b.redraw()})},destroy:function(){$.each(this.input_terminals,function(a,b){b.destroy()});$.each(this.output_terminals,function(a,b){b.destroy()});workflow.remove_node(this);$(this.element).remove()},make_active:function(){$(this.element).addClass("toolForm-active")},make_inactive:function(){var a=this.element.get(0);(function(b){b.removeChild(a);b.appendChild(a)})(a.parentNode);$(a).removeClass("toolForm-active")},init_field_data:function(h){var g=this.element;if(h.type){this.type=h.type}this.name=h.name;this.form_html=h.form_html;this.tool_state=h.tool_state;this.tool_errors=h.tool_errors;this.tooltip=h.tooltip?h.tooltip:"";this.annotation=h.annotation;this.post_job_actions=h.post_job_actions?h.post_job_actions:{};this.workflow_outputs=h.workflow_outputs?h.workflow_outputs:[];if(this.tool_errors){g.addClass("tool-node-error")}else{g.removeClass("tool-node-error")}var d=this;var c=Math.max(150,g.width());var a=g.find(".toolFormBody");a.find("div").remove();var i=$("<div class='inputs'></div>").appendTo(a);$.each(h.data_inputs,function(k,f){var j=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(j,f.name,f.extensions,f.multiple);var b=$("<div class='form-row dataRow input-data-row' name='"+f.name+"'>"+f.label+"</div>");b.css({position:"absolute",left:-1000,top:-1000,display:"none"});$("body").append(b);c=Math.max(c,b.outerWidth());b.css({position:"",left:"",top:"",display:""});b.remove();i.append(b.prepend(j))});if((h.data_inputs.length>0)&&(h.data_outputs.length>0)){a.append($("<div class='rule'></div>"))}$.each(h.data_outputs,function(k,b){var j=$("<div class='terminal output-terminal'></div>");d.enable_output_terminal(j,b.name,b.extensions);var f=b.name;if(b.extensions.indexOf("input")<0){f=f+" ("+b.extensions.join(", ")+")"}var m=$("<div class='form-row dataRow'>"+f+"</div>");if(d.type=="tool"){var l=$("<div class='callout "+f+"'></div>").css({display:"none"}).append($("<div 
class='buttons'></div>").append($("<img/>").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png").click(function(){if($.inArray(b.name,d.workflow_outputs)!=-1){d.workflow_outputs.splice($.inArray(b.name,d.workflow_outputs),1);l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png")}else{d.workflow_outputs.push(b.name);l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png")}workflow.has_changes=true;canvas_manager.draw_overview()}))).tooltip({delay:500,title:"Flag this as a workflow output. All non-flagged outputs will be hidden."});l.css({top:"50%",margin:"-8px 0px 0px 0px",right:8});l.show();m.append(l);if($.inArray(b.name,d.workflow_outputs)===-1){l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png")}else{l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png")}m.hover(function(){l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-yellow.png")},function(){if($.inArray(b.name,d.workflow_outputs)===-1){l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png")}else{l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png")}})}m.css({position:"absolute",left:-1000,top:-1000,display:"none"});$("body").append(m);c=Math.max(c,m.outerWidth()+17);m.css({position:"",left:"",top:"",display:""});m.detach();a.append(m.append(j))});g.css("width",Math.min(250,Math.max(g.width(),c)));workflow.node_changed(this)},update_field_data:function(f){var c=$(this.element),d=this;this.tool_state=f.tool_state;this.form_html=f.form_html;this.tool_errors=f.tool_errors;this.annotation=f.annotation;var g=$.parseJSON(f.post_job_actions);this.post_job_actions=g?g:{};if(this.tool_errors){c.addClass("tool-node-error")}else{c.removeClass("tool-node-error")}var h=c.find("div.inputs");var b=$("<div class='inputs'></div>");var a=h.find("div.input-data-row");$.each(f.data_inputs,function(l,j){var k=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(k,j.name,j.extensions,j.multiple);h.find("div[name='"+j.name+"']").each(function(){$(this).find(".input-terminal").each(function(){var i=this.terminal.connectors[0];if(i){k[0].terminal.connectors[0]=i;i.handle2=k[0].terminal}});$(this).remove()});b.append($("<div class='form-row dataRow input-data-row' name='"+j.name+"'>"+j.label+"</div>").prepend(k))});h.replaceWith(b);h.find("div.input-data-row > .terminal").each(function(){this.terminal.destroy()});this.changed();this.redraw()},error:function(d){var a=$(this.element).find(".toolFormBody");a.find("div").remove();var c="<div style='color: red; text-style: italic;'>"+d+"</div>";this.form_html=c;a.html(c);workflow.node_changed(this)},changed:function(){workflow.node_changed(this)}});function Workflow(a){this.canvas_container=a;this.id_counter=0;this.nodes={};this.name=null;this.has_changes=false;this.active_form_has_changes=false}$.extend(Workflow.prototype,{add_node:function(a){a.id=this.id_counter;a.element.attr("id","wf-node-step-"+a.id);this.id_counter++;this.nodes[a.id]=a;this.has_changes=true;a.workflow=this},remove_node:function(a){if(this.active_node==a){this.clear_active_node()}delete this.nodes[a.id];this.has_changes=true},remove_all:function(){wf=this;$.each(this.nodes,function(b,a){a.destroy();wf.remove_node(a)})},rectify_workflow_outputs:function(){var 
b,a=false;$.each(this.nodes,function(c,d){if(d.workflow_outputs&&d.workflow_outputs.length>0){b=true}$.each(d.post_job_actions,function(g,f){if(f.action_type==="HideDatasetAction"){a=true}})});if(b!==false||a!==false){$.each(this.nodes,function(c,g){if(g.type==="tool"){var f=false;if(g.post_job_actions==null){g.post_job_actions={};f=true}var d=[];$.each(g.post_job_actions,function(i,h){if(h.action_type=="HideDatasetAction"){d.push(i)}});if(d.length>0&&g==workflow.active_node){$.each(d,function(h,j){f=true;delete g.post_job_actions[j]})}if(b){$.each(g.output_terminals,function(i,j){var h=true;$.each(g.workflow_outputs,function(l,m){if(j.name===m){h=false}});if(h===true){f=true;var k={action_type:"HideDatasetAction",output_name:j.name,action_arguments:{}};g.post_job_actions["HideDatasetAction"+j.name]=null;g.post_job_actions["HideDatasetAction"+j.name]=k}})}if(workflow.active_node==g&&f===true){workflow.reload_active_node()}}})}},to_simple:function(){var a={};$.each(this.nodes,function(c,f){var g={};$.each(f.input_terminals,function(i,j){g[j.name]=null;var h=[];$.each(j.connectors,function(k,l){h[k]={id:l.handle1.node.id,output_name:l.handle1.name};g[j.name]=h})});var b={};if(f.post_job_actions){$.each(f.post_job_actions,function(j,h){var k={action_type:h.action_type,output_name:h.output_name,action_arguments:h.action_arguments};b[h.action_type+h.output_name]=null;b[h.action_type+h.output_name]=k})}if(!f.workflow_outputs){f.workflow_outputs=[]}var d={id:f.id,type:f.type,tool_id:f.tool_id,tool_state:f.tool_state,tool_errors:f.tool_errors,input_connections:g,position:$(f.element).position(),annotation:f.annotation,post_job_actions:f.post_job_actions,workflow_outputs:f.workflow_outputs};a[f.id]=d});return{steps:a}},from_simple:function(b){wf=this;var c=0;wf.name=b.name;var a=false;$.each(b.steps,function(g,f){var d=prebuild_node("tool",f.name,f.tool_id);d.init_field_data(f);if(f.position){d.element.css({top:f.position.top,left:f.position.left})}d.id=f.id;wf.nodes[d.id]=d;c=Math.max(c,parseInt(g));if(!a&&d.type==="tool"){if(d.workflow_outputs.length>0){a=true}else{$.each(d.post_job_actions,function(i,h){if(h.action_type==="HideDatasetAction"){a=true}})}}});wf.id_counter=c+1;$.each(b.steps,function(g,f){var d=wf.nodes[g];$.each(f.input_connections,function(i,h){if(h){if($.isArray(h)){$.each(h,function(m,k){var n=wf.nodes[k.id];var o=new Connector();o.connect(n.output_terminals[k.output_name],d.input_terminals[i]);o.redraw()})}else{var j=wf.nodes[h.id];var l=new Connector();l.connect(j.output_terminals[h.output_name],d.input_terminals[i]);l.redraw()}}});if(a&&d.type==="tool"){$.each(d.output_terminals,function(h,i){if(d.post_job_actions["HideDatasetAction"+i.name]===undefined){d.workflow_outputs.push(i.name);callout=$(d.element).find(".callout."+i.name);callout.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png");workflow.has_changes=true}})}})},check_changes_in_active_form:function(){if(this.active_form_has_changes){this.has_changes=true;$("#right-content").find("form").submit();this.active_form_has_changes=false}},reload_active_node:function(){if(this.active_node){var a=this.active_node;this.clear_active_node();this.activate_node(a)}},clear_active_node:function(){if(this.active_node){this.active_node.make_inactive();this.active_node=null}parent.show_form_for_tool("<div>No node 
selected</div>")},activate_node:function(a){if(this.active_node!=a){this.check_changes_in_active_form();this.clear_active_node();parent.show_form_for_tool(a.form_html+a.tooltip,a);a.make_active();this.active_node=a}},node_changed:function(a){this.has_changes=true;if(this.active_node==a){this.check_changes_in_active_form();parent.show_form_for_tool(a.form_html+a.tooltip,a)}},layout:function(){this.check_changes_in_active_form();this.has_changes=true;var i={};var b={};$.each(this.nodes,function(l,k){if(i[l]===undefined){i[l]=0}if(b[l]===undefined){b[l]=[]}});$.each(this.nodes,function(l,k){$.each(k.input_terminals,function(m,n){$.each(n.connectors,function(p,q){var o=q.handle1.node;i[k.id]+=1;b[o.id].push(k.id)})})});node_ids_by_level=[];while(true){level_parents=[];for(var a in i){if(i[a]==0){level_parents.push(a)}}if(level_parents.length==0){break}node_ids_by_level.push(level_parents);for(var f in level_parents){var j=level_parents[f];delete i[j];for(var g in b[j]){i[b[j][g]]-=1}}}if(i.length){return}var d=this.nodes;var h=80;v_pad=30;var c=h;$.each(node_ids_by_level,function(k,l){l.sort(function(p,o){return $(d[p].element).position().top-$(d[o].element).position().top});var m=0;var n=v_pad;$.each(l,function(o,r){var q=d[r];var p=$(q.element);$(p).css({top:n,left:c});m=Math.max(m,$(p).width());n+=$(p).height()+v_pad});c+=m+h});$.each(d,function(k,l){l.redraw()})},bounds_for_all_nodes:function(){var d=Infinity,b=-Infinity,c=Infinity,a=-Infinity,f;$.each(this.nodes,function(h,g){e=$(g.element);f=e.position();d=Math.min(d,f.left);b=Math.max(b,f.left+e.width());c=Math.min(c,f.top);a=Math.max(a,f.top+e.width())});return{xmin:d,xmax:b,ymin:c,ymax:a}},fit_canvas_to_nodes:function(){var a=this.bounds_for_all_nodes();var f=this.canvas_container.position();var i=this.canvas_container.parent();var d=fix_delta(a.xmin,100);var h=fix_delta(a.ymin,100);d=Math.max(d,f.left);h=Math.max(h,f.top);var c=f.left-d;var g=f.top-h;var b=round_up(a.xmax+100,100)+d;var j=round_up(a.ymax+100,100)+h;b=Math.max(b,-c+i.width());j=Math.max(j,-g+i.height());this.canvas_container.css({left:c,top:g,width:b,height:j});this.canvas_container.children().each(function(){var k=$(this).position();$(this).css("left",k.left+d);$(this).css("top",k.top+h)})}});function fix_delta(a,b){if(a<b||a>3*b){new_pos=(Math.ceil(((a%b))/b)+1)*b;return(-(a-new_pos))}return 0}function round_up(a,b){return Math.ceil(a/b)*b}function prebuild_node(l,j,r){var i=$("<div class='toolForm toolFormInCanvas'></div>");var g=new Node(i);g.type=l;if(l=="tool"){g.tool_id=r}var n=$("<div class='toolFormTitle unselectable'>"+j+"</div>");i.append(n);i.css("left",$(window).scrollLeft()+20);i.css("top",$(window).scrollTop()+20);var m=$("<div class='toolFormBody'></div>");var h="<div><img height='16' align='middle' src='"+galaxy_paths.attributes.image_path+"/loading_small_white_bg.gif'/> loading tool info...</div>";m.append(h);g.form_html=h;i.append(m);var k=$("<div class='buttons' style='float: right;'></div>");k.append($("<img/>").attr("src",galaxy_paths.attributes.image_path+"/delete_icon.png").click(function(b){g.destroy()}).hover(function(){$(this).attr("src",galaxy_paths.attributes.image_path+"/delete_icon_dark.png")},function(){$(this).attr("src",galaxy_paths.attributes.image_path+"/delete_icon.png")}));i.appendTo("#canvas-container");var d=$("#canvas-container").position();var c=$("#canvas-container").parent();var a=i.width();var 
q=i.height();i.css({left:(-d.left)+(c.width()/2)-(a/2),top:(-d.top)+(c.height()/2)-(q/2)});k.prependTo(n);a+=(k.width()+10);i.css("width",a);$(i).bind("dragstart",function(){workflow.activate_node(g)}).bind("dragend",function(){workflow.node_changed(this);workflow.fit_canvas_to_nodes();canvas_manager.draw_overview()}).bind("dragclickonly",function(){workflow.activate_node(g)}).bind("drag",function(o,p){var f=$(this).offsetParent().offset(),b=p.offsetX-f.left,s=p.offsetY-f.top;$(this).css({left:b,top:s});$(this).find(".terminal").each(function(){this.terminal.redraw()})});return g}var ext_to_type=null;var type_to_type=null;function issubtype(b,a){b=ext_to_type[b];a=ext_to_type[a];return(type_to_type[b])&&(a in type_to_type[b])}function populate_datatype_info(a){ext_to_type=a.ext_to_class_name;type_to_type=a.class_to_classes}function ScrollPanel(a){this.panel=a}$.extend(ScrollPanel.prototype,{test:function(v,d){clearTimeout(this.timeout);var k=v.pageX,j=v.pageY,l=$(this.panel),c=l.position(),b=l.width(),i=l.height(),w=l.parent(),s=w.width(),a=w.height(),r=w.offset(),p=r.left,m=r.top,A=p+w.width(),u=m+w.height(),B=-(b-(s/2)),z=-(i-(a/2)),g=(s/2),f=(a/2),h=false,q=5,o=23;if(k-q<p){if(c.left<g){var n=Math.min(o,g-c.left);l.css("left",c.left+n);h=true}}else{if(k+q>A){if(c.left>B){var n=Math.min(o,c.left-B);l.css("left",c.left-n);h=true}}else{if(j-q<m){if(c.top<f){var n=Math.min(o,f-c.top);l.css("top",c.top+n);h=true}}else{if(j+q>u){if(c.top>z){var n=Math.min(o,c.top-B);l.css("top",(c.top-n)+"px");h=true}}}}}if(h){d();var l=this;this.timeout=setTimeout(function(){l.test(v,d)},50)}},stop:function(b,a){clearTimeout(this.timeout)}});function CanvasManager(b,a){this.cv=b;this.cc=this.cv.find("#canvas-container");this.oc=a.find("#overview-canvas");this.ov=a.find("#overview-viewport");this.init_drag()}$.extend(CanvasManager.prototype,{init_drag:function(){var b=this;var a=function(f,g){f=Math.min(f,b.cv.width()/2);f=Math.max(f,-b.cc.width()+b.cv.width()/2);g=Math.min(g,b.cv.height()/2);g=Math.max(g,-b.cc.height()+b.cv.height()/2);b.cc.css({left:f,top:g});b.update_viewport_overlay()};this.cc.each(function(){this.scroll_panel=new ScrollPanel(this)});var d,c;this.cv.bind("dragstart",function(){var g=$(this).offset();var f=b.cc.position();c=f.top-g.top;d=f.left-g.left}).bind("drag",function(f,g){a(g.offsetX+d,g.offsetY+c)}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});this.ov.bind("drag",function(k,l){var h=b.cc.width(),n=b.cc.height(),m=b.oc.width(),j=b.oc.height(),f=$(this).offsetParent().offset(),i=l.offsetX-f.left,g=l.offsetY-f.top;a(-(i/m*h),-(g/j*n))}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});$("#overview-border").bind("drag",function(g,i){var j=$(this).offsetParent();var h=j.offset();var f=Math.max(j.width()-(i.offsetX-h.left),j.height()-(i.offsetY-h.top));$(this).css({width:f,height:f});b.draw_overview()});$("#overview-border div").bind("drag",function(){})},update_viewport_overlay:function(){var b=this.cc,f=this.cv,a=this.oc,c=this.ov,d=b.width(),j=b.height(),i=a.width(),g=a.height(),h=b.position();c.css({left:-(h.left/d*i),top:-(h.top/j*g),width:(f.width()/d*i)-2,height:(f.height()/j*g)-2})},draw_overview:function(){var j=$("#overview-canvas"),m=j.parent().parent().width(),i=j.get(0).getContext("2d"),d=$("#canvas-container").width(),l=$("#canvas-container").height();var g,a,k,f;var h=this.cv.width();var 
b=this.cv.height();if(d<h&&l<b){k=d/h*m;f=(m-k)/2;g=l/b*m;a=(m-g)/2}else{if(d<l){a=0;g=m;k=Math.ceil(g*d/l);f=(m-k)/2}else{k=m;f=0;g=Math.ceil(k*l/d);a=(m-g)/2}}j.parent().css({left:f,top:a,width:k,height:g});j.attr("width",k);j.attr("height",g);$.each(workflow.nodes,function(t,q){i.fillStyle="#D2C099";i.strokeStyle="#D8B365";i.lineWidth=1;var s=$(q.element),n=s.position(),c=n.left/d*k,r=n.top/l*g,o=s.width()/d*k,p=s.height()/l*g;if(q.tool_errors){i.fillStyle="#FFCCCC";i.strokeStyle="#AA6666"}else{if(q.workflow_outputs!=undefined&&q.workflow_outputs.length>0){i.fillStyle="#E8A92D";i.strokeStyle="#E8A92D"}}i.fillRect(c,r,o,p);i.strokeRect(c,r,o,p)});this.update_viewport_overlay()}});
\ No newline at end of file
+function Terminal(a){this.element=a;this.connectors=[]}$.extend(Terminal.prototype,{connect:function(a){this.connectors.push(a);if(this.node){this.node.changed()}},disconnect:function(a){this.connectors.splice($.inArray(a,this.connectors),1);if(this.node){this.node.changed()}},redraw:function(){$.each(this.connectors,function(a,b){b.redraw()})},destroy:function(){$.each(this.connectors.slice(),function(a,b){b.destroy()})}});function OutputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}OutputTerminal.prototype=new Terminal();function InputTerminal(b,c,a){Terminal.call(this,b);this.datatypes=c;this.multiple=a}InputTerminal.prototype=new Terminal();$.extend(InputTerminal.prototype,{can_accept:function(a){if(this.connectors.length<1||this.multiple){for(var c in this.datatypes){var f=new Array();f=f.concat(a.datatypes);if(a.node.post_job_actions){for(var d in a.node.post_job_actions){var g=a.node.post_job_actions[d];if(g.action_type=="ChangeDatatypeAction"&&(g.output_name==""||g.output_name==a.name)&&g.action_arguments){f.push(g.action_arguments.newtype)}}}for(var b in f){if(f[b]=="input"||issubtype(f[b],this.datatypes[c])){return true}}}}return false}});function Connector(b,a){this.canvas=null;this.dragging=false;this.inner_color="#FFFFFF";this.outer_color="#D8B365";if(b&&a){this.connect(b,a)}}$.extend(Connector.prototype,{connect:function(b,a){this.handle1=b;if(this.handle1){this.handle1.connect(this)}this.handle2=a;if(this.handle2){this.handle2.connect(this)}},destroy:function(){if(this.handle1){this.handle1.disconnect(this)}if(this.handle2){this.handle2.disconnect(this)}$(this.canvas).remove()},redraw:function(){var d=$("#canvas-container");if(!this.canvas){this.canvas=document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(this.canvas)}d.append($(this.canvas));if(this.dragging){this.canvas.style.zIndex="300"}}var n=function(c){return $(c).offset().left-d.offset().left};var i=function(c){return $(c).offset().top-d.offset().top};if(!this.handle1||!this.handle2){return}var h=n(this.handle1.element)+5;var g=i(this.handle1.element)+5;var p=n(this.handle2.element)+5;var m=i(this.handle2.element)+5;var f=100;var k=Math.min(h,p);var a=Math.max(h,p);var j=Math.min(g,m);var t=Math.max(g,m);var b=Math.min(Math.max(Math.abs(t-j)/2,100),300);var o=k-f;var s=j-f;var q=a-k+2*f;var l=t-j+2*f;this.canvas.style.left=o+"px";this.canvas.style.top=s+"px";this.canvas.setAttribute("width",q);this.canvas.setAttribute("height",l);h-=o;g-=s;p-=o;m-=s;var r=this.canvas.getContext("2d");r.lineCap="round";r.strokeStyle=this.outer_color;r.lineWidth=7;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke();r.strokeStyle=this.inner_color;r.lineWidth=5;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke()}});function Node(a){this.element=a;this.input_terminals={};this.output_terminals={};this.tool_errors={}}$.extend(Node.prototype,{enable_input_terminal:function(f,b,c,a){var d=this;$(f).each(function(){var g=this.terminal=new InputTerminal(this,c,a);g.node=d;g.name=b;$(this).bind("dropinit",function(h,i){return $(i.drag).hasClass("output-terminal")&&g.can_accept(i.drag.terminal)}).bind("dropstart",function(h,i){if(i.proxy.terminal){i.proxy.terminal.connectors[0].inner_color="#BBFFBB"}}).bind("dropend",function(h,i){if(i.proxy.terminal){i.proxy.terminal.connectors[0].inner_color="#FFFFFF"}}).bind("drop",function(h,i){(new Connector(i.drag.terminal,g)).redraw()}).bind("hover",function(){if(g.connectors.length>0){var h=$("<div 
class='callout'></div>").css({display:"none"}).appendTo("body").append($("<div class='buttons'></div>").append($("<img/>").attr("src",galaxy_paths.attributes.image_path+"/delete_icon.png").click(function(){$.each(g.connectors,function(j,i){if(i){i.destroy()}});h.remove()}))).bind("mouseleave",function(){$(this).remove()});h.css({top:$(this).offset().top-2,left:$(this).offset().left-h.width(),"padding-right":$(this).width()}).show()}});d.input_terminals[b]=g})},enable_output_terminal:function(d,a,b){var c=this;$(d).each(function(){var g=this;var f=this.terminal=new OutputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dragstart",function(j,k){$(k.available).addClass("input-terminal-active");workflow.check_changes_in_active_form();var i=$('<div class="drag-terminal" style="position: absolute;"></div>').appendTo("#canvas-container").get(0);i.terminal=new OutputTerminal(i);var l=new Connector();l.dragging=true;l.connect(this.terminal,i.terminal);return i}).bind("drag",function(i,j){var h=function(){var l=$(j.proxy).offsetParent().offset(),k=j.offsetX-l.left,m=j.offsetY-l.top;$(j.proxy).css({left:k,top:m});j.proxy.terminal.redraw();canvas_manager.update_viewport_overlay()};h();$("#canvas-container").get(0).scroll_panel.test(i,h)}).bind("dragend",function(h,i){i.proxy.terminal.connectors[0].destroy();$(i.proxy).remove();$(i.available).removeClass("input-terminal-active");$("#canvas-container").get(0).scroll_panel.stop()});c.output_terminals[a]=f})},redraw:function(){$.each(this.input_terminals,function(a,b){b.redraw()});$.each(this.output_terminals,function(a,b){b.redraw()})},destroy:function(){$.each(this.input_terminals,function(a,b){b.destroy()});$.each(this.output_terminals,function(a,b){b.destroy()});workflow.remove_node(this);$(this.element).remove()},make_active:function(){$(this.element).addClass("toolForm-active")},make_inactive:function(){var a=this.element.get(0);(function(b){b.removeChild(a);b.appendChild(a)})(a.parentNode);$(a).removeClass("toolForm-active")},init_field_data:function(h){var g=this.element;if(h.type){this.type=h.type}this.name=h.name;this.form_html=h.form_html;this.tool_state=h.tool_state;this.tool_errors=h.tool_errors;this.tooltip=h.tooltip?h.tooltip:"";this.annotation=h.annotation;this.post_job_actions=h.post_job_actions?h.post_job_actions:{};this.workflow_outputs=h.workflow_outputs?h.workflow_outputs:[];if(this.tool_errors){g.addClass("tool-node-error")}else{g.removeClass("tool-node-error")}var d=this;var c=Math.max(150,g.width());var a=g.find(".toolFormBody");a.find("div").remove();var i=$("<div class='inputs'></div>").appendTo(a);$.each(h.data_inputs,function(k,f){var j=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(j,f.name,f.extensions,f.multiple);var b=$("<div class='form-row dataRow input-data-row' name='"+f.name+"'>"+f.label+"</div>");b.css({position:"absolute",left:-1000,top:-1000,display:"none"});$("body").append(b);c=Math.max(c,b.outerWidth());b.css({position:"",left:"",top:"",display:""});b.remove();i.append(b.prepend(j))});if((h.data_inputs.length>0)&&(h.data_outputs.length>0)){a.append($("<div class='rule'></div>"))}$.each(h.data_outputs,function(k,b){var j=$("<div class='terminal output-terminal'></div>");d.enable_output_terminal(j,b.name,b.extensions);var f=b.name;if(b.extensions.indexOf("input")<0){f=f+" ("+b.extensions.join(", ")+")"}var m=$("<div class='form-row dataRow'>"+f+"</div>");if(d.type=="tool"){var l=$("<div class='callout "+f+"'></div>").css({display:"none"}).append($("<div 
class='buttons'></div>").append($("<img/>").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png").click(function(){if($.inArray(b.name,d.workflow_outputs)!=-1){d.workflow_outputs.splice($.inArray(b.name,d.workflow_outputs),1);l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png")}else{d.workflow_outputs.push(b.name);l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png")}workflow.has_changes=true;canvas_manager.draw_overview()}))).tooltip({delay:500,title:"Flag this as a workflow output. All non-flagged outputs will be hidden."});l.css({top:"50%",margin:"-8px 0px 0px 0px",right:8});l.show();m.append(l);if($.inArray(b.name,d.workflow_outputs)===-1){l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png")}else{l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png")}m.hover(function(){l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-yellow.png")},function(){if($.inArray(b.name,d.workflow_outputs)===-1){l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png")}else{l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png")}})}m.css({position:"absolute",left:-1000,top:-1000,display:"none"});$("body").append(m);c=Math.max(c,m.outerWidth()+17);m.css({position:"",left:"",top:"",display:""});m.detach();a.append(m.append(j))});g.css("width",Math.min(250,Math.max(g.width(),c)));workflow.node_changed(this)},update_field_data:function(f){var c=$(this.element),d=this;this.tool_state=f.tool_state;this.form_html=f.form_html;this.tool_errors=f.tool_errors;this.annotation=f.annotation;var g=$.parseJSON(f.post_job_actions);this.post_job_actions=g?g:{};if(this.tool_errors){c.addClass("tool-node-error")}else{c.removeClass("tool-node-error")}var h=c.find("div.inputs");var b=$("<div class='inputs'></div>");var a=h.find("div.input-data-row");$.each(f.data_inputs,function(l,j){var k=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(k,j.name,j.extensions,j.multiple);h.find("div[name='"+j.name+"']").each(function(){$(this).find(".input-terminal").each(function(){var i=this.terminal.connectors[0];if(i){k[0].terminal.connectors[0]=i;i.handle2=k[0].terminal}});$(this).remove()});b.append($("<div class='form-row dataRow input-data-row' name='"+j.name+"'>"+j.label+"</div>").prepend(k))});h.replaceWith(b);h.find("div.input-data-row > .terminal").each(function(){this.terminal.destroy()});this.changed();this.redraw()},error:function(d){var a=$(this.element).find(".toolFormBody");a.find("div").remove();var c="<div style='color: red; text-style: italic;'>"+d+"</div>";this.form_html=c;a.html(c);workflow.node_changed(this)},changed:function(){workflow.node_changed(this)}});function Workflow(a){this.canvas_container=a;this.id_counter=0;this.nodes={};this.name=null;this.has_changes=false;this.active_form_has_changes=false}$.extend(Workflow.prototype,{add_node:function(a){a.id=this.id_counter;a.element.attr("id","wf-node-step-"+a.id);this.id_counter++;this.nodes[a.id]=a;this.has_changes=true;a.workflow=this},remove_node:function(a){if(this.active_node==a){this.clear_active_node()}delete this.nodes[a.id];this.has_changes=true},remove_all:function(){wf=this;$.each(this.nodes,function(b,a){a.destroy();wf.remove_node(a)})},rectify_workflow_outputs:function(){var 
b,a=false;$.each(this.nodes,function(c,d){if(d.workflow_outputs&&d.workflow_outputs.length>0){b=true}$.each(d.post_job_actions,function(g,f){if(f.action_type==="HideDatasetAction"){a=true}})});if(b!==false||a!==false){$.each(this.nodes,function(c,g){if(g.type==="tool"){var f=false;if(g.post_job_actions==null){g.post_job_actions={};f=true}var d=[];$.each(g.post_job_actions,function(i,h){if(h.action_type=="HideDatasetAction"){d.push(i)}});if(d.length>0&&g==workflow.active_node){$.each(d,function(h,j){f=true;delete g.post_job_actions[j]})}if(b){$.each(g.output_terminals,function(i,j){var h=true;$.each(g.workflow_outputs,function(l,m){if(j.name===m){h=false}});if(h===true){f=true;var k={action_type:"HideDatasetAction",output_name:j.name,action_arguments:{}};g.post_job_actions["HideDatasetAction"+j.name]=null;g.post_job_actions["HideDatasetAction"+j.name]=k}})}if(workflow.active_node==g&&f===true){workflow.reload_active_node()}}})}},to_simple:function(){var a={};$.each(this.nodes,function(c,f){var g={};$.each(f.input_terminals,function(i,j){g[j.name]=null;var h=[];$.each(j.connectors,function(k,l){h[k]={id:l.handle1.node.id,output_name:l.handle1.name};g[j.name]=h})});var b={};if(f.post_job_actions){$.each(f.post_job_actions,function(j,h){var k={action_type:h.action_type,output_name:h.output_name,action_arguments:h.action_arguments};b[h.action_type+h.output_name]=null;b[h.action_type+h.output_name]=k})}if(!f.workflow_outputs){f.workflow_outputs=[]}var d={id:f.id,type:f.type,tool_id:f.tool_id,tool_state:f.tool_state,tool_errors:f.tool_errors,input_connections:g,position:$(f.element).position(),annotation:f.annotation,post_job_actions:f.post_job_actions,workflow_outputs:f.workflow_outputs};a[f.id]=d});return{steps:a}},from_simple:function(b){wf=this;var c=0;wf.name=b.name;var a=false;$.each(b.steps,function(g,f){var d=prebuild_node("tool",f.name,f.tool_id);d.init_field_data(f);if(f.position){d.element.css({top:f.position.top,left:f.position.left})}d.id=f.id;wf.nodes[d.id]=d;c=Math.max(c,parseInt(g));if(!a&&d.type==="tool"){if(d.workflow_outputs.length>0){a=true}else{$.each(d.post_job_actions,function(i,h){if(h.action_type==="HideDatasetAction"){a=true}})}}});wf.id_counter=c+1;$.each(b.steps,function(g,f){var d=wf.nodes[g];$.each(f.input_connections,function(i,h){if(h){if($.isArray(h)){$.each(h,function(m,k){var n=wf.nodes[k.id];var o=new Connector();o.connect(n.output_terminals[k.output_name],d.input_terminals[i]);o.redraw()})}else{var j=wf.nodes[h.id];var l=new Connector();l.connect(j.output_terminals[h.output_name],d.input_terminals[i]);l.redraw()}}});if(a&&d.type==="tool"){$.each(d.output_terminals,function(h,i){if(d.post_job_actions["HideDatasetAction"+i.name]===undefined){d.workflow_outputs.push(i.name);callout=$(d.element).find(".callout."+i.name);callout.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png");workflow.has_changes=true}})}})},check_changes_in_active_form:function(){if(this.active_form_has_changes){this.has_changes=true;$("#right-content").find("form").submit();this.active_form_has_changes=false}},reload_active_node:function(){if(this.active_node){var a=this.active_node;this.clear_active_node();this.activate_node(a)}},clear_active_node:function(){if(this.active_node){this.active_node.make_inactive();this.active_node=null}parent.show_form_for_tool("<div>No node 
selected</div>")},activate_node:function(a){if(this.active_node!=a){this.check_changes_in_active_form();this.clear_active_node();parent.show_form_for_tool(a.form_html+a.tooltip,a);a.make_active();this.active_node=a}},node_changed:function(a){this.has_changes=true;if(this.active_node==a){this.check_changes_in_active_form();parent.show_form_for_tool(a.form_html+a.tooltip,a)}},layout:function(){this.check_changes_in_active_form();this.has_changes=true;var i={};var b={};$.each(this.nodes,function(l,k){if(i[l]===undefined){i[l]=0}if(b[l]===undefined){b[l]=[]}});$.each(this.nodes,function(l,k){$.each(k.input_terminals,function(m,n){$.each(n.connectors,function(p,q){var o=q.handle1.node;i[k.id]+=1;b[o.id].push(k.id)})})});node_ids_by_level=[];while(true){level_parents=[];for(var a in i){if(i[a]==0){level_parents.push(a)}}if(level_parents.length==0){break}node_ids_by_level.push(level_parents);for(var f in level_parents){var j=level_parents[f];delete i[j];for(var g in b[j]){i[b[j][g]]-=1}}}if(i.length){return}var d=this.nodes;var h=80;v_pad=30;var c=h;$.each(node_ids_by_level,function(k,l){l.sort(function(p,o){return $(d[p].element).position().top-$(d[o].element).position().top});var m=0;var n=v_pad;$.each(l,function(o,r){var q=d[r];var p=$(q.element);$(p).css({top:n,left:c});m=Math.max(m,$(p).width());n+=$(p).height()+v_pad});c+=m+h});$.each(d,function(k,l){l.redraw()})},bounds_for_all_nodes:function(){var d=Infinity,b=-Infinity,c=Infinity,a=-Infinity,f;$.each(this.nodes,function(h,g){e=$(g.element);f=e.position();d=Math.min(d,f.left);b=Math.max(b,f.left+e.width());c=Math.min(c,f.top);a=Math.max(a,f.top+e.width())});return{xmin:d,xmax:b,ymin:c,ymax:a}},fit_canvas_to_nodes:function(){var a=this.bounds_for_all_nodes();var f=this.canvas_container.position();var i=this.canvas_container.parent();var d=fix_delta(a.xmin,100);var h=fix_delta(a.ymin,100);d=Math.max(d,f.left);h=Math.max(h,f.top);var c=f.left-d;var g=f.top-h;var b=round_up(a.xmax+100,100)+d;var j=round_up(a.ymax+100,100)+h;b=Math.max(b,-c+i.width());j=Math.max(j,-g+i.height());this.canvas_container.css({left:c,top:g,width:b,height:j});this.canvas_container.children().each(function(){var k=$(this).position();$(this).css("left",k.left+d);$(this).css("top",k.top+h)})}});function fix_delta(a,b){if(a<b||a>3*b){new_pos=(Math.ceil(((a%b))/b)+1)*b;return(-(a-new_pos))}return 0}function round_up(a,b){return Math.ceil(a/b)*b}function prebuild_node(l,j,r){var i=$("<div class='toolForm toolFormInCanvas'></div>");var g=new Node(i);g.type=l;if(l=="tool"){g.tool_id=r}var n=$("<div class='toolFormTitle unselectable'>"+j+"</div>");i.append(n);i.css("left",$(window).scrollLeft()+20);i.css("top",$(window).scrollTop()+20);var m=$("<div class='toolFormBody'></div>");var h="<div><img height='16' align='middle' src='"+galaxy_paths.attributes.image_path+"/loading_small_white_bg.gif'/> loading tool info...</div>";m.append(h);g.form_html=h;i.append(m);var k=$("<div class='buttons' style='float: right;'></div>");k.append($("<img/>").attr("src",galaxy_paths.attributes.image_path+"/delete_icon.png").click(function(b){g.destroy()}).hover(function(){$(this).attr("src",galaxy_paths.attributes.image_path+"/delete_icon_dark.png")},function(){$(this).attr("src",galaxy_paths.attributes.image_path+"/delete_icon.png")}));i.appendTo("#canvas-container");var d=$("#canvas-container").position();var c=$("#canvas-container").parent();var a=i.width();var 
q=i.height();i.css({left:(-d.left)+(c.width()/2)-(a/2),top:(-d.top)+(c.height()/2)-(q/2)});k.prependTo(n);a+=(k.width()+10);i.css("width",a);$(i).bind("dragstart",function(){workflow.activate_node(g)}).bind("dragend",function(){workflow.node_changed(this);workflow.fit_canvas_to_nodes();canvas_manager.draw_overview()}).bind("dragclickonly",function(){workflow.activate_node(g)}).bind("drag",function(o,p){var f=$(this).offsetParent().offset(),b=p.offsetX-f.left,s=p.offsetY-f.top;$(this).css({left:b,top:s});$(this).find(".terminal").each(function(){this.terminal.redraw()})});return g}var ext_to_type=null;var type_to_type=null;function issubtype(b,a){b=ext_to_type[b];a=ext_to_type[a];return(type_to_type[b])&&(a in type_to_type[b])}function populate_datatype_info(a){ext_to_type=a.ext_to_class_name;type_to_type=a.class_to_classes}function ScrollPanel(a){this.panel=a}$.extend(ScrollPanel.prototype,{test:function(v,d){clearTimeout(this.timeout);var k=v.pageX,j=v.pageY,l=$(this.panel),c=l.position(),b=l.width(),i=l.height(),w=l.parent(),s=w.width(),a=w.height(),r=w.offset(),p=r.left,m=r.top,A=p+w.width(),u=m+w.height(),B=-(b-(s/2)),z=-(i-(a/2)),g=(s/2),f=(a/2),h=false,q=5,o=23;if(k-q<p){if(c.left<g){var n=Math.min(o,g-c.left);l.css("left",c.left+n);h=true}}else{if(k+q>A){if(c.left>B){var n=Math.min(o,c.left-B);l.css("left",c.left-n);h=true}}else{if(j-q<m){if(c.top<f){var n=Math.min(o,f-c.top);l.css("top",c.top+n);h=true}}else{if(j+q>u){if(c.top>z){var n=Math.min(o,c.top-B);l.css("top",(c.top-n)+"px");h=true}}}}}if(h){d();var l=this;this.timeout=setTimeout(function(){l.test(v,d)},50)}},stop:function(b,a){clearTimeout(this.timeout)}});function CanvasManager(b,a){this.cv=b;this.cc=this.cv.find("#canvas-container");this.oc=a.find("#overview-canvas");this.ov=a.find("#overview-viewport");this.init_drag()}$.extend(CanvasManager.prototype,{init_drag:function(){var b=this;var a=function(f,g){f=Math.min(f,b.cv.width()/2);f=Math.max(f,-b.cc.width()+b.cv.width()/2);g=Math.min(g,b.cv.height()/2);g=Math.max(g,-b.cc.height()+b.cv.height()/2);b.cc.css({left:f,top:g});b.update_viewport_overlay()};this.cc.each(function(){this.scroll_panel=new ScrollPanel(this)});var d,c;this.cv.bind("dragstart",function(){var g=$(this).offset();var f=b.cc.position();c=f.top-g.top;d=f.left-g.left}).bind("drag",function(f,g){a(g.offsetX+d,g.offsetY+c)}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});this.ov.bind("drag",function(k,l){var h=b.cc.width(),n=b.cc.height(),m=b.oc.width(),j=b.oc.height(),f=$(this).offsetParent().offset(),i=l.offsetX-f.left,g=l.offsetY-f.top;a(-(i/m*h),-(g/j*n))}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});$("#overview-border").bind("drag",function(g,i){var j=$(this).offsetParent();var h=j.offset();var f=Math.max(j.width()-(i.offsetX-h.left),j.height()-(i.offsetY-h.top));$(this).css({width:f,height:f});b.draw_overview()});$("#overview-border div").bind("drag",function(){})},update_viewport_overlay:function(){var b=this.cc,f=this.cv,a=this.oc,c=this.ov,d=b.width(),j=b.height(),i=a.width(),g=a.height(),h=b.position();c.css({left:-(h.left/d*i),top:-(h.top/j*g),width:(f.width()/d*i)-2,height:(f.height()/j*g)-2})},draw_overview:function(){var j=$("#overview-canvas"),m=j.parent().parent().width(),i=j.get(0).getContext("2d"),d=$("#canvas-container").width(),l=$("#canvas-container").height();var g,a,k,f;var h=this.cv.width();var 
b=this.cv.height();if(d<h&&l<b){k=d/h*m;f=(m-k)/2;g=l/b*m;a=(m-g)/2}else{if(d<l){a=0;g=m;k=Math.ceil(g*d/l);f=(m-k)/2}else{k=m;f=0;g=Math.ceil(k*l/d);a=(m-g)/2}}j.parent().css({left:f,top:a,width:k,height:g});j.attr("width",k);j.attr("height",g);$.each(workflow.nodes,function(t,q){i.fillStyle="#D2C099";i.strokeStyle="#D8B365";i.lineWidth=1;var s=$(q.element),n=s.position(),c=n.left/d*k,r=n.top/l*g,o=s.width()/d*k,p=s.height()/l*g;if(q.tool_errors){i.fillStyle="#FFCCCC";i.strokeStyle="#AA6666"}else{if(q.workflow_outputs!=undefined&&q.workflow_outputs.length>0){i.fillStyle="#E8A92D";i.strokeStyle="#E8A92D"}}i.fillRect(c,r,o,p);i.strokeRect(c,r,o,p)});this.update_viewport_overlay()}});
\ No newline at end of file
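
The packed workflow-editor script above is minified, but its Workflow.layout routine is recognizably a Kahn-style topological leveling: it counts incoming connectors per node (the map i), records child ids per parent (the map b), repeatedly peels off every node whose inputs are already placed, and finally positions each level as a column, left to right, with fixed padding. Below is a minimal, hypothetical sketch of just that leveling step; levelLayout, nodeIds, and the {from, to} edge shape are simplified stand-ins for the editor's real terminal/connector objects, not its actual API.

// Kahn-style leveling, as in Workflow.layout above (simplified shapes).
function levelLayout(nodeIds, edges) {
    var indegree = {}, children = {};
    nodeIds.forEach(function (id) {
        indegree[id] = 0;   // incoming-edge count per node
        children[id] = [];  // child ids per node
    });
    edges.forEach(function (e) {
        indegree[e.to] += 1;
        children[e.from].push(e.to);
    });
    var levels = [];
    while (true) {
        // Every node whose inputs are all placed goes into the next column.
        var ready = Object.keys(indegree).filter(function (id) {
            return indegree[id] === 0;
        });
        if (ready.length === 0) { break; }
        levels.push(ready);
        ready.forEach(function (id) {
            delete indegree[id];
            children[id].forEach(function (child) {
                indegree[child] -= 1;
            });
        });
    }
    // Leftover entries mean a cycle; report that instead of guessing.
    return Object.keys(indegree).length === 0 ? levels : null;
}

// Example: a diamond-shaped workflow a -> (b, c) -> d:
// levelLayout(["a", "b", "c", "d"],
//             [{from: "a", to: "b"}, {from: "a", to: "c"},
//              {from: "b", to: "d"}, {from: "c", to: "d"}])
// yields [["a"], ["b", "c"], ["d"]].

One detail worth flagging: the packed code's own cycle guard, if(i.length){return}, tests .length on a plain object, which is always undefined, so that guard can never fire; the sketch above checks the remaining key count explicitly instead.
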
Repository URL: https://bitbucket.org/galaxy/galaxy-central/