galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
March 2013
- 1 participants
- 183 discussions
commit/galaxy-central: greg: Fix a few typos in my last commit.
by commits-noreply@bitbucket.org 25 Mar '13
by commits-noreply@bitbucket.org 25 Mar '13
25 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7df0d7eb3a06/
Changeset: 7df0d7eb3a06
User: greg
Date: 2013-03-25 22:18:17
Summary: Fix a few typos in my last commit.
Affected #: 3 files
diff -r efe376544e2263ec8f9788d71d25dd2b4b49a39d -r 7df0d7eb3a0670a299d571683daba26cbc692f0b lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -168,7 +168,7 @@
shed_tool_conf = payload.get( 'shed_tool_conf', None )
tool_path = payload.get( 'tool_path', None )
tool_panel_section_id = payload.get( 'tool_panel_section_id', '' )
- if tool_panel_section_id is not None:
+ if tool_panel_section_id not in [ None, '' ]:
tool_panel_section = trans.app.toolbox.tool_panel[ tool_panel_section_id ]
else:
tool_panel_section = ''
diff -r efe376544e2263ec8f9788d71d25dd2b4b49a39d -r 7df0d7eb3a0670a299d571683daba26cbc692f0b scripts/api/install_repository_tools_into_new_tool_panel_section.py
--- a/scripts/api/install_repository_tools_into_new_tool_panel_section.py
+++ b/scripts/api/install_repository_tools_into_new_tool_panel_section.py
@@ -4,10 +4,10 @@
valid tools, loading them into a new section of the Galaxy tool panel. The repository has no tool dependencies or repository dependencies, so only
a single repository will be installed.
-usage: ./install_repository_tools_into_existing_tool_panel_section <api_key <galaxy base url> tool_shed_url name owner changeset_revision new_tool_panel_section_label
+usage: ./install_repository_tools_into_new_tool_panel_section <api_key <galaxy base url> tool_shed_url name owner changeset_revision new_tool_panel_section_label
Here is a working example of how to use this script to install a repository from the test tool shed.
-./install_repository_tools_into_existing_tool_panel_section.py <api key><galaxy base url>/api/tool_shed_repositories/new/install_repository_revision http://testtoolshed.g2.bx.psu.edu gregs_filter greg f28d5018f9cb From%20Test%20Tool%20Shed
+./install_repository_tools_into_new_tool_panel_section.py <api key><galaxy base url>/api/tool_shed_repositories/new/install_repository_revision http://testtoolshed.g2.bx.psu.edu gregs_filter greg f28d5018f9cb From%20Test%20Tool%20Shed
"""
import os
diff -r efe376544e2263ec8f9788d71d25dd2b4b49a39d -r 7df0d7eb3a0670a299d571683daba26cbc692f0b scripts/api/install_repository_with_repository_dependencies.py
--- a/scripts/api/install_repository_with_repository_dependencies.py
+++ b/scripts/api/install_repository_with_repository_dependencies.py
@@ -4,10 +4,10 @@
repository dependencies, so multiple repositories will ultimately be installed. Since no Galaxy tool panel section information is used, all tools
contained in the installed repositories will be loaded into the Galaxy tool panel outside of any sections.
-usage: ./install_repository_tools_into_existing_tool_panel_section <api_key <galaxy base url> tool_shed_url name owner changeset_revision True
+usage: ./install_repository_with_repository_dependencies.py <api_key <galaxy base url> tool_shed_url name owner changeset_revision True
Here is a working example of how to use this script to install a repository from the test tool shed.
-./install_repository_tools_into_existing_tool_panel_section.py <api key><galaxy base url>/api/tool_shed_repositories/new/install_repository_revision http://testtoolshed.g2.bx.psu.edu emboss_5 devteam 8ddad0c9a75a True
+./install_repository_with_repository_dependencies.py <api key><galaxy base url>/api/tool_shed_repositories/new/install_repository_revision http://testtoolshed.g2.bx.psu.edu emboss_5 devteam 8ddad0c9a75a True
"""
import os
@@ -32,4 +32,4 @@
print 'usage: %s key url tool_shed_url name owner changeset_revision install_repository_dependencies' % os.path.basename( sys.argv[ 0 ] )
sys.exit( 1 )
-submit( sys.argv[ 1 ], sys.argv[ 2 ], data )
\ No newline at end of file
+submit( sys.argv[ 1 ], sys.argv[ 2 ], data )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Add baseline support for installing tool shed repositories from a specified tool shed into Galaxy via a combination of the tool shed API and the now enhanced Galaxy API. There are 3 new example scripts in ~/scripts/api that demonstrate how this works. They are named like: install_repository...
by commits-noreply@bitbucket.org 25 Mar '13
by commits-noreply@bitbucket.org 25 Mar '13
25 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/efe376544e22/
Changeset: efe376544e22
User: greg
Date: 2013-03-25 22:05:14
Summary: Add baseline support for installing tool shed repositories from a specified tool shed into Galaxy via a combination of the tool shed API and the now enhanced Galaxy API. There are 3 new example scripts in ~/scripts/api that demonstrate how this works. They are named like: install_repository...
In this implementation, all tools contained in repositories will be loaded into the Galaxy tool panel.
The user can specify the following when sending a POST to the new Galaxy API URL:
/api/tool_shed_repositories/new/install_repository_revision
1) an existing tool panel section id for loading tools or a new tool panel section label in which case a new ToolPanelSection will be created for containing the tools.
2) if they want tool dependencies installed along with the repository.
3) if they want repository dependencies installed along with the repository
4) which defined shed-related tool panel config file to use for installed tools
Affected #: 10 files
diff -r e741400c6578993c3bfe5faf184dd56518a2360a -r efe376544e2263ec8f9788d71d25dd2b4b49a39d lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -0,0 +1,324 @@
+import logging
+import urllib2
+from galaxy.util import json
+from galaxy import util
+from galaxy import web
+from galaxy.web.base.controller import BaseAPIController
+from tool_shed.galaxy_install import repository_util
+import tool_shed.util.shed_util_common as suc
+
+log = logging.getLogger( __name__ )
+
+def default_tool_shed_repository_value_mapper( trans, tool_shed_repository ):
+ value_mapper={ 'id' : trans.security.encode_id( tool_shed_repository.id ),
+ 'error_message' : tool_shed_repository.error_message or '' }
+ return value_mapper
+
+def get_message_for_no_shed_tool_config():
+ # This Galaxy instance is not configured with a shed-related tool panel configuration file.
+ message = 'The tool_config_file setting in universe_wsgi.ini must include at least one shed tool configuration file name with a <toolbox> '
+ message += 'tag that includes a tool_path attribute value which is a directory relative to the Galaxy installation directory in order to '
+ message += 'automatically install tools from a tool shed into Galaxy (e.g., the file name shed_tool_conf.xml whose <toolbox> tag is '
+ message += '<toolbox tool_path="../shed_tools">). For details, see the "Installation of Galaxy tool shed repository tools into a local '
+ message += 'Galaxy instance" section of the Galaxy tool shed wiki at http://wiki.galaxyproject.org/InstallingRepositoriesToGalaxy#'
+ message += 'Installing_Galaxy_tool_shed_repository_tools_into_a_local_Galaxy_instance.'
+ return message
+
+class ToolShedRepositoriesController( BaseAPIController ):
+ """RESTful controller for interactions with tool shed repositories."""
+
+ @web.expose_api
+ def index( self, trans, **kwd ):
+ """
+ GET /api/tool_shed_repositories
+ Display a list of dictionaries containing information about installed tool shed repositories.
+ """
+ # Example URL: http://localhost:8763/api/tool_shed_repositories
+ tool_shed_repository_dicts = []
+ try:
+ query = trans.sa_session.query( trans.app.model.ToolShedRepository ) \
+ .order_by( trans.app.model.ToolShedRepository.table.c.name ) \
+ .all()
+ for tool_shed_repository in query:
+ tool_shed_repository_dict = tool_shed_repository.get_api_value( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
+ tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+ action='show',
+ id=trans.security.encode_id( tool_shed_repository.id ) )
+ tool_shed_repository_dicts.append( tool_shed_repository_dict )
+ return tool_shed_repository_dicts
+ except Exception, e:
+ message = "Error in the tool_shed_repositories API in index: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+
+ @web.expose_api
+ def show( self, trans, id, **kwd ):
+ """
+ GET /api/tool_shed_repositories/{encoded_tool_shed_repsository_id}
+ Display a dictionary containing information about a specified tool_shed_repository.
+
+ :param tool_shed_repository_id: the encoded id of the `ToolShedRepository` object
+ """
+ # Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e
+ try:
+ tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id )
+ tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
+ tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+ action='show',
+ id=trans.security.encode_id( tool_shed_repository.id ) )
+ return tool_shed_repository_dict
+ except Exception, e:
+ message = "Error in tool_shed_repositories API in index: " + str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+
+ @web.expose_api
+ def install_repository_revision( self, trans, payload, **kwd ):
+ """
+ POST /api/tool_shed_repositories/install_repository_revision
+ Install a specified repository revision from a specified tool shed into Galaxy.
+
+ :param tool_shed_url: the base URL of the Tool Shed from which to install the Repository
+ :param name: the name of the Repository
+ :param owner: the owner of the Repository
+ :param changset_revision: the changset_revision of the RepositoryMetadata object associated with the Repository
+ :param key: the current Galaxy admin user's API key
+ :param new_tool_panel_section_label: optional label of a new section to be added to the Galaxy tool panel in which to load
+ tools contained in the Repository. Either this parameter must be an empty string or
+ the tool_panel_section_id parameter must be an empty string, as both cannot be used.
+ :param tool_panel_section_id: optional id of the Galaxy tool panel section in which to load tools contained in the Repository.
+ If not set, tools will be loaded outside of any sections in the tool panel. Either this
+ parameter must be an empty string or the tool_panel_section_id parameter must be an empty string,
+ as both cannot be used.
+ """
+ # Get the information about the repository to be installed from the payload.
+ tool_shed_url = payload.get( 'tool_shed_url', '' )
+ if not tool_shed_url:
+ raise HTTPBadRequest( detail="Missing required parameter 'tool_shed_url'." )
+ name = payload.get( 'name', '' )
+ if not name:
+ raise HTTPBadRequest( detail="Missing required parameter 'name'." )
+ owner = payload.get( 'owner', '' )
+ if not owner:
+ raise HTTPBadRequest( detail="Missing required parameter 'owner'." )
+ changeset_revision = payload.get( 'changeset_revision', '' )
+ if not changeset_revision:
+ raise HTTPBadRequest( detail="Missing required parameter 'changeset_revision'." )
+ # Make sure this Galaxy instance is configured with a shed-related tool panel configuration file.
+ if not suc.have_shed_tool_conf_for_install( trans ):
+ message = get_message_for_no_shed_tool_config()
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ # Make sure the current user's API key proves he is an admin user in this Galaxy instance.
+ if not trans.user_is_admin():
+ raise HTTPForbidden( detail='You are not authorized to install a tool shed repository into this Galaxy instance.' )
+ # Keep track of all repositories that are installed - there may be more than one if repository dependencies are installed.
+ installed_tool_shed_repositories = []
+ # Get all of the information necessary for installing the repository from the specified tool shed.
+ url = suc.url_join( tool_shed_url,
+ 'api/repositories/get_repository_revision_install_info?name=%s&owner=%s&changeset_revision=%s' % \
+ ( name, owner, changeset_revision ) )
+ try:
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ except Exception, e:
+ message = "Error attempting to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \
+ ( str( tool_shed_url ), str( name ), str( owner ), str( changeset_revision ), str( e ) )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ if raw_text:
+ items = json.from_json_string( raw_text )
+ repository_dict = items[ 0 ]
+ repository_revision_dict = items[ 1 ]
+ repo_info_dict = items[ 2 ]
+ else:
+ message = "Unable to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \
+ ( str( tool_shed_url ), str( name ), str( owner ), str( changeset_revision ) )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ repo_info_dicts = [ repo_info_dict ]
+ # Make sure the tool shed returned everything we need for installing the repository.
+ try:
+ has_repository_dependencies = repository_revision_dict[ 'has_repository_dependencies' ]
+ except:
+ raise HTTPBadRequest( detail="Missing required parameter 'has_repository_dependencies'." )
+ try:
+ includes_tools = repository_revision_dict[ 'includes_tools' ]
+ except:
+ raise HTTPBadRequest( detail="Missing required parameter 'includes_tools'." )
+ try:
+ includes_tool_dependencies = repository_revision_dict[ 'includes_tool_dependencies' ]
+ except:
+ raise HTTPBadRequest( detail="Missing required parameter 'includes_tool_dependencies'." )
+ try:
+ includes_tools_for_display_in_tool_panel = repository_revision_dict[ 'includes_tools_for_display_in_tool_panel' ]
+ except:
+ raise HTTPBadRequest( detail="Missing required parameter 'includes_tools_for_display_in_tool_panel'." )
+ # Get the information about the Galaxy components (e.g., tool pane section, tool config file, etc) that will contain the repository
+ # information.
+ install_repository_dependencies = payload.get( 'install_repository_dependencies', False )
+ install_tool_dependencies = payload.get( 'install_tool_dependencies', False )
+ new_tool_panel_section = payload.get( 'new_tool_panel_section_label', '' )
+ shed_tool_conf = payload.get( 'shed_tool_conf', None )
+ tool_path = payload.get( 'tool_path', None )
+ tool_panel_section_id = payload.get( 'tool_panel_section_id', '' )
+ if tool_panel_section_id is not None:
+ tool_panel_section = trans.app.toolbox.tool_panel[ tool_panel_section_id ]
+ else:
+ tool_panel_section = ''
+ if not shed_tool_conf or not tool_path:
+ # Pick a semi-random shed-related tool panel configuration file.
+ for shed_config_dict in trans.app.toolbox.shed_tool_confs:
+ if shed_config_dict[ 'config_filename' ] != trans.app.config.migrated_tools_config:
+ break
+ shed_tool_conf = shed_config_dict[ 'config_filename' ]
+ tool_path = shed_config_dict[ 'tool_path' ]
+ if not shed_tool_conf:
+ raise HTTPBadRequest( detail="Missing required parameter 'shed_tool_conf'." )
+ if not tool_path:
+ raise HTTPBadRequest( detail="Missing required parameter 'tool_path'." )
+ # Build the dictionary of information necessary for creating tool_shed_repository database records for each repository being installed.
+ installation_dict = dict( install_repository_dependencies=install_repository_dependencies,
+ new_tool_panel_section=new_tool_panel_section,
+ no_changes_checked=False,
+ reinstalling=False,
+ repo_info_dicts=repo_info_dicts,
+ tool_panel_section=tool_panel_section,
+ tool_path=tool_path,
+ tool_shed_url=tool_shed_url )
+ # Create the tool_shed_repository database records and gather additional information for repository installation.
+ created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
+ repository_util.handle_tool_shed_repositories( trans, installation_dict, using_api=True )
+ if message and len( repo_info_dicts ) == 1:
+ # We're attempting to install a single repository that has already been installed into this Galaxy instance.
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ if created_or_updated_tool_shed_repositories:
+ # Build the dictionary of information necessary for installing the repositories.
+ installation_dict = dict( created_or_updated_tool_shed_repositories=created_or_updated_tool_shed_repositories,
+ filtered_repo_info_dicts=filtered_repo_info_dicts,
+ has_repository_dependencies=has_repository_dependencies,
+ includes_tool_dependencies=includes_tool_dependencies,
+ includes_tools=includes_tools,
+ includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+ install_repository_dependencies=install_repository_dependencies,
+ install_tool_dependencies=install_tool_dependencies,
+ message='',
+ new_tool_panel_section=new_tool_panel_section,
+ shed_tool_conf=shed_tool_conf,
+ status='done',
+ tool_panel_section=tool_panel_section,
+ tool_panel_section_keys=tool_panel_section_keys,
+ tool_path=tool_path,
+ tool_shed_url=tool_shed_url )
+ # Prepare the repositories for installation. Even though this method receives a single combination of tool_shed_url, name, owner and
+ # changeset_revision, there may be multiple repositories for installation at this point because repository dependencies may have added
+ # additional repositories for installation along with the single specified repository.
+ encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = repository_util.initiate_repository_installation( trans, installation_dict )
+ # Install the repositories, keeping track of each one for later display.
+ for index, tool_shed_repository in enumerate( tool_shed_repositories ):
+ repo_info_dict = repo_info_dicts[ index ]
+ tool_panel_section_key = tool_panel_section_keys[ index ]
+ repository_util.install_tool_shed_repository( trans,
+ tool_shed_repository,
+ repo_info_dict,
+ tool_panel_section_key,
+ shed_tool_conf,
+ tool_path,
+ install_tool_dependencies,
+ reinstalling=False )
+ tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
+ tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+ action='show',
+ id=trans.security.encode_id( tool_shed_repository.id ) )
+ installed_tool_shed_repositories.append( tool_shed_repository_dict )
+ else:
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ # Display the list of installed repositories.
+ return installed_tool_shed_repositories
+
+ @web.expose_api
+ def install_repository_revisions( self, trans, payload, **kwd ):
+ """
+ POST /api/tool_shed_repositories/install_repository_revisions
+ Install one or more specified repository revisions from one or more specified tool sheds into Galaxy. The received parameters
+ must be ordered lists so that positional values in tool_shed_urls, names, owners and changeset_revisions are associated.
+
+ It's questionable whether this method is needed as the above method for installing a single repository can probably cover all
+ desired scenarios. We'll keep this one around just in case...
+
+ :param tool_shed_urls: the base URLs of the Tool Sheds from which to install a specified Repository
+ :param names: the names of the Repositories to be installed
+ :param owners: the owners of the Repositories to be installed
+ :param changset_revisions: the changset_revisions of each RepositoryMetadata object associated with each Repository to be installed
+ :param key: the current Galaxy admin user's API key
+ :param new_tool_panel_section_label: optional label of a new section to be added to the Galaxy tool panel in which to load
+ tools contained in the Repository. Either this parameter must be an empty string or
+ the tool_panel_section_id parameter must be an empty string, as both cannot be used.
+ :param tool_panel_section_id: optional id of the Galaxy tool panel section in which to load tools contained in the Repository.
+ If not set, tools will be loaded outside of any sections in the tool panel. Either this
+ parameter must be an empty string or the tool_panel_section_id parameter must be an empty string,
+ as both cannot be used.
+ """
+ if not suc.have_shed_tool_conf_for_install( trans ):
+ # This Galaxy instance is not configured with a shed-related tool panel configuration file.
+ message = get_message_for_no_shed_tool_config()
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ if not trans.user_is_admin():
+ raise HTTPForbidden( detail='You are not authorized to install a tool shed repository into this Galaxy instance.' )
+ # Get the information about all of the repositories to be installed.
+ tool_shed_urls = util.listify( payload.get( 'tool_shed_urls', '' ) )
+ names = util.listify( payload.get( 'names', '' ) )
+ owners = util.listify( payload.get( 'owners', '' ) )
+ changeset_revisions = util.listify( payload.get( 'changeset_revisions', '' ) )
+ num_specified_repositories = len( tool_shed_urls )
+ if len( names ) != num_specified_repositories or \
+ len( owners ) != num_specified_repositories or \
+ len( changeset_revisions ) != num_specified_repositories:
+ message = 'Error in tool_shed_repositories API in install_repository_revisions: the received parameters must be ordered '
+ message += 'lists so that positional values in tool_shed_urls, names, owners and changeset_revisions are associated.'
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ # Get the information about the Galaxy components (e.g., tool pane section, tool config file, etc) that will contain information
+ # about each of the repositories being installed.
+ # TODO: we may want to enhance this method to allow for each of the following to be associated with each repository instead of
+ # forcing all repositories to use the same settings.
+ install_repository_dependencies = payload.get( 'install_repository_dependencies', False )
+ install_tool_dependencies = payload.get( 'install_tool_dependencies', False )
+ new_tool_panel_section = payload.get( 'new_tool_panel_section_label', '' )
+ shed_tool_conf = payload.get( 'shed_tool_conf', None )
+ tool_path = payload.get( 'tool_path', None )
+ tool_panel_section_id = payload.get( 'tool_panel_section_id', '' )
+ all_installed_tool_shed_repositories = []
+ for index, tool_shed_url in enumerate( tool_shed_urls ):
+ current_payload = {}
+ current_payload[ 'tool_shed_url' ] = tool_shed_url
+ current_payload[ 'name' ] = names[ index ]
+ current_payload[ 'owner' ] = owners[ index ]
+ current_payload[ 'changeset_revision' ] = changeset_revisions[ index ]
+ current_payload[ 'install_repository_dependencies' ] = install_repository_dependencies
+ current_payload[ 'install_tool_dependencies' ] = install_tool_dependencies
+ current_payload[ 'new_tool_panel_section' ] = new_tool_panel_section
+ current_payload[ 'shed_tool_conf' ] = shed_tool_conf
+ current_payload[ 'tool_path' ] = tool_path
+ current_payload[ 'tool_panel_section_id' ] = tool_panel_section_id
+ installed_tool_shed_repositories = self.install_repository_revision( trans, **current_payload )
+ if isinstance( installed_tool_shed_repositories, dict ):
+ # We encountered an error.
+ return installed_tool_shed_repositories
+ elif isinstance( installed_tool_shed_repositories, list ):
+ all_installed_tool_shed_repositories.extend( installed_tool_shed_repositories )
+ return all_installed_tool_shed_repositories
+
\ No newline at end of file
diff -r e741400c6578993c3bfe5faf184dd56518a2360a -r efe376544e2263ec8f9788d71d25dd2b4b49a39d lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -104,12 +104,6 @@
name_prefix='group_',
path_prefix='/api/groups/:group_id',
parent_resources=dict( member_name='group', collection_name='groups' ) )
- webapp.api_mapper.resource( 'content',
- 'contents',
- controller='tool_shed_repository_contents',
- name_prefix='tool_shed_repository_',
- path_prefix='/api/tool_shed_repositories/:tool_shed_repository_id',
- parent_resources=dict( member_name='tool_shed_repository', collection_name='tool_shed_repositories' ) )
_add_item_tags_controller( webapp,
name_prefix="history_content_",
path_prefix='/api/histories/:history_id/contents/:history_content_id' )
@@ -142,7 +136,6 @@
webapp.api_mapper.resource( 'tool', 'tools', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' )
webapp.api_mapper.resource( 'genome', 'genomes', path_prefix='/api' )
- webapp.api_mapper.resource( 'tool_shed_repository', 'tool_shed_repositories', path_prefix='/api' )
webapp.api_mapper.resource( 'visualization', 'visualizations', path_prefix='/api' )
webapp.api_mapper.resource( 'workflow', 'workflows', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'history', 'histories', path_prefix='/api' )
@@ -155,7 +148,14 @@
webapp.api_mapper.connect("workflow_dict", '/api/workflows/{workflow_id}/download', controller='workflows', action='workflow_dict', conditions=dict(method=['GET']))
# Preserve the following download route for now for dependent applications -- deprecate at some point
webapp.api_mapper.connect("workflow_dict", '/api/workflows/download/{workflow_id}', controller='workflows', action='workflow_dict', conditions=dict(method=['GET']))
-
+ # Galaxy API for tool shed features.
+ webapp.api_mapper.resource( 'tool_shed_repository',
+ 'tool_shed_repositories',
+ controller='tool_shed_repositories',
+ name_prefix='tool_shed_repository_',
+ path_prefix='/api',
+ new={ 'install_repository_revision' : 'POST' },
+ parent_resources=dict( member_name='tool_shed_repository', collection_name='tool_shed_repositories' ) )
# Connect logger from app
if app.trace_logger:
webapp.trace_logger = app.trace_logger
diff -r e741400c6578993c3bfe5faf184dd56518a2360a -r efe376544e2263ec8f9788d71d25dd2b4b49a39d lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -846,7 +846,7 @@
tool_path=tool_path,
tool_shed_url=tool_shed_url )
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
- repository_util.handle_tool_shed_repositories( trans, installation_dict )
+ repository_util.handle_tool_shed_repositories( trans, installation_dict, using_api=False )
if message and len( repo_info_dicts ) == 1:
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='browse_repositories',
diff -r e741400c6578993c3bfe5faf184dd56518a2360a -r efe376544e2263ec8f9788d71d25dd2b4b49a39d lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -1,9 +1,10 @@
import logging
from galaxy.web.framework.helpers import time_ago
-import tool_shed.util.shed_util_common as suc
from galaxy import web
from galaxy import util
from galaxy.web.base.controller import BaseAPIController
+import tool_shed.util.shed_util_common as suc
+from tool_shed.galaxy_install import repository_util
from galaxy import eggs
import pkg_resources
@@ -29,7 +30,7 @@
class RepositoriesController( BaseAPIController ):
"""RESTful controller for interactions with repositories in the Tool Shed."""
- @web.expose_api
+ @web.expose_api_anonymous
def index( self, trans, deleted=False, **kwd ):
"""
GET /api/repositories
@@ -56,7 +57,7 @@
trans.response.status = 500
return message
- @web.expose_api
+ @web.expose_api_anonymous
def show( self, trans, id, **kwd ):
"""
GET /api/repositories/{encoded_repository_id}
@@ -78,17 +79,59 @@
trans.response.status = 500
return message
- @web.expose_api
- def get_repository_and_revision( self, trans, name, owner, changeset_revision, **kwd ):
+ @web.expose_api_anonymous
+ def get_repository_revision_install_info( self, trans, name, owner, changeset_revision, **kwd ):
"""
- GET /api/repository/get_repository_and_revision
- Returns information about a repository revision in the Tool Shed.
+ GET /api/repository/get_repository_revision_install_info
- :param name: the name of the Repository object
- :param owner: the owner of the Repository object
- :param changset_revision: the changset_revision of the RepositoryMetadata object associated with the Repository object
+ :param name: the name of the Repository
+ :param owner: the owner of the Repository
+ :param changeset_revision: the changeset_revision of the RepositoryMetadata object associated with the Repository
+
+ Returns a list of the following dictionaries::
+ - a dictionary defining the Repository. For example:
+ {
+ "deleted": false,
+ "deprecated": false,
+ "description": "add_column hello",
+ "id": "f9cad7b01a472135",
+ "long_description": "add_column hello",
+ "name": "add_column",
+ "owner": "test",
+ "private": false,
+ "times_downloaded": 6,
+ "url": "/api/repositories/f9cad7b01a472135",
+ "user_id": "f9cad7b01a472135"
+ }
+ - a dictionary defining the Repository revision (RepositoryMetadata). For example:
+ {
+ "changeset_revision": "3a08cc21466f",
+ "downloadable": true,
+ "has_repository_dependencies": false,
+ "id": "f9cad7b01a472135",
+ "includes_datatypes": false,
+ "includes_tool_dependencies": false,
+ "includes_tools": true,
+ "includes_tools_for_display_in_tool_panel": true,
+ "includes_workflows": false,
+ "malicious": false,
+ "repository_id": "f9cad7b01a472135",
+ "url": "/api/repository_revisions/f9cad7b01a472135"
+ }
+ - a dictionary including the additional information required to install the repository. For example:
+ {
+ "add_column": [
+ "add_column hello",
+ "http://test@localhost:9009/repos/test/add_column",
+ "3a08cc21466f",
+ "1",
+ "test",
+ {},
+ {}
+ ]
+ }
"""
- # Example URL: http://localhost:9009/api/repositories/get_repository_and_revision?name=add…
+ # Example URL: http://localhost:9009/api/repositories/get_repository_revision_install_info…
try:
# Get the repository information.
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
@@ -106,6 +149,7 @@
repo = hg.repository( suc.get_configured_ui(), repo_dir )
new_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, new_changeset_revision )
+ changeset_revision = new_changeset_revision
if repository_metadata:
encoded_repository_metadata_id = trans.security.encode_id( repository_metadata.id )
repository_metadata_dict = repository_metadata.get_api_value( view='collection',
@@ -113,14 +157,17 @@
repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
action='show',
id=encoded_repository_metadata_id )
- return repository_dict, repository_metadata_dict
+ # Get the repo_info_dict for installing the repository.
+ repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, has_repository_dependencies = \
+ repository_util.get_repo_info_dict( trans, encoded_repository_id, changeset_revision )
+ return repository_dict, repository_metadata_dict, repo_info_dict
else:
message = "Unable to locate repository_metadata record for repository id %d and changeset_revision %s" % ( repository.id, changeset_revision )
log.error( message, exc_info=True )
trans.response.status = 500
- return repository_dict, {}
+ return repository_dict, {}, {}
except Exception, e:
- message = "Error in the Tool Shed repositories API in get_repository_and_revision: %s" % str( e )
+ message = "Error in the Tool Shed repositories API in get_repository_revision_install_info: %s" % str( e )
log.error( message, exc_info=True )
trans.response.status = 500
return message
diff -r e741400c6578993c3bfe5faf184dd56518a2360a -r efe376544e2263ec8f9788d71d25dd2b4b49a39d lib/galaxy/webapps/tool_shed/buildapp.py
--- a/lib/galaxy/webapps/tool_shed/buildapp.py
+++ b/lib/galaxy/webapps/tool_shed/buildapp.py
@@ -77,7 +77,7 @@
webapp.api_mapper.resource( 'repository',
'repositories',
controller='repositories',
- collection={ 'get_repository_and_revision' : 'GET' },
+ collection={ 'get_repository_revision_install_info' : 'GET' },
name_prefix='repository_',
path_prefix='/api',
parent_resources=dict( member_name='repository', collection_name='repositories' ) )
diff -r e741400c6578993c3bfe5faf184dd56518a2360a -r efe376544e2263ec8f9788d71d25dd2b4b49a39d lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -252,7 +252,7 @@
# Load proprietary datatype display applications
trans.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
-def handle_tool_shed_repositories( trans, installation_dict ):
+def handle_tool_shed_repositories( trans, installation_dict, using_api=False ):
# The following installation_dict entries are all required.
install_repository_dependencies = installation_dict[ 'install_repository_dependencies' ]
new_tool_panel_section = installation_dict[ 'new_tool_panel_section' ]
@@ -272,7 +272,7 @@
no_changes_checked=no_changes_checked,
tool_panel_section=tool_panel_section,
new_tool_panel_section=new_tool_panel_section )
- if message and len( repo_info_dicts ) == 1:
+ if message and len( repo_info_dicts ) == 1 and not using_api:
installed_tool_shed_repository = created_or_updated_tool_shed_repositories[ 0 ]
message += 'Click <a href="%s">here</a> to manage the repository. ' % \
( web.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( installed_tool_shed_repository.id ) ) )
diff -r e741400c6578993c3bfe5faf184dd56518a2360a -r efe376544e2263ec8f9788d71d25dd2b4b49a39d scripts/api/common.py
--- a/scripts/api/common.py
+++ b/scripts/api/common.py
@@ -1,4 +1,7 @@
-import os, sys, urllib, urllib2
+import logging
+import os
+import sys
+import urllib2
new_path = [ os.path.join( os.path.dirname( __file__ ), '..', '..', 'lib' ) ]
new_path.extend( sys.path[1:] )
@@ -15,6 +18,8 @@
from Crypto.Util.randpool import RandomPool
from Crypto.Util import number
+log = logging.getLogger( __name__ )
+
def make_url( api_key, url, args=None ):
# Adds the API Key to the URL if it's not already there.
if args is None:
diff -r e741400c6578993c3bfe5faf184dd56518a2360a -r efe376544e2263ec8f9788d71d25dd2b4b49a39d scripts/api/install_repository_tools_into_existing_tool_panel_section.py
--- /dev/null
+++ b/scripts/api/install_repository_tools_into_existing_tool_panel_section.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+"""
+Install a specified repository revision from a specified tool shed into Galaxy. This example demonstrates installation of a repository that contains
+valid tools, loading them into an existing section of the Galaxy tool panel. The repository has no tool dependencies or repository dependencies, so only
+a single repository will be installed.
+
+This example requires a tool panel config file (e.g., tool_conf.xml, shed_tool_conf.xml, etc) to contain a tool panel section like the following:
+
+<section id="from_test_tool_shed" name="From Test Tool Shed" version="">
+</section>
+
+usage: ./install_repository_tools_into_existing_tool_panel_section.py <api_key> <galaxy base url> tool_shed_url name owner changeset_revision tool_panel_section_id
+
+Here is a working example of how to use this script to install a repository from the test tool shed.
+./install_repository_tools_into_existing_tool_panel_section.py <api key><galaxy base url>/api/tool_shed_repositories/new/install_repository_revision http://testtoolshed.g2.bx.psu.edu gregs_filter greg f28d5018f9cb from_test_tool_shed
+"""
+
+import os
+import sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+try:
+ assert sys.argv[ 7 ]
+except IndexError:
+ print 'usage: %s key url tool_shed_url name owner changeset_revision tool_panel_section_id' % os.path.basename( sys.argv[ 0 ] )
+ sys.exit( 1 )
+
+try:
+ data = {}
+ data[ 'tool_shed_url' ] = sys.argv[ 3 ]
+ data[ 'name' ] = sys.argv[ 4 ]
+ data[ 'owner' ] = sys.argv[ 5 ]
+ data[ 'changeset_revision' ] = sys.argv[ 6 ]
+ data[ 'tool_panel_section_id' ] = sys.argv[ 7 ]
+except IndexError:
+ print 'usage: %s key url tool_shed_url name owner changeset_revision tool_panel_section_id' % os.path.basename( sys.argv[ 0 ] )
+ sys.exit( 1 )
+
+submit( sys.argv[ 1 ], sys.argv[ 2 ], data )
diff -r e741400c6578993c3bfe5faf184dd56518a2360a -r efe376544e2263ec8f9788d71d25dd2b4b49a39d scripts/api/install_repository_tools_into_new_tool_panel_section.py
--- /dev/null
+++ b/scripts/api/install_repository_tools_into_new_tool_panel_section.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+"""
+Install a specified repository revision from a specified tool shed into Galaxy. This example demonstrates installation of a repository that contains
+valid tools, loading them into a new section of the Galaxy tool panel. The repository has no tool dependencies or repository dependencies, so only
+a single repository will be installed.
+
+usage: ./install_repository_tools_into_new_tool_panel_section.py <api_key> <galaxy base url> tool_shed_url name owner changeset_revision new_tool_panel_section_label
+
+Here is a working example of how to use this script to install a repository from the test tool shed.
+./install_repository_tools_into_new_tool_panel_section.py <api key><galaxy base url>/api/tool_shed_repositories/new/install_repository_revision http://testtoolshed.g2.bx.psu.edu gregs_filter greg f28d5018f9cb From%20Test%20Tool%20Shed
+"""
+
+import os
+import sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+try:
+ assert sys.argv[ 7 ]
+except IndexError:
+ print 'usage: %s key url tool_shed_url name owner changeset_revision new_tool_panel_section_label' % os.path.basename( sys.argv[ 0 ] )
+ sys.exit( 1 )
+
+try:
+ data = {}
+ data[ 'tool_shed_url' ] = sys.argv[ 3 ]
+ data[ 'name' ] = sys.argv[ 4 ]
+ data[ 'owner' ] = sys.argv[ 5 ]
+ data[ 'changeset_revision' ] = sys.argv[ 6 ]
+ data[ 'new_tool_panel_section_label' ] = sys.argv[ 7 ]
+except IndexError:
+ print 'usage: %s key url tool_shed_url name owner changeset_revision new_tool_panel_section_label' % os.path.basename( sys.argv[ 0 ] )
+ sys.exit( 1 )
+
+submit( sys.argv[ 1 ], sys.argv[ 2 ], data )
diff -r e741400c6578993c3bfe5faf184dd56518a2360a -r efe376544e2263ec8f9788d71d25dd2b4b49a39d scripts/api/install_repository_with_repository_dependencies.py
--- /dev/null
+++ b/scripts/api/install_repository_with_repository_dependencies.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+"""
+Install a specified repository revision from a specified tool shed into Galaxy. This example demonstrates installation of a repository that has
+repository dependencies, so multiple repositories will ultimately be installed. Since no Galaxy tool panel section information is used, all tools
+contained in the installed repositories will be loaded into the Galaxy tool panel outside of any sections.
+
+usage: ./install_repository_with_repository_dependencies.py <api_key> <galaxy base url> tool_shed_url name owner changeset_revision True
+
+Here is a working example of how to use this script to install a repository from the test tool shed.
+./install_repository_with_repository_dependencies.py <api key><galaxy base url>/api/tool_shed_repositories/new/install_repository_revision http://testtoolshed.g2.bx.psu.edu emboss_5 devteam 8ddad0c9a75a True
+"""
+
+import os
+import sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+try:
+ assert sys.argv[ 7 ]
+except IndexError:
+ print 'usage: %s key url tool_shed_url name owner changeset_revision install_repository_dependencies' % os.path.basename( sys.argv[ 0 ] )
+ sys.exit( 1 )
+
+try:
+ data = {}
+ data[ 'tool_shed_url' ] = sys.argv[ 3 ]
+ data[ 'name' ] = sys.argv[ 4 ]
+ data[ 'owner' ] = sys.argv[ 5 ]
+ data[ 'changeset_revision' ] = sys.argv[ 6 ]
+ data[ 'install_repository_dependencies' ] = sys.argv[ 7 ]
+except IndexError:
+ print 'usage: %s key url tool_shed_url name owner changeset_revision install_repository_dependencies' % os.path.basename( sys.argv[ 0 ] )
+ sys.exit( 1 )
+
+submit( sys.argv[ 1 ], sys.argv[ 2 ], data )
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: james_taylor: biostar: bugfix for encoding data
by commits-noreply@bitbucket.org 25 Mar '13
by commits-noreply@bitbucket.org 25 Mar '13
25 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e741400c6578/
Changeset: e741400c6578
User: james_taylor
Date: 2013-03-25 21:30:16
Summary: biostar: bugfix for encoding data
Affected #: 1 file
diff -r cf74f0879bfe111b9a6763dfaff715154d4e1693 -r e741400c6578993c3bfe5faf184dd56518a2360a lib/galaxy/webapps/galaxy/controllers/biostar.py
--- a/lib/galaxy/webapps/galaxy/controllers/biostar.py
+++ b/lib/galaxy/webapps/galaxy/controllers/biostar.py
@@ -22,7 +22,7 @@
"""
Encode data to send a question to Biostar
"""
- text = json.dumps(data)
+ text = json.to_json_string(data)
text = base64.urlsafe_b64encode(text)
digest = hmac.new(key, text).hexdigest()
return text, digest
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/75ba4f814129/
Changeset: 75ba4f814129
Branch: next-stable
User: carlfeberhard
Date: 2013-03-25 18:33:30
Summary: api/histories, show: capture job states
Affected #: 2 files
diff -r c82a139db1c98a990760e767da00fe7aeaa03d28 -r 75ba4f814129bb24ef85feb5fde99c115a707d4d lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -833,7 +833,6 @@
Returns a dictionary with state counts for history's HDAs. Key is a
dataset state, value is the number of states in that count.
"""
-
# Build query to get (state, count) pairs.
cols_to_select = [ trans.app.model.Dataset.table.c.state, func.count( '*' ) ]
from_obj = trans.app.model.HistoryDatasetAssociation.table.join( trans.app.model.Dataset.table )
@@ -864,6 +863,135 @@
return state_count_dict
+ def get_hda_summary_dicts( self, trans, history ):
+ """Returns a list of dictionaries containing summary information
+ for each HDA in the given history.
+ """
+ hda_model = trans.model.HistoryDatasetAssociation
+
+ # outer join with job output to get job_state or None
+ job_subq = ( trans.sa_session.query(
+ trans.model.Job.id.label( 'job_id' ),
+ trans.model.Job.state.label( 'job_state' ),
+ trans.model.JobToOutputDatasetAssociation.dataset_id.label( 'hda_id' ) )
+ .join( trans.model.JobToOutputDatasetAssociation ) ).subquery()
+
+ # get state, name, etc.
+ columns = ( hda_model.name, hda_model.hid, hda_model.id, hda_model.deleted,
+ trans.model.Dataset.state,
+ job_subq.c.job_state, job_subq.c.job_id )
+ column_keys = [ "name", "hid", "id", "deleted", "state", "job_state", "job_id" ]
+
+ query = ( trans.sa_session.query( *columns )
+ .enable_eagerloads( False )
+ .filter( hda_model.history == history )
+ .join( trans.model.Dataset )
+ .outerjoin(( job_subq, job_subq.c.hda_id == hda_model.id ))
+ .order_by( hda_model.hid ) )
+
+ # build dictionaries, adding history id and encoding all ids
+ hda_dicts = []
+ for hda_tuple in query.all():
+ hda_dict = dict( zip( column_keys, hda_tuple ) )
+ #if hda_dict[ 'job_state' ] not in [ None, 'ok' ]:
+ # print hda_dict[ 'hid' ], hda_dict[ 'name' ], hda_dict[ 'job_state' ]
+ hda_dict[ 'history_id' ] = history.id
+ trans.security.encode_dict_ids( hda_dict )
+ hda_dicts.append( hda_dict )
+ return hda_dicts
+
+ def _get_hda_state_summaries( self, trans, hda_dict_list ):
+ """Returns two dictionaries (in a tuple): state_counts and state_ids.
+ Each is keyed according to the possible hda states:
+ _counts contains a sum of the datasets in each state
+ _ids contains a list of the encoded ids for each hda in that state
+
+ hda_dict_list should be a list of hda data in dictionary form.
+ """
+ #TODO: doc to rst
+ # init counts, ids for each state
+ state_counts = {}
+ state_ids = {}
+ for key, state in trans.app.model.Dataset.states.items():
+ state_counts[ state ] = 0
+ state_ids[ state ] = []
+
+ for hda_dict in hda_dict_list:
+ item_state = hda_dict['state']
+ if not hda_dict['deleted']:
+ state_counts[ item_state ] = state_counts[ item_state ] + 1
+ # needs to return all ids (no deleted check)
+ state_ids[ item_state ].append( hda_dict['id'] )
+
+ return ( state_counts, state_ids )
+
+ def _get_history_state_from_hdas( self, trans, history, hda_state_counts ):
+ """Returns the history state based on the states of the HDAs it contains.
+ """
+ states = trans.app.model.Dataset.states
+
+ num_hdas = sum( hda_state_counts.values() )
+ # (default to ERROR)
+ state = states.ERROR
+ if num_hdas == 0:
+ state = states.NEW
+
+ else:
+ if( ( hda_state_counts[ states.RUNNING ] > 0 )
+ or ( hda_state_counts[ states.SETTING_METADATA ] > 0 )
+ or ( hda_state_counts[ states.UPLOAD ] > 0 ) ):
+ state = states.RUNNING
+
+ elif hda_state_counts[ states.QUEUED ] > 0:
+ state = states.QUEUED
+
+ elif( ( hda_state_counts[ states.ERROR ] > 0 )
+ or ( hda_state_counts[ states.FAILED_METADATA ] > 0 ) ):
+ state = states.ERROR
+
+ elif hda_state_counts[ states.OK ] == num_hdas:
+ state = states.OK
+
+ return state
+
+ def _are_jobs_still_running( self, trans, hda_summary_list ):
+ """Determine whether any jobs are running from the given
+ list of hda summary dictionaries.
+ """
+ job_states = trans.model.Job.states
+ def is_job_running( job_state ):
+ return ( ( job_state == job_states.NEW )
+ or( job_state == job_states.UPLOAD )
+ or( job_state == job_states.WAITING )
+ or( job_state == job_states.QUEUED )
+ or( job_state == job_states.RUNNING ) )
+
+ return len( filter( lambda hda: is_job_running( hda['job_state'] ), hda_summary_list ) )
+
+ def get_history_dict( self, trans, history ):
+ """Returns history data in the form of a dictionary.
+ """
+ history_dict = history.get_api_value( view='element', value_mapper={ 'id':trans.security.encode_id })
+
+ history_dict[ 'nice_size' ] = history.get_disk_size( nice_size=True )
+
+ #TODO: separate, move to annotation api, fill on the client
+ history_dict[ 'annotation' ] = history.get_item_annotation_str( trans.sa_session, trans.user, history )
+ if not history_dict[ 'annotation' ]:
+ history_dict[ 'annotation' ] = ''
+
+ #TODO: allow passing as arg
+ hda_summaries = self.get_hda_summary_dicts( trans, history )
+ #TODO remove the following in v2
+ ( state_counts, state_ids ) = self._get_hda_state_summaries( trans, hda_summaries )
+ history_dict[ 'state_details' ] = state_counts
+ history_dict[ 'state_ids' ] = state_ids
+ history_dict[ 'state' ] = self._get_history_state_from_hdas( trans, history, state_counts )
+
+ history_dict[ 'jobs_running' ] = self._are_jobs_still_running( trans, hda_summaries )
+
+ return history_dict
+
class UsesFormDefinitionsMixin:
"""Mixin for controllers that use Galaxy form objects."""
diff -r c82a139db1c98a990760e767da00fe7aeaa03d28 -r 75ba4f814129bb24ef85feb5fde99c115a707d4d lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -62,26 +62,6 @@
params = util.Params( kwd )
deleted = util.string_as_bool( deleted )
- states = trans.app.model.Dataset.states
-
- def get_dataset_state_summaries( datasets ):
- # cycles through the history's datasets, building counts and id lists for each possible ds state
- state_counts = {}
- state_ids = {}
-
- # init counts, ids for each state
- for key, state in states.items():
- state_counts[state] = 0
- state_ids[state] = []
-
- # cycle through datasets saving each ds' state
- for dataset in datasets:
- item_state = dataset.state
- if not dataset.deleted:
- state_counts[ item_state ] = state_counts[ item_state ] + 1
- state_ids[ item_state ].append( trans.security.encode_id( dataset.id ) )
- return ( state_counts, state_ids )
-
# try to load the history, by most_recently_used or the given id
try:
if history_id == "most_recently_used":
@@ -94,42 +74,7 @@
history = self.get_history( trans, history_id, check_ownership=False,
check_accessible=True, deleted=deleted )
- history_data = history.get_api_value( view='element', value_mapper={'id':trans.security.encode_id} )
- history_data[ 'nice_size' ] = history.get_disk_size( nice_size=True )
-
- #TODO: separate, move to annotation api, fill on the client
- history_data[ 'annotation' ] = history.get_item_annotation_str( trans.sa_session, trans.user, history )
- if not history_data[ 'annotation' ]:
- history_data[ 'annotation' ] = ''
-
- # get the history state using the state summaries of it's datasets (default to ERROR)
- num_sets = len([ hda.id for hda in history.datasets if not hda.deleted ])
- state = states.ERROR
-
- ( state_counts, state_ids ) = get_dataset_state_summaries( history.datasets )
-
- if num_sets == 0:
- state = states.NEW
-
- else:
- if( ( state_counts[ states.RUNNING ] > 0 )
- or ( state_counts[ states.SETTING_METADATA ] > 0 )
- or ( state_counts[ states.UPLOAD ] > 0 ) ):
- state = states.RUNNING
-
- elif state_counts[ states.QUEUED ] > 0:
- state = states.QUEUED
-
- elif( ( state_counts[ states.ERROR ] > 0 )
- or ( state_counts[ states.FAILED_METADATA ] > 0 ) ):
- state = states.ERROR
-
- elif state_counts[ states.OK ] == num_sets:
- state = states.OK
-
- history_data[ 'state' ] = state
- history_data[ 'state_details' ] = state_counts
- history_data[ 'state_ids' ] = state_ids
+ history_data = self.get_history_dict( trans, history )
history_data[ 'contents_url' ] = url_for( 'history_contents', history_id=history_id )
except Exception, e:
https://bitbucket.org/galaxy/galaxy-central/commits/cf74f0879bfe/
Changeset: cf74f0879bfe
User: carlfeberhard
Date: 2013-03-25 18:34:27
Summary: merge next-stable
Affected #: 2 files
diff -r 6e09f0398ddc0f85b0776f7c98d21a6b3458b1d6 -r cf74f0879bfe111b9a6763dfaff715154d4e1693 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -833,7 +833,6 @@
Returns a dictionary with state counts for history's HDAs. Key is a
dataset state, value is the number of states in that count.
"""
-
# Build query to get (state, count) pairs.
cols_to_select = [ trans.app.model.Dataset.table.c.state, func.count( '*' ) ]
from_obj = trans.app.model.HistoryDatasetAssociation.table.join( trans.app.model.Dataset.table )
@@ -864,6 +863,135 @@
return state_count_dict
+ def get_hda_summary_dicts( self, trans, history ):
+ """Returns a list of dictionaries containing summary information
+ for each HDA in the given history.
+ """
+ hda_model = trans.model.HistoryDatasetAssociation
+
+ # outer join with job output to get job_state or None
+ job_subq = ( trans.sa_session.query(
+ trans.model.Job.id.label( 'job_id' ),
+ trans.model.Job.state.label( 'job_state' ),
+ trans.model.JobToOutputDatasetAssociation.dataset_id.label( 'hda_id' ) )
+ .join( trans.model.JobToOutputDatasetAssociation ) ).subquery()
+
+ # get state, name, etc.
+ columns = ( hda_model.name, hda_model.hid, hda_model.id, hda_model.deleted,
+ trans.model.Dataset.state,
+ job_subq.c.job_state, job_subq.c.job_id )
+ column_keys = [ "name", "hid", "id", "deleted", "state", "job_state", "job_id" ]
+
+ query = ( trans.sa_session.query( *columns )
+ .enable_eagerloads( False )
+ .filter( hda_model.history == history )
+ .join( trans.model.Dataset )
+ .outerjoin(( job_subq, job_subq.c.hda_id == hda_model.id ))
+ .order_by( hda_model.hid ) )
+
+ # build dictionaries, adding history id and encoding all ids
+ hda_dicts = []
+ for hda_tuple in query.all():
+ hda_dict = dict( zip( column_keys, hda_tuple ) )
+ #if hda_dict[ 'job_state' ] not in [ None, 'ok' ]:
+ # print hda_dict[ 'hid' ], hda_dict[ 'name' ], hda_dict[ 'job_state' ]
+ hda_dict[ 'history_id' ] = history.id
+ trans.security.encode_dict_ids( hda_dict )
+ hda_dicts.append( hda_dict )
+ return hda_dicts
+
+ def _get_hda_state_summaries( self, trans, hda_dict_list ):
+ """Returns two dictionaries (in a tuple): state_counts and state_ids.
+ Each is keyed according to the possible hda states:
+ _counts contains a sum of the datasets in each state
+ _ids contains a list of the encoded ids for each hda in that state
+
+ hda_dict_list should be a list of hda data in dictionary form.
+ """
+ #TODO: doc to rst
+ # init counts, ids for each state
+ state_counts = {}
+ state_ids = {}
+ for key, state in trans.app.model.Dataset.states.items():
+ state_counts[ state ] = 0
+ state_ids[ state ] = []
+
+ for hda_dict in hda_dict_list:
+ item_state = hda_dict['state']
+ if not hda_dict['deleted']:
+ state_counts[ item_state ] = state_counts[ item_state ] + 1
+ # needs to return all ids (no deleted check)
+ state_ids[ item_state ].append( hda_dict['id'] )
+
+ return ( state_counts, state_ids )
+
+ def _get_history_state_from_hdas( self, trans, history, hda_state_counts ):
+ """Returns the history state based on the states of the HDAs it contains.
+ """
+ states = trans.app.model.Dataset.states
+
+ num_hdas = sum( hda_state_counts.values() )
+ # (default to ERROR)
+ state = states.ERROR
+ if num_hdas == 0:
+ state = states.NEW
+
+ else:
+ if( ( hda_state_counts[ states.RUNNING ] > 0 )
+ or ( hda_state_counts[ states.SETTING_METADATA ] > 0 )
+ or ( hda_state_counts[ states.UPLOAD ] > 0 ) ):
+ state = states.RUNNING
+
+ elif hda_state_counts[ states.QUEUED ] > 0:
+ state = states.QUEUED
+
+ elif( ( hda_state_counts[ states.ERROR ] > 0 )
+ or ( hda_state_counts[ states.FAILED_METADATA ] > 0 ) ):
+ state = states.ERROR
+
+ elif hda_state_counts[ states.OK ] == num_hdas:
+ state = states.OK
+
+ return state
+
+ def _are_jobs_still_running( self, trans, hda_summary_list ):
+ """Determine whether any jobs are running from the given
+ list of hda summary dictionaries.
+ """
+ job_states = trans.model.Job.states
+ def is_job_running( job_state ):
+ return ( ( job_state == job_states.NEW )
+ or( job_state == job_states.UPLOAD )
+ or( job_state == job_states.WAITING )
+ or( job_state == job_states.QUEUED )
+ or( job_state == job_states.RUNNING ) )
+
+ return len( filter( lambda hda: is_job_running( hda['job_state'] ), hda_summary_list ) )
+
+ def get_history_dict( self, trans, history ):
+ """Returns history data in the form of a dictionary.
+ """
+ history_dict = history.get_api_value( view='element', value_mapper={ 'id':trans.security.encode_id })
+
+ history_dict[ 'nice_size' ] = history.get_disk_size( nice_size=True )
+
+ #TODO: separate, move to annotation api, fill on the client
+ history_dict[ 'annotation' ] = history.get_item_annotation_str( trans.sa_session, trans.user, history )
+ if not history_dict[ 'annotation' ]:
+ history_dict[ 'annotation' ] = ''
+
+ #TODO: allow passing as arg
+ hda_summaries = self.get_hda_summary_dicts( trans, history )
+ #TODO remove the following in v2
+ ( state_counts, state_ids ) = self._get_hda_state_summaries( trans, hda_summaries )
+ history_dict[ 'state_details' ] = state_counts
+ history_dict[ 'state_ids' ] = state_ids
+ history_dict[ 'state' ] = self._get_history_state_from_hdas( trans, history, state_counts )
+
+ history_dict[ 'jobs_running' ] = self._are_jobs_still_running( trans, hda_summaries )
+
+ return history_dict
+
class UsesFormDefinitionsMixin:
"""Mixin for controllers that use Galaxy form objects."""
diff -r 6e09f0398ddc0f85b0776f7c98d21a6b3458b1d6 -r cf74f0879bfe111b9a6763dfaff715154d4e1693 lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -62,26 +62,6 @@
params = util.Params( kwd )
deleted = util.string_as_bool( deleted )
- states = trans.app.model.Dataset.states
-
- def get_dataset_state_summaries( datasets ):
- # cycles through the history's datasets, building counts and id lists for each possible ds state
- state_counts = {}
- state_ids = {}
-
- # init counts, ids for each state
- for key, state in states.items():
- state_counts[state] = 0
- state_ids[state] = []
-
- # cycle through datasets saving each ds' state
- for dataset in datasets:
- item_state = dataset.state
- if not dataset.deleted:
- state_counts[ item_state ] = state_counts[ item_state ] + 1
- state_ids[ item_state ].append( trans.security.encode_id( dataset.id ) )
- return ( state_counts, state_ids )
-
# try to load the history, by most_recently_used or the given id
try:
if history_id == "most_recently_used":
@@ -94,42 +74,7 @@
history = self.get_history( trans, history_id, check_ownership=False,
check_accessible=True, deleted=deleted )
- history_data = history.get_api_value( view='element', value_mapper={'id':trans.security.encode_id} )
- history_data[ 'nice_size' ] = history.get_disk_size( nice_size=True )
-
- #TODO: separate, move to annotation api, fill on the client
- history_data[ 'annotation' ] = history.get_item_annotation_str( trans.sa_session, trans.user, history )
- if not history_data[ 'annotation' ]:
- history_data[ 'annotation' ] = ''
-
- # get the history state using the state summaries of it's datasets (default to ERROR)
- num_sets = len([ hda.id for hda in history.datasets if not hda.deleted ])
- state = states.ERROR
-
- ( state_counts, state_ids ) = get_dataset_state_summaries( history.datasets )
-
- if num_sets == 0:
- state = states.NEW
-
- else:
- if( ( state_counts[ states.RUNNING ] > 0 )
- or ( state_counts[ states.SETTING_METADATA ] > 0 )
- or ( state_counts[ states.UPLOAD ] > 0 ) ):
- state = states.RUNNING
-
- elif state_counts[ states.QUEUED ] > 0:
- state = states.QUEUED
-
- elif( ( state_counts[ states.ERROR ] > 0 )
- or ( state_counts[ states.FAILED_METADATA ] > 0 ) ):
- state = states.ERROR
-
- elif state_counts[ states.OK ] == num_sets:
- state = states.OK
-
- history_data[ 'state' ] = state
- history_data[ 'state_details' ] = state_counts
- history_data[ 'state_ids' ] = state_ids
+ history_data = self.get_history_dict( trans, history )
history_data[ 'contents_url' ] = url_for( 'history_contents', history_id=history_id )
except Exception, e:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Specify return codes so the automated framework will more correctly report success/failure.
by commits-noreply@bitbucket.org 25 Mar '13
by commits-noreply@bitbucket.org 25 Mar '13
25 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6e09f0398ddc/
Changeset: 6e09f0398ddc
User: inithello
Date: 2013-03-25 17:05:17
Summary: Specify return codes so the automated framework will more correctly report success/failure.
Affected #: 1 file
diff -r 48ff47731eba48aededf1c608d7dd1e03dcd7d1f -r 6e09f0398ddc0f85b0776f7c98d21a6b3458b1d6 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -734,7 +734,7 @@
( changeset_revision, name ) )
# Run the cleanup method. This removes tool functional test methods from the test_toolbox module and uninstalls the
# repository using Twill.
- execute_uninstall_method( repository_info_dict )
+ success = execute_uninstall_method( repository_info_dict )
# Set the test_toolbox.toolbox module-level variable to the new app.toolbox.
test_toolbox.toolbox = app.toolbox
else:
@@ -782,20 +782,23 @@
print "# %d repositories failed:" % len( repositories_failed )
show_summary_output( repositories_failed )
if repositories_failed_install:
+ # Set success to False so that the return code will not be 0.
+ success = False
print '# ----------------------------------------------------------------------------------'
print "# %d repositories not installed correctly:" % len( repositories_failed_install )
show_summary_output( repositories_failed_install )
print "####################################################################################"
-
- if success:
+ if repositories_tested > 0:
+ if success:
+ return 0
+ else:
+ return 1
+ else:
return 0
- else:
- return 1
if __name__ == "__main__":
now = strftime( "%Y-%m-%d %H:%M:%S" )
print "####################################################################################"
print "# %s - running repository installation and testing script." % now
print "####################################################################################"
- return_code = main()
- sys.exit( return_code )
+ sys.exit( main() )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/fa23454cb097/
Changeset: fa23454cb097
User: jgoecks
Date: 2013-03-25 14:37:17
Summary: Trackster: more flexible parsing of location string.
Affected #: 1 file
diff -r f6743b36ce0a54e49acf06d09e03d01584d55514 -r fa23454cb09791cf1765d314dac07668b765795a static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -1276,7 +1276,7 @@
view.reference_track.init();
}
}
- if (low !== undefined && high !== undefined) {
+ if (low && high) {
view.low = Math.max(low, 0);
view.high = Math.min(high, view.max_high);
}
@@ -1289,34 +1289,49 @@
view.request_redraw();
}
},
+
+ /**
+ * Change viewing region to that denoted by string. General format of string is:
+ *
+ * <chrom>[ {separator}<start>[-<end>] ]
+ *
+ * where separator can be whitespace or a colon. Examples:
+ *
+ * chr22
+ * chr1:100-200
+ * chr7 89999
+ * chr8 90000 990000
+ */
go_to: function(str) {
- // Preprocess str to remove spaces and commas.
- str = str.replace(/ |,/g, "");
-
- // Go to new location.
- var view = this,
- new_low,
- new_high,
- chrom_pos = str.split(":"),
+ // Remove commas.
+ str = str.replace(/,/g, '');
+
+ // Replace colons and hyphens with space for easy parsing.
+ str = str.replace(/:|\-/g, ' ');
+
+ // Parse new location.
+ var chrom_pos = str.split(/\s+/),
chrom = chrom_pos[0],
- pos = chrom_pos[1];
-
- if (pos !== undefined) {
- try {
- var pos_split = pos.split("-");
- new_low = parseInt(pos_split[0], 10);
- new_high = parseInt(pos_split[1], 10);
- } catch (e) {
- return false;
- }
+ new_low = (chrom_pos[1] ? parseInt(chrom_pos[1], 10) : null),
+ new_high = (chrom_pos[2] ? parseInt(chrom_pos[2], 10) : null);
+
+ // If no new high, new_low is the position of focus, so adjust low, high
+ // accordingly.
+ if (!new_high) {
+ // HACK: max resolution is 30 bases, so adjust low, high accordingly.
+ new_low = new_low - 15;
+ new_high = new_low + 15;
}
- view.change_chrom(chrom, new_low, new_high);
+
+ this.change_chrom(chrom, new_low, new_high);
},
+
move_fraction: function(fraction) {
var view = this;
var span = view.high - view.low;
this.move_delta(fraction * span);
},
+
move_delta: function(delta_chrom) {
// Update low, high.
var view = this;
https://bitbucket.org/galaxy/galaxy-central/commits/48ff47731eba/
Changeset: 48ff47731eba
User: jgoecks
Date: 2013-03-25 14:37:48
Summary: Automated merge.
Affected #: 1 file
diff -r fa23454cb09791cf1765d314dac07668b765795a -r 48ff47731eba48aededf1c608d7dd1e03dcd7d1f test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -454,9 +454,10 @@
def fill_review_form( self, review_contents_dict, strings_displayed=[], strings_not_displayed=[] ):
kwd = dict()
+ changed = False
for label, contents in review_contents_dict.items():
- strings_displayed.append( label )
if contents:
+ changed = True
kwd[ '%s__ESEP__comment' % label ] = contents[ 'comment' ]
kwd[ '%s__ESEP__rating' % label ] = contents[ 'rating' ]
if 'private' in contents:
@@ -464,8 +465,10 @@
kwd[ '%s__ESEP__approved' % label ] = contents[ 'approved' ]
else:
kwd[ '%s__ESEP__approved' % label ] = 'not_applicable'
+ self.check_for_strings( strings_displayed, strings_not_displayed )
self.submit_form( 1, 'Workflows__ESEP__review_button', **kwd )
- strings_displayed.append( 'Reviews were saved' )
+ if changed:
+ strings_displayed.append( 'Reviews were saved' )
self.check_for_strings( strings_displayed, strings_not_displayed )
def galaxy_login( self, email='test(a)bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Update tests to reflect new default for component review approval status.
by commits-noreply@bitbucket.org 25 Mar '13
by commits-noreply@bitbucket.org 25 Mar '13
25 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9baf9d6ce3a5/
Changeset: 9baf9d6ce3a5
User: inithello
Date: 2013-03-25 14:36:02
Summary: Update tests to reflect new default for component review approval status.
Affected #: 1 file
diff -r f6743b36ce0a54e49acf06d09e03d01584d55514 -r 9baf9d6ce3a5b32e3f14ddcdee5eae85e1138257 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -454,9 +454,10 @@
def fill_review_form( self, review_contents_dict, strings_displayed=[], strings_not_displayed=[] ):
kwd = dict()
+ changed = False
for label, contents in review_contents_dict.items():
- strings_displayed.append( label )
if contents:
+ changed = True
kwd[ '%s__ESEP__comment' % label ] = contents[ 'comment' ]
kwd[ '%s__ESEP__rating' % label ] = contents[ 'rating' ]
if 'private' in contents:
@@ -464,8 +465,10 @@
kwd[ '%s__ESEP__approved' % label ] = contents[ 'approved' ]
else:
kwd[ '%s__ESEP__approved' % label ] = 'not_applicable'
+ self.check_for_strings( strings_displayed, strings_not_displayed )
self.submit_form( 1, 'Workflows__ESEP__review_button', **kwd )
- strings_displayed.append( 'Reviews were saved' )
+ if changed:
+ strings_displayed.append( 'Reviews were saved' )
self.check_for_strings( strings_displayed, strings_not_displayed )
def galaxy_login( self, email='test(a)bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Refactor several methods in Galaxy's admin_toolshed controller in preparation for supporting installing tool shed repositories via the tool shed and Galaxy APIs.
by commits-noreply@bitbucket.org 23 Mar '13
by commits-noreply@bitbucket.org 23 Mar '13
23 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f6743b36ce0a/
Changeset: f6743b36ce0a
User: greg
Date: 2013-03-23 15:44:46
Summary: Refactor several methods in Galaxy's admin_toolshed controller in preparation for supporting installing tool shed repositories via the tool shed and Galaxy APIs.
Affected #: 5 files
diff -r e0da441ad10c518658d8be665a0bf6152a6acae0 -r f6743b36ce0a54e49acf06d09e03d01584d55514 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1,19 +1,35 @@
-import logging, os, shutil, tempfile, urllib2
+import logging
+import os
+import shutil
+import urllib2
from admin import AdminGalaxy
-from galaxy import web, util, eggs, tools
-from galaxy.web.form_builder import SelectField, CheckboxField
-from galaxy.web.framework.helpers import iff, grids
+from galaxy import eggs
+from galaxy import web
+from galaxy import util
+from galaxy.web.form_builder import CheckboxField
+from galaxy.web.framework.helpers import grids
+from galaxy.web.framework.helpers import iff
from galaxy.util import json
from galaxy.model.orm import or_
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import common_install_util, data_manager_util, datatype_util, encoding_util, metadata_util
-from tool_shed.util import readme_util, repository_dependency_util, tool_dependency_util, tool_util, workflow_util
+from tool_shed.util import common_install_util
+from tool_shed.util import data_manager_util
+from tool_shed.util import datatype_util
+from tool_shed.util import encoding_util
+from tool_shed.util import metadata_util
+from tool_shed.util import readme_util
+from tool_shed.util import repository_dependency_util
+from tool_shed.util import tool_dependency_util
+from tool_shed.util import tool_util
+from tool_shed.util import workflow_util
from tool_shed.galaxy_install import repository_util
import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids
import pkg_resources
eggs.require( 'mercurial' )
-from mercurial import hg, ui, commands
+from mercurial import commands
+from mercurial import hg
+from mercurial import ui
pkg_resources.require( 'elementtree' )
from elementtree import ElementTree
@@ -520,96 +536,21 @@
"""Install specified tool shed repositories."""
shed_tool_conf = kwd.get( 'shed_tool_conf', '' )
tool_path = kwd[ 'tool_path' ]
- includes_tool_dependencies = util.string_as_bool( kwd[ 'includes_tool_dependencies' ] )
install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
- # There must be a one-to-one mapping between items in the 3 lists:tool_shed_repositories, tool_panel_section_keys, repo_info_dicts.
+ # There must be a one-to-one mapping between items in the 3 lists: tool_shed_repositories, tool_panel_section_keys, repo_info_dicts.
tool_panel_section_keys = util.listify( kwd[ 'tool_panel_section_keys' ] )
repo_info_dicts = util.listify( kwd[ 'repo_info_dicts' ] )
for index, tool_shed_repository in enumerate( tool_shed_repositories ):
repo_info_dict = repo_info_dicts[ index ]
tool_panel_section_key = tool_panel_section_keys[ index ]
- if tool_panel_section_key:
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- tool_section = None
- if isinstance( repo_info_dict, basestring ):
- repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
- # Clone each repository to the configured location.
- suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
- repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
- relative_clone_dir = suc.generate_tool_shed_repository_install_dir( repository_clone_url, tool_shed_repository.installed_changeset_revision )
- clone_dir = os.path.join( tool_path, relative_clone_dir )
- relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
- install_dir = os.path.join( tool_path, relative_install_dir )
- cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
- if cloned_ok:
- if reinstalling:
- # Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated.
- changeset_revision_dict = repository_util.get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
- current_changeset_revision = changeset_revision_dict.get( 'changeset_revision', None )
- current_ctx_rev = changeset_revision_dict.get( 'ctx_rev', None )
- if current_ctx_rev != ctx_rev:
- repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( install_dir ) )
- repository_util.pull_repository( repo, repository_clone_url, current_changeset_revision )
- suc.update_repository( repo, ctx_rev=current_ctx_rev )
- repository_util.handle_repository_contents( trans,
- tool_shed_repository=tool_shed_repository,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- tool_shed=tool_shed_repository.tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf,
- reinstalling=reinstalling )
- trans.sa_session.refresh( tool_shed_repository )
- metadata = tool_shed_repository.metadata
- if 'tools' in metadata:
- # Get the tool_versions from the tool shed for each tool in the installed change set.
- suc.update_tool_shed_repository_status( trans.app,
- tool_shed_repository,
- trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
- tool_shed_url = suc.get_url_from_tool_shed( trans.app, tool_shed_repository.tool_shed )
- url = suc.url_join( tool_shed_url,
- '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
- ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- if text:
- tool_version_dicts = json.from_json_string( text )
- tool_util.handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
- else:
- message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
- message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
- message += "from the installed repository's <b>Repository Actions</b> menu. "
- status = 'error'
- if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
- work_dir = tempfile.mkdtemp()
- # Install tool dependencies.
- suc.update_tool_shed_repository_status( trans.app,
- tool_shed_repository,
- trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
- # Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
- installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_shed_repository.tool_dependencies )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
- suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED )
- else:
- # An error occurred while cloning the repository, so reset everything necessary to enable another attempt.
- self.set_repository_attributes( trans,
- tool_shed_repository,
- status=trans.model.ToolShedRepository.installation_status.ERROR,
- error_message=error_message,
- deleted=False,
- uninstalled=False,
- remove_from_disk=True )
+ repository_util.install_tool_shed_repository( trans,
+ tool_shed_repository,
+ repo_info_dict,
+ tool_panel_section_key,
+ shed_tool_conf,
+ tool_path,
+ install_tool_dependencies,
+ reinstalling=reinstalling )
tsr_ids_for_monitoring = [ trans.security.encode_id( tsr.id ) for tsr in tool_shed_repositories ]
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='monitor_repository_installation',
@@ -844,7 +785,7 @@
@web.expose
@web.require_admin
def prepare_for_install( self, trans, **kwd ):
- if not have_shed_tool_conf_for_install( trans ):
+ if not suc.have_shed_tool_conf_for_install( trans ):
message = 'The <b>tool_config_file</b> setting in <b>universe_wsgi.ini</b> must include at least one shed tool configuration file name with a '
message += '<b><toolbox></b> tag that includes a <b>tool_path</b> attribute value which is a directory relative to the Galaxy installation '
message += 'directory in order to automatically install tools from a Galaxy tool shed (e.g., the file name <b>shed_tool_conf.xml</b> whose '
@@ -942,9 +883,9 @@
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='manage_repositories',
**kwd ) )
- shed_tool_conf_select_field = build_shed_tool_conf_select_field( trans )
+ shed_tool_conf_select_field = tool_util.build_shed_tool_conf_select_field( trans )
tool_path = suc.get_tool_path_by_shed_tool_conf_filename( trans, shed_tool_conf )
- tool_panel_section_select_field = build_tool_panel_section_select_field( trans )
+ tool_panel_section_select_field = tool_util.build_tool_panel_section_select_field( trans )
if len( repo_info_dicts ) == 1:
# If we're installing a single repository, see if it contains a readme or dependencies that we can display.
repo_info_dict = repo_info_dicts[ 0 ]
@@ -1278,7 +1219,7 @@
original_section_name = ''
else:
original_section_name = ''
- tool_panel_section_select_field = build_tool_panel_section_select_field( trans )
+ tool_panel_section_select_field = tool_util.build_tool_panel_section_select_field( trans )
no_changes_check_box = CheckboxField( 'no_changes', checked=True )
if original_section_name:
message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel section <b>%s</b>. " \
@@ -1294,7 +1235,7 @@
no_changes_check_box = None
original_section_name = ''
tool_panel_section_select_field = None
- shed_tool_conf_select_field = build_shed_tool_conf_select_field( trans )
+ shed_tool_conf_select_field = tool_util.build_shed_tool_conf_select_field( trans )
containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans,
tool_shed_url=tool_shed_url,
tool_path=tool_path,
@@ -1398,13 +1339,13 @@
"""An error occurred while cloning the repository, so reset everything necessary to enable another attempt."""
repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
if kwd.get( 'reset_repository', False ):
- self.set_repository_attributes( trans,
- repository,
- status=trans.model.ToolShedRepository.installation_status.NEW,
- error_message=None,
- deleted=False,
- uninstalled=False,
- remove_from_disk=True )
+ suc.set_repository_attributes( trans,
+ repository,
+ status=trans.model.ToolShedRepository.installation_status.NEW,
+ error_message=None,
+ deleted=False,
+ uninstalled=False,
+ remove_from_disk=True )
new_kwd = {}
new_kwd[ 'message' ] = "You can now attempt to install the repository named <b>%s</b> again." % repository.name
new_kwd[ 'status' ] = "done"
@@ -1415,20 +1356,6 @@
action='manage_repository',
**kwd ) )
- def set_repository_attributes( self, trans, repository, status, error_message, deleted, uninstalled, remove_from_disk=False ):
- if remove_from_disk:
- relative_install_dir = repository.repo_path( trans.app )
- if relative_install_dir:
- clone_dir = os.path.abspath( relative_install_dir )
- shutil.rmtree( clone_dir )
- log.debug( "Removed repository installation directory: %s" % str( clone_dir ) )
- repository.error_message = error_message
- repository.status = status
- repository.deleted = deleted
- repository.uninstalled = uninstalled
- trans.sa_session.add( repository )
- trans.sa_session.flush()
-
@web.expose
@web.require_admin
def set_tool_versions( self, trans, **kwd ):
@@ -1680,46 +1607,3 @@
metadata=metadata,
message=message,
status=status )
-
-## ---- Utility methods -------------------------------------------------------
-
-def build_shed_tool_conf_select_field( trans ):
- """Build a SelectField whose options are the keys in trans.app.toolbox.shed_tool_confs."""
- options = []
- for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
- shed_tool_conf_filename = shed_tool_conf_dict[ 'config_filename' ]
- if shed_tool_conf_filename != trans.app.config.migrated_tools_config:
- if shed_tool_conf_filename.startswith( './' ):
- option_label = shed_tool_conf_filename.replace( './', '', 1 )
- else:
- option_label = shed_tool_conf_filename
- options.append( ( option_label, shed_tool_conf_filename ) )
- select_field = SelectField( name='shed_tool_conf' )
- for option_tup in options:
- select_field.add_option( option_tup[0], option_tup[1] )
- return select_field
-
-def build_tool_panel_section_select_field( trans ):
- """Build a SelectField whose options are the sections of the current in-memory toolbox."""
- options = []
- for k, v in trans.app.toolbox.tool_panel.items():
- if isinstance( v, tools.ToolSection ):
- options.append( ( v.name, v.id ) )
- select_field = SelectField( name='tool_panel_section', display='radio' )
- for option_tup in options:
- select_field.add_option( option_tup[0], option_tup[1] )
- return select_field
-
-def can_select_tool_panel_section():
- pass
-
-def have_shed_tool_conf_for_install( trans ):
- if not trans.app.toolbox.shed_tool_confs:
- return False
- migrated_tools_conf_path, migrated_tools_conf_name = os.path.split( trans.app.config.migrated_tools_config )
- for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
- shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
- shed_tool_conf_path, shed_tool_conf_name = os.path.split( shed_tool_conf )
- if shed_tool_conf_name != migrated_tools_conf_name:
- return True
- return False
diff -r e0da441ad10c518658d8be665a0bf6152a6acae0 -r f6743b36ce0a54e49acf06d09e03d01584d55514 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -1,12 +1,16 @@
import logging
import os
+import shutil
+import tempfile
import threading
import urllib2
from galaxy import tools
+from galaxy.util import json
from galaxy import web
from galaxy.model.orm import or_
from galaxy.webapps.tool_shed.util import container_util
import tool_shed.util.shed_util_common as suc
+from tool_shed.util import common_install_util
from tool_shed.util import data_manager_util
from tool_shed.util import datatype_util
from tool_shed.util import encoding_util
@@ -63,7 +67,7 @@
all_repository_dependencies=None,
handled_key_rd_dicts=None,
circular_repository_dependencies=None )
- tool_dependencies = metadata.get( 'tool_dependencies', None )
+ tool_dependencies = metadata.get( 'tool_dependencies', {} )
if tool_dependencies:
new_tool_dependencies = {}
for dependency_key, requirements_dict in tool_dependencies.items():
@@ -347,6 +351,91 @@
query = trans.sa_session.query( trans.model.ToolShedRepository ).filter( or_( *clause_list ) )
return encoded_kwd, query, tool_shed_repositories, encoded_repository_ids
+def install_tool_shed_repository( trans, tool_shed_repository, repo_info_dict, tool_panel_section_key, shed_tool_conf, tool_path, install_tool_dependencies,
+ reinstalling=False ):
+ if tool_panel_section_key:
+ tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
+ else:
+ tool_section = None
+ if isinstance( repo_info_dict, basestring ):
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ # Clone each repository to the configured location.
+ suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
+ repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
+ relative_clone_dir = suc.generate_tool_shed_repository_install_dir( repository_clone_url, tool_shed_repository.installed_changeset_revision )
+ clone_dir = os.path.join( tool_path, relative_clone_dir )
+ relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
+ install_dir = os.path.join( tool_path, relative_install_dir )
+ cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
+ if cloned_ok:
+ if reinstalling:
+ # Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated.
+ changeset_revision_dict = get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
+ current_changeset_revision = changeset_revision_dict.get( 'changeset_revision', None )
+ current_ctx_rev = changeset_revision_dict.get( 'ctx_rev', None )
+ if current_ctx_rev != ctx_rev:
+ repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( install_dir ) )
+ pull_repository( repo, repository_clone_url, current_changeset_revision )
+ suc.update_repository( repo, ctx_rev=current_ctx_rev )
+ handle_repository_contents( trans,
+ tool_shed_repository=tool_shed_repository,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ tool_shed=tool_shed_repository.tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf,
+ reinstalling=reinstalling )
+ trans.sa_session.refresh( tool_shed_repository )
+ metadata = tool_shed_repository.metadata
+ if 'tools' in metadata:
+ # Get the tool_versions from the tool shed for each tool in the installed change set.
+ suc.update_tool_shed_repository_status( trans.app,
+ tool_shed_repository,
+ trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, tool_shed_repository.tool_shed )
+ url = suc.url_join( tool_shed_url,
+ '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
+ ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ tool_version_dicts = json.from_json_string( text )
+ tool_util.handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
+ else:
+ message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
+ message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
+ message += "from the installed repository's <b>Repository Actions</b> menu. "
+ status = 'error'
+ if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
+ work_dir = tempfile.mkdtemp()
+ # Install tool dependencies.
+ suc.update_tool_shed_repository_status( trans.app,
+ tool_shed_repository,
+ trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ # Get the tool_dependencies.xml file from the repository.
+ tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
+ installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_shed_repository.tool_dependencies )
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED )
+ else:
+ # An error occurred while cloning the repository, so reset everything necessary to enable another attempt.
+ suc.set_repository_attributes( trans,
+ tool_shed_repository,
+ status=trans.model.ToolShedRepository.installation_status.ERROR,
+ error_message=error_message,
+ deleted=False,
+ uninstalled=False,
+ remove_from_disk=True )
+
def merge_containers_dicts_for_new_install( containers_dicts ):
"""
When installing one or more tool shed repositories for the first time, the received list of containers_dicts contains a containers_dict for
diff -r e0da441ad10c518658d8be665a0bf6152a6acae0 -r f6743b36ce0a54e49acf06d09e03d01584d55514 lib/tool_shed/scripts/api/common.py
--- /dev/null
+++ b/lib/tool_shed/scripts/api/common.py
@@ -0,0 +1,165 @@
+import os, sys, urllib, urllib2
+
+new_path = [ os.path.join( os.path.dirname( __file__ ), '..', '..', '..', 'lib' ) ]
+new_path.extend( sys.path[ 1: ] )
+sys.path = new_path
+
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( "simplejson" )
+import simplejson
+
+pkg_resources.require( "pycrypto" )
+from Crypto.Cipher import Blowfish
+from Crypto.Util.randpool import RandomPool
+from Crypto.Util import number
+
+def encode_id( config_id_secret, obj_id ):
+ # Utility method to encode ID's
+ id_cipher = Blowfish.new( config_id_secret )
+ # Convert to string
+ s = str( obj_id )
+ # Pad to a multiple of 8 with leading "!"
+ s = ( "!" * ( 8 - len(s) % 8 ) ) + s
+ # Encrypt
+ return id_cipher.encrypt( s ).encode( 'hex' )
+
+def delete( api_key, url, data, return_formatted=True ):
+ # Sends an API DELETE request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
+ try:
+ url = make_url( api_key, url )
+ req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ))
+ req.get_method = lambda: 'DELETE'
+ r = simplejson.loads( urllib2.urlopen( req ).read() )
+ except urllib2.HTTPError, e:
+ if return_formatted:
+ print e
+ print e.read( 1024 )
+ sys.exit( 1 )
+ else:
+ return 'Error. '+ str( e.read( 1024 ) )
+ if not return_formatted:
+ return r
+ print 'Response'
+ print '--------'
+ print r
+
+def display( api_key, url, return_formatted=True ):
+ # Sends an API GET request and acts as a generic formatter for the JSON response.
+ try:
+ r = get( api_key, url )
+ except urllib2.HTTPError, e:
+ print e
+ print e.read( 1024 ) # Only return the first 1K of errors.
+ sys.exit( 1 )
+ if type( r ) == unicode:
+ print 'error: %s' % r
+ return None
+ if not return_formatted:
+ return r
+ elif type( r ) == list:
+ # Response is a collection as defined in the REST style.
+ print 'Collection Members'
+ print '------------------'
+ for n, i in enumerate(r):
+ # All collection members should have a name and url in the response.
+ print '#%d: %s' % (n+1, i.pop( 'url' ) )
+ if 'name' in i:
+ print ' name: %s' % i.pop( 'name' )
+ for k, v in i.items():
+ print ' %s: %s' % ( k, v )
+ print ''
+ print '%d element(s) in collection' % len( r )
+ elif type( r ) == dict:
+ # Response is an element as defined in the REST style.
+ print 'Member Information'
+ print '------------------'
+ for k, v in r.items():
+ print '%s: %s' % ( k, v )
+ elif type( r ) == str:
+ print r
+ else:
+ print 'response is unknown type: %s' % type( r )
+
+def get( api_key, url ):
+ # Do the actual GET.
+ url = make_url( api_key, url )
+ try:
+ return simplejson.loads( urllib2.urlopen( url ).read() )
+ except simplejson.decoder.JSONDecodeError, e:
+ print "URL did not return JSON data"
+ sys.exit(1)
+
+def make_url( api_key, url, args=None ):
+ # Adds the API Key to the URL if it's not already there.
+ if args is None:
+ args = []
+ argsep = '&'
+ if '?' not in url:
+ argsep = '?'
+ if '?key=' not in url and '&key=' not in url:
+ args.insert( 0, ( 'key', api_key ) )
+ return url + argsep + '&'.join( [ '='.join( t ) for t in args ] )
+
+def post( api_key, url, data ):
+ # Do the actual POST.
+ url = make_url( api_key, url )
+ req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ) )
+ return simplejson.loads( urllib2.urlopen( req ).read() )
+
+def put( api_key, url, data ):
+ # Do the actual PUT.
+ url = make_url( api_key, url )
+ req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ))
+ req.get_method = lambda: 'PUT'
+ return simplejson.loads( urllib2.urlopen( req ).read() )
+
+def submit( api_key, url, data, return_formatted=True ):
+ # Sends an API POST request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
+ try:
+ r = post( api_key, url, data )
+ except urllib2.HTTPError, e:
+ if return_formatted:
+ print e
+ print e.read( 1024 )
+ sys.exit( 1 )
+ else:
+ return 'Error. '+ str( e.read( 1024 ) )
+ if not return_formatted:
+ return r
+ print 'Response'
+ print '--------'
+ if type( r ) == list:
+ # Currently the only implemented responses are lists of dicts, because submission creates some number of collection elements.
+ for i in r:
+ if type( i ) == dict:
+ if 'url' in i:
+ print i.pop( 'url' )
+ else:
+ print '----'
+ if 'name' in i:
+ print ' name: %s' % i.pop( 'name' )
+ for k, v in i.items():
+ print ' %s: %s' % ( k, v )
+ else:
+ print i
+ else:
+ print r
+
+def update( api_key, url, data, return_formatted=True ):
+ # Sends an API PUT request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy.
+ try:
+ r = put( api_key, url, data )
+ except urllib2.HTTPError, e:
+ if return_formatted:
+ print e
+ print e.read( 1024 )
+ sys.exit( 1 )
+ else:
+ return 'Error. '+ str( e.read( 1024 ) )
+ if not return_formatted:
+ return r
+ print 'Response'
+ print '--------'
+ print r
diff -r e0da441ad10c518658d8be665a0bf6152a6acae0 -r f6743b36ce0a54e49acf06d09e03d01584d55514 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -985,6 +985,17 @@
galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
return galaxy_url
+def have_shed_tool_conf_for_install( trans ):
+ if not trans.app.toolbox.shed_tool_confs:
+ return False
+ migrated_tools_conf_path, migrated_tools_conf_name = os.path.split( trans.app.config.migrated_tools_config )
+ for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
+ shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
+ shed_tool_conf_path, shed_tool_conf_name = os.path.split( shed_tool_conf )
+ if shed_tool_conf_name != migrated_tools_conf_name:
+ return True
+ return False
+
def open_repository_files_folder( trans, folder_path ):
"""Return a list of dictionaries, each of which contains information for a file or directory contained within a directory in a repository file hierarchy."""
try:
@@ -1072,6 +1083,20 @@
"""Return a reversed list of changesets in the repository changelog up to and including the included_upper_bounds_changeset_revision."""
return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision )
+def set_repository_attributes( trans, repository, status, error_message, deleted, uninstalled, remove_from_disk=False ):
+ if remove_from_disk:
+ relative_install_dir = repository.repo_path( trans.app )
+ if relative_install_dir:
+ clone_dir = os.path.abspath( relative_install_dir )
+ shutil.rmtree( clone_dir )
+ log.debug( "Removed repository installation directory: %s" % str( clone_dir ) )
+ repository.error_message = error_message
+ repository.status = status
+ repository.deleted = deleted
+ repository.uninstalled = uninstalled
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+
def strip_path( fpath ):
"""Attempt to strip the path from a file name."""
if not fpath:
diff -r e0da441ad10c518658d8be665a0bf6152a6acae0 -r f6743b36ce0a54e49acf06d09e03d01584d55514 lib/tool_shed/util/tool_util.py
--- a/lib/tool_shed/util/tool_util.py
+++ b/lib/tool_shed/util/tool_util.py
@@ -1,12 +1,17 @@
-import filecmp, logging, os, shutil, tempfile
-import tool_shed.util.shed_util_common as suc
+import filecmp
+import logging
+import os
+import shutil
+import tempfile
+import galaxy.tools
from galaxy import util
-import galaxy.tools
-from galaxy.tools.search import ToolBoxSearch
+from galaxy.datatypes import checkers
from galaxy.model.orm import and_
-from galaxy.datatypes import checkers
from galaxy.tools import parameters
from galaxy.tools.parameters import dynamic_options
+from galaxy.tools.search import ToolBoxSearch
+from galaxy.web.form_builder import SelectField
+import tool_shed.util.shed_util_common as suc
from galaxy import eggs
import pkg_resources
@@ -79,6 +84,33 @@
app.toolbox.write_integrated_tool_panel_config_file()
app.toolbox_search = ToolBoxSearch( app.toolbox )
+def build_shed_tool_conf_select_field( trans ):
+ """Build a SelectField whose options are the keys in trans.app.toolbox.shed_tool_confs."""
+ options = []
+ for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
+ shed_tool_conf_filename = shed_tool_conf_dict[ 'config_filename' ]
+ if shed_tool_conf_filename != trans.app.config.migrated_tools_config:
+ if shed_tool_conf_filename.startswith( './' ):
+ option_label = shed_tool_conf_filename.replace( './', '', 1 )
+ else:
+ option_label = shed_tool_conf_filename
+ options.append( ( option_label, shed_tool_conf_filename ) )
+ select_field = SelectField( name='shed_tool_conf' )
+ for option_tup in options:
+ select_field.add_option( option_tup[ 0 ], option_tup[ 1 ] )
+ return select_field
+
+def build_tool_panel_section_select_field( trans ):
+ """Build a SelectField whose options are the sections of the current in-memory toolbox."""
+ options = []
+ for k, v in trans.app.toolbox.tool_panel.items():
+ if isinstance( v, galaxy.tools.ToolSection ):
+ options.append( ( v.name, v.id ) )
+ select_field = SelectField( name='tool_panel_section', display='radio' )
+ for option_tup in options:
+ select_field.add_option( option_tup[ 0 ], option_tup[ 1 ] )
+ return select_field
+
def can_use_tool_config_disk_file( trans, repository, repo, file_path, changeset_revision ):
"""
Determine if repository's tool config file on disk can be used. This method is restricted to tool config files since, with the
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
15 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ca59c76da865/
Changeset: ca59c76da865
Branch: next-stable
User: natefoo
Date: 2013-03-20 17:55:52
Summary: Fix for processing Torque's 'E' state in the CLI runner.
Affected #: 1 file
diff -r 6a80f6558febba87a6e32b9533256543597e8d30 -r ca59c76da865a29e14dd77f9152d2ae46126f11d lib/galaxy/jobs/runners/cli_job/torque.py
--- a/lib/galaxy/jobs/runners/cli_job/torque.py
+++ b/lib/galaxy/jobs/runners/cli_job/torque.py
@@ -128,5 +128,6 @@
return job_states.OK
def __get_job_state(self, state):
- return { 'R' : job_states.RUNNING,
+ return { 'E' : job_states.RUNNING,
+ 'R' : job_states.RUNNING,
'Q' : job_states.QUEUED }.get(state, state)
https://bitbucket.org/galaxy/galaxy-central/commits/ea325de13398/
Changeset: ea325de13398
Branch: next-stable
User: natefoo
Date: 2013-03-20 17:56:13
Summary: Bugfixes for the pbs job runner and the move to job destinations.
Affected #: 1 file
diff -r ca59c76da865a29e14dd77f9152d2ae46126f11d -r ea325de133988fd36eeda175fd8df45081be4412 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -141,6 +141,9 @@
def url_to_destination(self, url):
"""Convert a legacy URL to a job destination"""
+ if not url:
+ return
+
# Determine the the PBS server
url_split = url.split("/")
server = url_split[2]
@@ -161,6 +164,9 @@
try:
opts = url.split('/')[4].strip().lstrip('-').split(' -')
assert opts != ['']
+ # stripping the - comes later (in parse_destination_params)
+ for i, opt in enumerate(opts):
+ opts[i] = '-' + opt
except:
opts = []
for opt in opts:
@@ -210,6 +216,8 @@
return rval
def __get_pbs_server(self, job_destination_params):
+ if job_destination_params is None:
+ return None
return job_destination_params['destination'].split('@')[-1]
def queue_job( self, job_wrapper ):
@@ -600,9 +608,13 @@
try:
pbs_server_name = self.__get_pbs_server( job.destination_params )
+ if pbs_server_name is None:
+ log.debug("(%s) Job queued but no destination stored in job params, cannot delete"
+ % job_tag )
+ return
c = pbs.pbs_connect( pbs_server_name )
if c <= 0:
- log.debug("%s Connection to PBS server for job delete failed"
+ log.debug("(%s) Connection to PBS server for job delete failed"
% job_tag )
return
pbs.pbs_deljob( c, job.get_job_runner_external_id(), '' )
https://bitbucket.org/galaxy/galaxy-central/commits/86668e4a72fc/
Changeset: 86668e4a72fc
Branch: next-stable
User: natefoo
Date: 2013-03-20 18:11:18
Summary: Add cleanup when skipping a queued job that was deleted.
Affected #: 1 file
diff -r ea325de133988fd36eeda175fd8df45081be4412 -r 86668e4a72fc11d5bf0161e567b9c6c082560e63 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -103,8 +103,14 @@
job_wrapper.is_ready = False
# Make sure the job hasn't been deleted
- if job_state != model.Job.states.QUEUED:
+ if job_state == model.Job.states.DELETED:
+ log.debug( "(%s) Job deleted by user before it entered the %s queue" % ( job_id, self.runner_name ) )
+ if self.app.config.cleanup_job in ( "always", "onsuccess" ):
+ job_wrapper.cleanup()
+ return
+ elif job_state != model.Job.states.QUEUED:
log.info( "(%d) Job is in state %s, skipping execution" % ( job_id, job_state ) )
+ # cleanup may not be safe in all states
return
# Prepare the job
https://bitbucket.org/galaxy/galaxy-central/commits/ae19f508eaa9/
Changeset: ae19f508eaa9
Branch: next-stable
User: natefoo
Date: 2013-03-20 18:11:45
Summary: Have the PBS runner use the superclass convenience method for pre-queueing sanity checks.
Affected #: 1 file
diff -r 86668e4a72fc11d5bf0161e567b9c6c082560e63 -r ae19f508eaa989a6d16ed59de1308ce8e05b0ad4 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -222,29 +222,16 @@
def queue_job( self, job_wrapper ):
"""Create PBS script for a job and submit it to the PBS queue"""
+ # Superclass method has some basic sanity checks
+ super( LocalJobRunner, self ).queue_job( job_wrapper )
+ if not job_wrapper.is_ready:
+ return
- try:
- job_wrapper.prepare()
- command_line = self.build_command_line( job_wrapper, include_metadata=not( self.app.config.pbs_stage_path ) )
- except:
- job_wrapper.fail( "failure preparing job", exception=True )
- log.exception("failure running job %d" % job_wrapper.job_id)
- return
+ # command line has been added to the wrapper by the superclass queue_job()
+ command_line = job_wrapper.runner_command_line
job_destination = job_wrapper.job_destination
- # This is silly, why would we queue a job with no command line?
- if not command_line:
- job_wrapper.finish( '', '' )
- return
-
- # Check for deletion before we change state
- if job_wrapper.get_state() == model.Job.states.DELETED:
- log.debug( "Job %s deleted by user before it entered the PBS queue" % job_wrapper.job_id )
- if self.app.config.cleanup_job in ( "always", "onsuccess" ):
- job_wrapper.cleanup()
- return
-
# Determine the job's PBS destination (server/queue) and options from the job destination definition
pbs_queue_name = None
pbs_server_name = self.default_pbs_server
https://bitbucket.org/galaxy/galaxy-central/commits/d0fa91fc995e/
Changeset: d0fa91fc995e
Branch: next-stable
User: natefoo
Date: 2013-03-21 18:58:32
Summary: Standardize some job state properties common to asynchronous runners in AsynchronousJobState.
Affected #: 1 file
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/c4535b8d46d5/
Changeset: c4535b8d46d5
Branch: next-stable
User: natefoo
Date: 2013-03-21 18:59:21
Summary: Convert the CLI runner to use job destinations.
Affected #: 2 files
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/929fd34026bf/
Changeset: 929fd34026bf
Branch: next-stable
User: natefoo
Date: 2013-03-22 18:11:57
Summary: Fix URL->destination conversion of jobs running prior to upgrade to destinations.
Affected #: 1 file
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/b4da62a0e089/
Changeset: b4da62a0e089
Branch: next-stable
User: natefoo
Date: 2013-03-22 18:13:05
Summary: Refactor common operations into AsynchronousJobState/AsynchronousJobRunner.
Affected #: 1 file
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/67c5a41aaaf4/
Changeset: 67c5a41aaaf4
Branch: next-stable
User: natefoo
Date: 2013-03-22 18:14:25
Summary: Convert drmaa runner from URLs to destinations.
Affected #: 1 file
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/88c55e8ae98e/
Changeset: 88c55e8ae98e
Branch: next-stable
User: natefoo
Date: 2013-03-22 18:14:50
Summary: Convert condor runner from URLs to destinations.
Affected #: 1 file
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/eb4183e82817/
Changeset: eb4183e82817
Branch: next-stable
User: natefoo
Date: 2013-03-22 22:58:55
Summary: Additional CLI runner fixes for destinations.
Affected #: 1 file
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/1ea94ed3afa2/
Changeset: 1ea94ed3afa2
Branch: next-stable
User: natefoo
Date: 2013-03-22 23:04:52
Summary: Don't finish jobs that are deleted.
Affected #: 3 files
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/99b5a77e9840/
Changeset: 99b5a77e9840
Branch: next-stable
User: natefoo
Date: 2013-03-22 23:07:40
Summary: Additional examples for the job configuration sample.
Affected #: 1 file
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/c82a139db1c9/
Changeset: c82a139db1c9
Branch: next-stable
User: natefoo
Date: 2013-03-22 23:08:45
Summary: Merge.
Affected #: 42 files
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/e0da441ad10c/
Changeset: e0da441ad10c
User: natefoo
Date: 2013-03-22 23:09:33
Summary: Merge next-stable.
Affected #: 8 files
Diff not available.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Add an expose_api_anonymous decorator to the Galaxy web framework to enable exposing API methods without requiring an API key or a Galaxy or Tool Shed user session.
by commits-noreply@bitbucket.org 22 Mar '13
by commits-noreply@bitbucket.org 22 Mar '13
22 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d1f25e3734f1/
Changeset: d1f25e3734f1
User: greg
Date: 2013-03-22 21:26:07
Summary: Add an expose_api_anonymous decorator to the Galaxy web framework to enable exposing API methods without requiring an API key or a Galaxy or Tool Shed user session.
Affected #: 2 files
diff -r dab01cbc83dceddd9f02a48deb32c7ffb1817eba -r d1f25e3734f1e24cb4655758dbeaf44186f5af9a lib/galaxy/web/__init__.py
--- a/lib/galaxy/web/__init__.py
+++ b/lib/galaxy/web/__init__.py
@@ -1,7 +1,16 @@
"""
The Galaxy web application framework
"""
-
-from framework import expose, json, json_pretty, require_login, require_admin, url_for, error, form, FormBuilder, expose_api, expose_api_raw
+from framework import expose
+from framework import json
+from framework import json_pretty
+from framework import require_login
+from framework import require_admin
+from framework import url_for
+from framework import error
+from framework import form
+from framework import FormBuilder
+from framework import expose_api
+from framework import expose_api_anonymous
+from framework import expose_api_raw
from framework.base import httpexceptions
-
diff -r dab01cbc83dceddd9f02a48deb32c7ffb1817eba -r d1f25e3734f1e24cb4655758dbeaf44186f5af9a lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -106,7 +106,13 @@
"""
return expose_api( func, to_json=False )
-def expose_api( func, to_json=True ):
+def expose_api_anonymous( func, to_json=True ):
+ """
+ Expose this function via the API but don't require an API key.
+ """
+ return expose_api( func, to_json=to_json, key_required=False )
+
+def expose_api( func, to_json=True, key_required=True ):
@wraps(func)
def decorator( self, trans, *args, **kwargs ):
def error( environ, start_response ):
@@ -114,7 +120,7 @@
return error_message
error_status = '403 Forbidden'
## If there is a user, we've authenticated a session.
- if not trans.user and isinstance(trans.galaxy_session, Bunch):
+ if key_required and not trans.user and isinstance( trans.galaxy_session, Bunch ):
# If trans.user is already set, don't check for a key.
# This happens when we're authenticating using session instead of an API key.
# The Bunch clause is used to prevent the case where there's no user, but there is a real session.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0