commit/galaxy-central: 4 new changesets
4 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/changeset/d960803cbacc/
changeset: d960803cbacc
user:      jmchilton
date:      2012-08-09 22:15:23
summary:   JJ's initial work on extending the API to allow operations on groups.
affected #: 5 files

diff -r 1cb2fdf2c7cf22d74ec46e6877609f7056372d7c -r d960803cbacc5cdeea67ffa9a26010adba1120d6 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -544,7 +544,10 @@
         else:
             return False
 
-class Group( object ):
+class Group( object, APIItem ):
+    api_collection_visible_keys = ( 'id', 'name' )
+    api_element_visible_keys = ( 'id', 'name' )
+
     def __init__( self, name = None ):
         self.name = name
         self.deleted = False

diff -r 1cb2fdf2c7cf22d74ec46e6877609f7056372d7c -r d960803cbacc5cdeea67ffa9a26010adba1120d6 lib/galaxy/web/api/group_roles.py
--- /dev/null
+++ b/lib/galaxy/web/api/group_roles.py
@@ -0,0 +1,125 @@
+"""
+API operations on Group objects.
+"""
+import logging
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+
+log = logging.getLogger( __name__ )
+
+class GroupRolesAPIController( BaseAPIController ):
+
+    @web.expose_api
+    @web.require_admin
+    def index( self, trans, group_id, **kwd ):
+        """
+        GET /api/groups/{encoded_group_id}/roles
+        Displays a collection (list) of roles in a group.
+        """
+        decoded_group_id = trans.security.decode_id( group_id )
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+        except:
+            group = None
+        if not group:
+            trans.response.status = 400
+            return "Invalid group id ( %s ) specified." % str( group_id )
+        rval = []
+        try:
+            for gra in group.roles:
+                role = gra.role
+                encoded_id = trans.security.encode_id( role.id )
+                rval.append( dict( id = encoded_id,
+                                   name = role.name,
+                                   url = url_for( 'group_role', group_id=group_id, id=encoded_id, ) ) )
+        except Exception, e:
+            rval = "Error in group API at listing roles"
+            log.error( rval + ": %s" % str(e) )
+            trans.response.status = 500
+        return rval
+
+    @web.expose_api
+    @web.require_admin
+    def show( self, trans, id, group_id, **kwd ):
+        """
+        GET /api/groups/{encoded_group_id}/roles/{encoded_role_id}
+        Displays information about a group role.
+ """ + role_id = id + decoded_group_id = trans.security.decode_id( group_id ) + decoded_role_id = trans.security.decode_id( role_id ) + item = None + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id ) + for gra in group.roles: + if gra.role == role: + item = dict( id = role_id, + name = role.name, + url = url_for( 'group_role', group_id=group_id, id=role_id) ) # TODO Fix This + if not item: + item = "role %s not in group %s" % (role.name,group.name) + except Exception, e: + item = "Error in group_role API group %s role %s" % (group.name, role.name) + log.error(item + ": %s" % str(e)) + return item + + @web.expose_api + @web.require_admin + def update( self, trans, id, group_id, **kwd ): + """ + PUT /api/groups/{encoded_group_id}/roles/{encoded_role_id} + Adds a role to a group + """ + role_id = id + decoded_group_id = trans.security.decode_id( group_id ) + decoded_role_id = trans.security.decode_id( role_id ) + item = None + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id ) + for gra in group.roles: + if gra.role == role: + item = dict( id = role_id, + name = role.name, + url = url_for( 'group_role', group_id=group_id, id=role_id) ) + if not item: + gra = trans.app.model.GroupRoleAssociation( group, role ) + # Add GroupRoleAssociation + trans.sa_session.add( gra ) + trans.sa_session.flush() + item = dict( id = role_id, + name = role.name, + url = url_for( 'group_role', group_id=group_id, id=role_id) ) + except Exception, e: + item = "Error in group_role API Adding role %s to group %s" % (role.name,group.name) + log.error(item + ": %s" % str(e)) + return item + + @web.expose_api + @web.require_admin + def delete( self, trans, id, group_id, **kwd ): + """ + DELETE /api/groups/{encoded_group_id}/roles/{encoded_role_id} + Removes a role from a group + """ + role_id = id + decoded_group_id = trans.security.decode_id( group_id ) + decoded_role_id = trans.security.decode_id( role_id ) + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id ) + for gra in group.roles: + if gra.role == role: + trans.sa_session.delete( gra ) + trans.sa_session.flush() + item = dict( id = role_id, + name = role.name, + url = url_for( 'group_role', group_id=group_id, id=role_id) ) + if not item: + item = "role %s not in group %s" % (role.name,group.name) + except Exception, e: + item = "Error in group_role API Removing role %s from group %s" % (role.name,group.name) + log.error(item + ": %s" % str(e)) + return item diff -r 1cb2fdf2c7cf22d74ec46e6877609f7056372d7c -r d960803cbacc5cdeea67ffa9a26010adba1120d6 lib/galaxy/web/api/group_users.py --- /dev/null +++ b/lib/galaxy/web/api/group_users.py @@ -0,0 +1,124 @@ +""" +API operations on Group objects. +""" +import logging +from galaxy.web.base.controller import BaseAPIController, url_for +from galaxy import web + +log = logging.getLogger( __name__ ) + +class GroupUsersAPIController( BaseAPIController ): + + @web.expose_api + @web.require_admin + def index( self, trans, group_id, **kwd ): + """ + GET /api/groups/{encoded_group_id}/users + Displays a collection (list) of groups. 
+ """ + decoded_group_id = trans.security.decode_id( group_id ) + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + except: + group = None + if not group: + trans.response.status = 400 + return "Invalid group id ( %s ) specified." % str( group_id ) + rval = [] + try: + for uga in group.users: + user = uga.user + encoded_id = trans.security.encode_id( user.id ) + rval.append( dict( id = encoded_id, + email = user.email, + url = url_for( 'group_user', group_id=group_id, id=encoded_id, ) ) ) + except Exception, e: + rval = "Error in group API at listing users" + log.error( rval + ": %s" % str(e) ) + trans.response.status = 500 + return rval + + @web.expose_api + @web.require_admin + def show( self, trans, id, group_id, **kwd ): + """ + GET /api/groups/{encoded_group_id}/users/{encoded_user_id} + Displays information about a group user. + """ + user_id = id + decoded_group_id = trans.security.decode_id( group_id ) + decoded_user_id = trans.security.decode_id( user_id ) + item = None + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id ) + for uga in group.users: + if uga.user == user: + item = dict( id = user_id, + email = user.email, + url = url_for( 'group_user', group_id=group_id, id=user_id) ) # TODO Fix This + if not item: + item = "user %s not in group %s" % (user.email,group.name) + except Exception, e: + item = "Error in group_user API group %s user %s" % (group.name, user.email) + log.error(item + ": %s" % str(e)) + return item + + @web.expose_api + @web.require_admin + def update( self, trans, id, group_id, **kwd ): + """ + PUT /api/groups/{encoded_group_id}/users/{encoded_user_id} + Adds a user to a group + """ + user_id = id + decoded_group_id = trans.security.decode_id( group_id ) + decoded_user_id = trans.security.decode_id( user_id ) + item = None + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id ) + for uga in group.users: + if uga.user == user: + item = dict( id = user_id, + email = user.email, + url = url_for( 'group_user', group_id=group_id, id=user_id) ) + if not item: + uga = trans.app.model.UserGroupAssociation( user, group ) + # Add UserGroupAssociations + trans.sa_session.add( uga ) + trans.sa_session.flush() + item = dict( id = user_id, + email = user.email, + url = url_for( 'group_user', group_id=group_id, id=user_id) ) + except Exception, e: + item = "Error in group_user API Adding user %s to group %s" % (user.email,group.name) + log.error(item + ": %s" % str(e)) + return item + + @web.expose_api + @web.require_admin + def delete( self, trans, id, group_id, **kwd ): + """ + DELETE /api/groups/{encoded_group_id}/users/{encoded_user_id} + Removes a user from a group + """ + user_id = id + decoded_group_id = trans.security.decode_id( group_id ) + decoded_user_id = trans.security.decode_id( user_id ) + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id ) + for uga in group.users: + if uga.user == user: + trans.sa_session.delete( uga ) + trans.sa_session.flush() + item = dict( id = user_id, + email = user.email, + url = url_for( 'group_user', group_id=group_id, id=user_id) ) + if not item: + item = "user %s not in group %s" % (user.email,group.name) + except Exception, e: + item = "Error in 
+            log.error(item + ": %s" % str(e))
+        return item

diff -r 1cb2fdf2c7cf22d74ec46e6877609f7056372d7c -r d960803cbacc5cdeea67ffa9a26010adba1120d6 lib/galaxy/web/api/groups.py
--- /dev/null
+++ b/lib/galaxy/web/api/groups.py
@@ -0,0 +1,128 @@
+"""
+API operations on Group objects.
+"""
+import logging
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+
+
+log = logging.getLogger( __name__ )
+
+
+class GroupAPIController( BaseAPIController ):
+
+    @web.expose_api
+    @web.require_admin
+    def index( self, trans, **kwd ):
+        """
+        GET /api/groups
+        Displays a collection (list) of groups.
+        """
+        rval = []
+        for group in trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.deleted == False ):
+            if trans.user_is_admin():
+                item = group.get_api_value( value_mapper={ 'id': trans.security.encode_id } )
+                encoded_id = trans.security.encode_id( group.id )
+                item['url'] = url_for( 'group', id=encoded_id )
+                rval.append( item )
+        return rval
+
+    @web.expose_api
+    def create( self, trans, payload, **kwd ):
+        """
+        POST /api/groups
+        Creates a new group.
+        """
+        log.info("groups payload: %s\n" % (payload))
+        if not trans.user_is_admin():
+            trans.response.status = 403
+            return "You are not authorized to create a new group."
+        name = payload.get( 'name', None )
+        if not name:
+            trans.response.status = 400
+            return "Enter a valid name"
+        if trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name==name ).first():
+            trans.response.status = 400
+            return "A group with that name already exists"
+
+        group = trans.app.model.Group( name=name )
+        trans.sa_session.add( group )
+        user_ids = payload.get( 'user_ids', [] )
+        for i in user_ids:
+            log.info("user_id: %s\n" % ( i ))
+            log.info("%s %s\n" % ( i, trans.security.decode_id( i ) ))
+        users = [ trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( i ) ) for i in user_ids ]
+        role_ids = payload.get( 'role_ids', [] )
+        roles = [ trans.sa_session.query( trans.model.Role ).get( trans.security.decode_id( i ) ) for i in role_ids ]
+        trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=roles, users=users )
+        """
+        # Create the UserGroupAssociations
+        for user in users:
+            trans.app.security_agent.associate_user_group( user, group )
+        # Create the GroupRoleAssociations
+        for role in roles:
+            trans.app.security_agent.associate_group_role( group, role )
+        """
+        trans.sa_session.flush()
+        encoded_id = trans.security.encode_id( group.id )
+        item = group.get_api_value( view='element', value_mapper={ 'id': trans.security.encode_id } )
+        item['url'] = url_for( 'group', id=encoded_id )
+        return [ item ]
+
+    @web.expose_api
+    @web.require_admin
+    def show( self, trans, id, **kwd ):
+        """
+        GET /api/groups/{encoded_group_id}
+        Displays information about a group.
+        """
+        group_id = id
+        try:
+            decoded_group_id = trans.security.decode_id( group_id )
+        except TypeError:
+            trans.response.status = 400
+            return "Malformed group id ( %s ) specified, unable to decode." % str( group_id )
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+        except:
+            group = None
+        if not group:
+            trans.response.status = 400
+            return "Invalid group id ( %s ) specified." % str( group_id )
+        item = group.get_api_value( view='element', value_mapper={ 'id': trans.security.encode_id } )
+        item['url'] = url_for( 'group', id=group_id )
+        item['users_url'] = url_for( 'group_users', group_id=group_id )
+        item['roles_url'] = url_for( 'group_roles', group_id=group_id )
+        return item
+
+    @web.expose_api
+    @web.require_admin
+    def update( self, trans, id, payload, **kwd ):
+        """
+        PUT /api/groups/{encoded_group_id}
+        Modifies a group.
+        """
+        group_id = id
+        try:
+            decoded_group_id = trans.security.decode_id( group_id )
+        except TypeError:
+            trans.response.status = 400
+            return "Malformed group id ( %s ) specified, unable to decode." % str( group_id )
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+        except:
+            group = None
+        if not group:
+            trans.response.status = 400
+            return "Invalid group id ( %s ) specified." % str( group_id )
+        name = payload.get( 'name', None )
+        if name:
+            group.name = name
+            trans.sa_session.add(group)
+        user_ids = payload.get( 'user_ids', [] )
+        users = [ trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( i ) ) for i in user_ids ]
+        role_ids = payload.get( 'role_ids', [] )
+        roles = [ trans.sa_session.query( trans.model.Role ).get( trans.security.decode_id( i ) ) for i in role_ids ]
+        trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=roles, users=users, delete_existing_assocs=False )
+        trans.sa_session.flush()
+

diff -r 1cb2fdf2c7cf22d74ec46e6877609f7056372d7c -r d960803cbacc5cdeea67ffa9a26010adba1120d6 lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -122,6 +122,18 @@
                                 'permissions',
                                 path_prefix='/api/libraries/:library_id',
                                 parent_resources=dict( member_name='library', collection_name='libraries' ) )
+    webapp.api_mapper.resource( 'user',
+                                'users',
+                                controller='group_users',
+                                name_prefix='group_',
+                                path_prefix='/api/groups/:group_id',
+                                parent_resources=dict( member_name='group', collection_name='groups' ) )
+    webapp.api_mapper.resource( 'role',
+                                'roles',
+                                controller='group_roles',
+                                name_prefix='group_',
+                                path_prefix='/api/groups/:group_id',
+                                parent_resources=dict( member_name='group', collection_name='groups' ) )
     webapp.api_mapper.resource( 'dataset', 'datasets', path_prefix='/api' )
     webapp.api_mapper.resource_with_deleted( 'library', 'libraries', path_prefix='/api' )
     webapp.api_mapper.resource( 'sample', 'samples', path_prefix='/api' )
@@ -129,6 +141,7 @@
     webapp.api_mapper.resource( 'form', 'forms', path_prefix='/api' )
     webapp.api_mapper.resource( 'request_type', 'request_types', path_prefix='/api' )
     webapp.api_mapper.resource( 'role', 'roles', path_prefix='/api' )
+    webapp.api_mapper.resource( 'group', 'groups', path_prefix='/api' )
     webapp.api_mapper.resource_with_deleted( 'quota', 'quotas', path_prefix='/api' )
     webapp.api_mapper.resource( 'tool', 'tools', path_prefix='/api' )
     webapp.api_mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' )
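The net effect of the routing additions above is a small REST-style surface for groups: GET/POST on /api/groups, GET/PUT on /api/groups/{group_id}, and per-group GET/PUT/DELETE on the nested users and roles members. As a quick way to exercise the new endpoints, here is a minimal smoke-test sketch; it is not part of the changeset, and it assumes a local Galaxy at localhost:8080, an admin user's API key in MY_ADMIN_KEY, the stdlib json module (simplejson also works on older Pythons), and a placeholder ENCODED_USER_ID taken from /api/users:

    # Hedged sketch: list groups, create one, then add a user to it.
    # localhost:8080, MY_ADMIN_KEY and ENCODED_USER_ID are assumptions.
    import json, urllib2

    base = 'http://localhost:8080/api'
    key = 'MY_ADMIN_KEY'

    def call( url, data=None, method=None ):
        # The API authenticates via a 'key' query parameter; payloads are JSON.
        req = urllib2.Request( '%s?key=%s' % ( url, key ),
                               data=None if data is None else json.dumps( data ),
                               headers={ 'Content-Type': 'application/json' } )
        if method:
            req.get_method = lambda: method
        return json.loads( urllib2.urlopen( req ).read() )

    print call( '%s/groups' % base )                                      # index (admin only)
    group = call( '%s/groups' % base, data=dict( name='testers' ) )[ 0 ]  # create returns [ item ]
    print call( '%s/groups/%s/users/%s' % ( base, group[ 'id' ], 'ENCODED_USER_ID' ),
                data={}, method='PUT' )                                   # update == add user to group

Each element the controllers return is just the model's visible keys plus a url, e.g. { "id": ..., "name": ..., "url": "/api/groups/..." } for a group and { "id": ..., "email": ..., "url": ... } for a group user, so the output of the calls above should be easy to eyeball.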
https://bitbucket.org/galaxy/galaxy-central/changeset/761c58466d97/
changeset: 761c58466d97
user:      jmchilton
date:      2012-08-09 22:18:00
summary:   Merge.
affected #: 19 files

diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 buildbot_setup.sh --- a/buildbot_setup.sh +++ b/buildbot_setup.sh @@ -68,8 +68,13 @@ datatypes_conf.xml.sample universe_wsgi.ini.sample tool_data_table_conf.xml.sample +migrated_tools_conf.xml.sample +tool-data/shared/ensembl/builds.txt.sample +tool-data/shared/igv/igv_build_sites.txt.sample +tool-data/shared/ncbi/builds.txt.sample +tool-data/shared/rviewer/rviewer_build_sites.txt.sample tool-data/shared/ucsc/builds.txt.sample -migrated_tools_conf.xml.sample +tool-data/shared/ucsc/publicbuilds.txt.sample " DIRS=" diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 contrib/multiproccess.sh --- a/contrib/multiproccess.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash - -# copy this script to the top level galaxy directory and modify the following -# for your environment - -web_server_names=(web{0..2}) # server names: web0 web1 web2 -runner_server_names=(runner0) # server name: runner0 - -web_config='universe_wsgi.webapp.ini' -runner_config='universe_wsgi.runner.ini' - -# actually do the requested action - -if [ -z "$1" ]; then - echo "usage: multiprocess.sh <--daemon|--stop-daemon>" - exit 1 -fi - -for server_name in ${web_server_names[@]}; do - echo "[$server_name]" - python ./scripts/paster.py serve $web_config --server-name=$server_name --pid-file=$server_name.pid --log-file=$server_name.log $@ -done -for server_name in ${runner_server_names[@]}; do - echo "[$server_name]" - python ./scripts/paster.py serve $runner_config --server-name=$server_name --pid-file=$server_name.pid --log-file=$server_name.log $@ -done diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/tool_shed/install_manager.py --- a/lib/galaxy/tool_shed/install_manager.py +++ b/lib/galaxy/tool_shed/install_manager.py @@ -2,7 +2,7 @@ Manage automatic installation of tools configured in the xxx.xml files in ~/scripts/migrate_tools (e.g., 0002_tools.xml). All of the tools were at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool shed.
""" -import urllib2 +import urllib2, tempfile from galaxy.tools import ToolSection from galaxy.util.json import from_json_string, to_json_string from galaxy.util.shed_util import * @@ -132,7 +132,7 @@ tool_panel_dict_for_tool_config = generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections ) for k, v in tool_panel_dict_for_tool_config.items(): tool_panel_dict_for_display[ k ] = v - metadata_dict = generate_metadata_using_disk_files( self.toolbox, relative_install_dir, repository_clone_url ) + metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( self.app, relative_install_dir, repository_clone_url ) tool_shed_repository.metadata = metadata_dict self.app.sa_session.add( tool_shed_repository ) self.app.sa_session.flush() @@ -142,7 +142,7 @@ else: tool_dependencies = None if 'tools' in metadata_dict: - work_dir = make_tmp_directory() + work_dir = tempfile.mkdtemp() repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict ) if repository_tools_tups: sample_files = metadata_dict.get( 'sample_files', [] ) @@ -195,7 +195,7 @@ tool_shed_repository.includes_datatypes = True self.app.sa_session.add( tool_shed_repository ) self.app.sa_session.flush() - work_dir = make_tmp_directory() + work_dir = tempfile.mkdtemp() datatypes_config = get_config_from_repository( self.app, 'datatypes_conf.xml', tool_shed_repository, diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/tools/__init__.py --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -4,6 +4,7 @@ import pkg_resources pkg_resources.require( "simplejson" ) +pkg_resources.require( "Mako" ) import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess, random, math, traceback import simplejson @@ -2344,7 +2345,7 @@ command_line = command_line.replace( "\n", " " ).replace( "\r", " " ).strip() except Exception, e: # Modify exception message to be more clear - #e.args = ( 'Error substituting into command line. Params: %r, Command: %s' % ( param_dict, self.command ) ) + #e.args = ( 'Error substituting into command line. Params: %r, Command: %s' % ( param_dict, self.command ), ) raise if self.interpreter: # TODO: path munging for cluster/dataset server relocatability @@ -2441,7 +2442,7 @@ if code: return code( *args, **kwargs ) except Exception, e: - e.args = ( "Error in '%s' hook '%s', original message: %s" % ( self.name, hook_name, e.args[0] ) ) + e.args = ( "Error in '%s' hook '%s', original message: %s" % ( self.name, hook_name, e.args[0] ), ) raise def exec_before_job( self, app, inp_data, out_data, param_dict={} ): pass diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/util/shed_util.py --- a/lib/galaxy/util/shed_util.py +++ b/lib/galaxy/util/shed_util.py @@ -3,6 +3,7 @@ from datetime import date, datetime, timedelta from time import strftime, gmtime from galaxy import util +from galaxy.tools import parameters from galaxy.datatypes.checkers import * from galaxy.util.json import * from galaxy.tools.search import ToolBoxSearch @@ -247,6 +248,52 @@ except: pass return converter_path, display_path +def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ): + """ + Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make + sure the files exist. 
+ """ + invalid_files_and_errors_tups = [] + correction_msg = '' + for input_param in tool.input_params: + if isinstance( input_param, parameters.basic.SelectToolParameter ) and input_param.is_dynamic: + # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist. + options = input_param.dynamic_options or input_param.options + if options: + if options.tool_data_table or options.missing_tool_data_table_name: + # Make sure the repository contains a tool_data_table_conf.xml.sample file. + sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir ) + if sample_tool_data_table_conf: + error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf ) + if error: + invalid_files_and_errors_tups.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) ) + else: + options.missing_tool_data_table_name = None + else: + correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample " + correction_msg += "to the repository that includes the required entry to correct this error.<br/>" + invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) ) + if options.index_file or options.missing_index_file: + # Make sure the repository contains the required xxx.loc.sample file. + index_file = options.index_file or options.missing_index_file + index_file_name = strip_path( index_file ) + sample_found = False + for sample_file in sample_files: + sample_file_name = strip_path( sample_file ) + if sample_file_name == '%s.sample' % index_file_name: + options.index_file = index_file_name + options.missing_index_file = None + if options.tool_data_table: + options.tool_data_table.missing_index_file = None + sample_found = True + break + if not sample_found: + correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file ) + correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name ) + invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) ) + # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. + reset_tool_data_tables( app ) + return invalid_files_and_errors_tups def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ): # Persist the current in-memory list of config_elems to a file named by the value of config_filename. fd, filename = tempfile.mkstemp() @@ -383,7 +430,7 @@ def create_tool_dependency_objects( app, tool_shed_repository, current_changeset_revision, set_status=True ): # Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository. tool_dependency_objects = [] - work_dir = make_tmp_directory() + work_dir = tempfile.mkdtemp() # Get the tool_dependencies.xml file from the repository. tool_dependencies_config = get_config_from_repository( app, 'tool_dependencies.xml', @@ -501,6 +548,76 @@ if not can_generate_dependency_metadata: break return can_generate_dependency_metadata +def generate_metadata_for_changeset_revision( app, repository_files_dir, repository_clone_url ): + """ + Generate metadata for a repository using it's files on disk. 
To generate metadata for changeset revisions older than the repository tip, + the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's + disk files, so the value of repository_files_dir will not always be repository.repo_path (it could be a temporary directory containing a clone). + """ + metadata_dict = {} + invalid_file_tups = [] + invalid_tool_configs = [] + tool_dependencies_config = None + datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repository_files_dir ) + if datatypes_config: + metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict ) + sample_files = get_sample_files_from_disk( repository_files_dir ) + if sample_files: + metadata_dict[ 'sample_files' ] = sample_files + # Find all tool configs and exported workflows. + for root, dirs, files in os.walk( repository_files_dir ): + if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0: + if '.hg' in dirs: + dirs.remove( '.hg' ) + for name in files: + # Find all tool configs. + if name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ): + full_path = os.path.abspath( os.path.join( root, name ) ) + if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ] + or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ): + try: + # Make sure we're looking at a tool config and not a display application config or something else. + element_tree = util.parse_xml( full_path ) + element_tree_root = element_tree.getroot() + is_tool = element_tree_root.tag == 'tool' + except Exception, e: + print "Error parsing %s", full_path, ", exception: ", str( e ) + is_tool = False + if is_tool: + try: + tool = app.toolbox.load_tool( full_path ) + except Exception, e: + tool = None + invalid_tool_configs.append( name ) + if tool is not None: + invalid_files_and_errors_tups = check_tool_input_params( app, repository_files_dir, name, tool, sample_files ) + can_set_metadata = True + for tup in invalid_files_and_errors_tups: + if name in tup: + can_set_metadata = False + invalid_tool_configs.append( name ) + break + if can_set_metadata: + metadata_dict = generate_tool_metadata( os.path.join( root, name ), tool, repository_clone_url, metadata_dict ) + else: + invalid_file_tups.extend( invalid_files_and_errors_tups ) + # Find all exported workflows + elif name.endswith( '.ga' ): + relative_path = os.path.join( root, name ) + fp = open( relative_path, 'rb' ) + workflow_text = fp.read() + fp.close() + exported_workflow_dict = from_json_string( workflow_text ) + if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': + metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ) + if 'tools' in metadata_dict: + # This step must be done after metadata for tools has been defined. 
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repository_files_dir ) + if tool_dependencies_config: + metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict ) + if invalid_tool_configs: + metadata_dict [ 'invalid_tools' ] = invalid_tool_configs + return metadata_dict, invalid_file_tups def generate_package_dependency_metadata( elem, tool_dependencies_dict ): """The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set.""" requirements_dict = {} @@ -517,58 +634,6 @@ if requirements_dict: tool_dependencies_dict[ dependency_key ] = requirements_dict return tool_dependencies_dict -def generate_metadata_using_disk_files( toolbox, relative_install_dir, repository_clone_url ): - """Generate metadata using only the repository files on disk - files are not retrieved from the repository manifest.""" - metadata_dict = {} - tool_dependencies_config = None - datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir ) - if datatypes_config: - metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict ) - sample_files = get_sample_files_from_disk( relative_install_dir ) - if sample_files: - metadata_dict[ 'sample_files' ] = sample_files - # Find all tool configs and exported workflows. - for root, dirs, files in os.walk( relative_install_dir ): - if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0: - if '.hg' in dirs: - dirs.remove( '.hg' ) - for name in files: - # Find all tool configs. - if name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ): - full_path = os.path.abspath( os.path.join( root, name ) ) - if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ] - or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ): - try: - # Make sure we're looking at a tool config and not a display application config or something else. - element_tree = util.parse_xml( full_path ) - element_tree_root = element_tree.getroot() - is_tool = element_tree_root.tag == 'tool' - except Exception, e: - log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) ) - is_tool = False - if is_tool: - try: - tool = toolbox.load_tool( full_path ) - except Exception, e: - tool = None - if tool is not None: - tool_config = os.path.join( root, name ) - metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ) - # Find all exported workflows - elif name.endswith( '.ga' ): - relative_path = os.path.join( root, name ) - fp = open( relative_path, 'rb' ) - workflow_text = fp.read() - fp.close() - exported_workflow_dict = from_json_string( workflow_text ) - if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': - metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ) - if 'tools' in metadata_dict: - # This step must be done after metadata for tools has been defined. - tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir ) - if tool_dependencies_config: - metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict ) - return metadata_dict def generate_tool_guid( repository_clone_url, tool ): """ Generate a guid for the installed tool. 
It is critical that this guid matches the guid for @@ -1266,7 +1331,7 @@ def load_installed_datatypes( app, repository, relative_install_dir, deactivate=False ): # Load proprietary datatypes and return information needed for loading proprietary datatypes converters and display applications later. metadata = repository.metadata - work_dir = make_tmp_directory() + work_dir = tempfile.mkdtemp() repository_dict = None datatypes_config = get_config_from_repository( app, 'datatypes_conf.xml', @@ -1293,17 +1358,6 @@ def load_installed_display_applications( app, installed_repository_dict, deactivate=False ): # Load or deactivate proprietary datatype display applications app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict, deactivate=deactivate ) -def make_tmp_directory(): - tmp_dir = os.getenv( 'TMPDIR', '' ) - if tmp_dir: - tmp_dir = tmp_dir.strip() - else: - home_dir = os.getenv( 'HOME' ) - tmp_dir = os.path.join( home_dir, 'tmp' ) - work_dir = os.path.join( tmp_dir, 'work_tmp' ) - if not os.path.exists( work_dir ): - os.makedirs( work_dir ) - return work_dir def open_repository_files_folder( trans, folder_path ): try: files_list = get_repository_files( trans, folder_path ) diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/web/controllers/admin_toolshed.py --- a/lib/galaxy/web/controllers/admin_toolshed.py +++ b/lib/galaxy/web/controllers/admin_toolshed.py @@ -1,4 +1,4 @@ -import urllib2 +import urllib2, tempfile from galaxy.web.controllers.admin import * from galaxy.util.json import from_json_string, to_json_string from galaxy.util.shed_util import * @@ -522,7 +522,7 @@ # Get the tool_shed_repository from one of the tool_dependencies. message = '' tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository - work_dir = make_tmp_directory() + work_dir = tempfile.mkdtemp() # Get the tool_dependencies.xml file from the repository. tool_dependencies_config = get_config_from_repository( trans.app, 'tool_dependencies.xml', @@ -654,7 +654,7 @@ message += "from the installed repository's <b>Repository Actions</b> menu. " status = 'error' if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata: - work_dir = make_tmp_directory() + work_dir = tempfile.mkdtemp() # Install tool dependencies. update_tool_shed_repository_status( trans.app, tool_shed_repository, @@ -684,7 +684,7 @@ Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed) when an admin is installing a new repository or reinstalling an uninstalled repository. """ - metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url ) + metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url ) tool_shed_repository.metadata = metadata_dict trans.sa_session.add( tool_shed_repository ) trans.sa_session.flush() @@ -695,7 +695,7 @@ repository_tools_tups = get_repository_tools_tups( trans.app, metadata_dict ) if repository_tools_tups: # Handle missing data table entries for tool parameters that are dynamically generated select lists. 
- work_dir = make_tmp_directory() + work_dir = tempfile.mkdtemp() repository_tools_tups = handle_missing_data_table_entry( trans.app, tool_shed_repository, tool_shed_repository.changeset_revision, @@ -726,7 +726,7 @@ tool_shed_repository.includes_datatypes = True trans.sa_session.add( tool_shed_repository ) trans.sa_session.flush() - work_dir = make_tmp_directory() + work_dir = tempfile.mkdtemp() datatypes_config = get_config_from_repository( trans.app, 'datatypes_conf.xml', tool_shed_repository, @@ -779,7 +779,7 @@ message = "The repository information has been updated." elif params.get( 'set_metadata_button', False ): repository_clone_url = generate_clone_url( trans, repository ) - metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url ) + metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url ) if metadata_dict: repository.metadata = metadata_dict trans.sa_session.add( repository ) @@ -1479,7 +1479,7 @@ update_repository( repo, latest_ctx_rev ) # Update the repository metadata. tool_shed = clean_tool_shed_url( tool_shed_url ) - metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url ) + metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url ) repository.metadata = metadata_dict # Update the repository changeset_revision in the database. repository.changeset_revision = latest_changeset_revision diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/web/controllers/data_admin.py --- a/lib/galaxy/web/controllers/data_admin.py +++ b/lib/galaxy/web/controllers/data_admin.py @@ -30,25 +30,28 @@ @web.expose @web.require_admin def manage_data( self, trans, **kwd ): - genomes = dict() if trans.app.config.get_bool( 'enable_beta_job_managers', False ) == False: - return trans.fill_template( '/admin/data_admin/betajob.mako' ) - for line in trans.app.tool_data_tables.data_tables[ 'all_fasta' ].data: - defstate = dict( state='Generate', style=self.jobstyles[ 'new' ] ) - indexers = dict( bowtie_indexes=defstate, bowtie2_indexes=defstate, bwa_indexes=defstate, perm_base_indexes=defstate, srma_indexes=defstate, sam_fa_indexes=defstate ) - dbkey = line[0] - name = line[2] - indexers[ 'name' ] = name - indexers[ 'fapath' ] = line[3] - genomes[ dbkey ] = indexers - for table in [ 'bowtie_indexes', 'bowtie2_indexes', 'bwa_indexes', 'srma_indexes' ]: - for line in trans.app.tool_data_tables.data_tables[ table ].data: - dbkey = line[0] - genomes[ dbkey ][ table ] = dict( state='Generated', style=self.jobstyles[ 'done' ] ) - for line in trans.app.tool_data_tables.data_tables[ 'sam_fa_indexes' ].data: - genomes[ line[1] ][ 'sam_fa_indexes' ] = dict( state='Generated', style=self.jobstyles[ 'done' ] ) - for line in trans.app.tool_data_tables.data_tables[ 'perm_base_indexes' ].data: - genomes[ line[1].split(':')[0] ][ 'perm_base_indexes' ] = dict( state='Generated', style=self.jobstyles[ 'done' ] ) + return trans.fill_template( '/admin/data_admin/generic_error.mako', message='This feature requires that enable_beta_job_managers be set to True in your Galaxy configuration.' 
) + if 'all_fasta' not in trans.app.tool_data_tables.data_tables: + return trans.fill_template( '/admin/data_admin/generic_error.mako', message='The local data manager requires that an all_fasta entry exists in your tool_data_table_conf.xml.' ) + indextable = {} + dbkeys = [] + labels = { 'bowtie_indexes': 'Bowtie', 'bowtie2_indexes': 'Bowtie 2', 'bwa_indexes': 'BWA', 'srma_indexes': 'Picard', 'sam_fa_indexes': 'SAM', 'perm_base_indexes': 'PerM' } + tablenames = { 'Bowtie': 'bowtie_indexes', 'Bowtie 2': 'bowtie2_indexes', 'BWA': 'bwa_indexes', 'Picard': 'srma_indexes', 'SAM': 'sam_fa_indexes', 'PerM': 'perm_base_indexes' } + indexfuncs = dict( bowtie_indexes='bowtie', bowtie2_indexes='bowtie2', bwa_indexes='bwa', srma_indexes='picard', sam_fa_indexes='sam', perm_base_indexes='perm' ) + for genome in trans.app.tool_data_tables.data_tables[ 'all_fasta' ].data: + dbkey = genome[0] + dbkeys.append( dbkey ) + indextable[ dbkey ] = dict( indexes=dict(), name=genome[2], path=genome[3] ) + for genome in indextable: + for label in labels: + indextable[ genome ][ 'indexes' ][ label ] = 'Generate' + if label not in trans.app.tool_data_tables.data_tables: + indextable[ genome ][ 'indexes' ][ label ] = 'Disabled' + else: + for row in trans.app.tool_data_tables.data_tables[ label ].data: + if genome in row or row[0].startswith( genome ): + indextable[ genome ][ 'indexes' ][ label ] = 'Generated' jobgrid = [] sa_session = trans.app.model.context.current jobs = sa_session.query( model.GenomeIndexToolData ).order_by( model.GenomeIndexToolData.created_time.desc() ).filter_by( user_id=trans.get_user().id ).group_by( model.GenomeIndexToolData.deferred ).limit( 20 ).all() @@ -65,7 +68,8 @@ jobtype = 'index' indexers = ', '.join( params['indexes'] ) jobgrid.append( dict( jobtype=jobtype, indexers=indexers, rowclass=state, deferred=job.deferred.id, state=state, intname=job.deferred.params[ 'intname' ], dbkey=job.deferred.params[ 'dbkey' ] ) ) - return trans.fill_template( '/admin/data_admin/local_data.mako', jobgrid=jobgrid, genomes=genomes ) + styles = dict( Generate=self.jobstyles['new'], Generated=self.jobstyles['ok'], Disabled=self.jobstyles['error'] ) + return trans.fill_template( '/admin/data_admin/local_data.mako', jobgrid=jobgrid, indextable=indextable, labels=labels, dbkeys=dbkeys, styles=styles, indexfuncs=indexfuncs ) @web.expose @web.require_admin diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/web/controllers/workflow.py --- a/lib/galaxy/web/controllers/workflow.py +++ b/lib/galaxy/web/controllers/workflow.py @@ -655,7 +655,7 @@ 'name': module.get_name(), 'tool_id': module.get_tool_id(), 'tool_state': module.get_state(), - 'tooltip': module.get_tooltip(), + 'tooltip': module.get_tooltip( static_path=url_for( '/static' ) ), 'data_inputs': module.get_data_inputs(), 'data_outputs': module.get_data_outputs(), 'form_html': module.get_config_form(), @@ -732,7 +732,7 @@ 'tool_id': module.get_tool_id(), 'name': module.get_name(), 'tool_state': module.get_state(), - 'tooltip': module.get_tooltip(), + 'tooltip': module.get_tooltip( static_path=url_for( '/static' ) ), 'tool_errors': module.get_errors(), 'data_inputs': module.get_data_inputs(), 'data_outputs': module.get_data_outputs(), diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/webapps/community/controllers/common.py --- a/lib/galaxy/webapps/community/controllers/common.py +++ b/lib/galaxy/webapps/community/controllers/common.py @@ -1,15 +1,13 
@@ -import os, string, socket, logging, simplejson, binascii +import os, string, socket, logging, simplejson, binascii, tempfile from time import strftime from datetime import * from galaxy.datatypes.checkers import * from galaxy.tools import * from galaxy.util.json import from_json_string, to_json_string from galaxy.util.hash_util import * -from galaxy.util.shed_util import clone_repository, copy_sample_file, generate_datatypes_metadata, generate_tool_dependency_metadata, generate_tool_metadata -from galaxy.util.shed_util import generate_workflow_metadata, get_changectx_for_changeset, get_config, get_config_from_disk, get_configured_ui -from galaxy.util.shed_util import get_named_tmpfile_from_ctx, get_sample_files_from_disk, handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH -from galaxy.util.shed_util import make_tmp_directory, NOT_TOOL_CONFIGS, reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path, to_html_escaped -from galaxy.util.shed_util import to_html_str, update_repository +from galaxy.util.shed_util import clone_repository, generate_metadata_for_changeset_revision, get_changectx_for_changeset, get_config_from_disk +from galaxy.util.shed_util import get_configured_ui, get_named_tmpfile_from_ctx, handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH +from galaxy.util.shed_util import reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path from galaxy.web.base.controller import * from galaxy.webapps.community import model from galaxy.model.orm import * @@ -107,11 +105,8 @@ trans.sa_session.flush() return item_rating -## ---- Utility methods ------------------------------------------------------- - def add_repository_metadata_tool_versions( trans, id, changeset_revisions ): - # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } - # pairs for each tool in each changeset revision. + # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision. for index, changeset_revision in enumerate( changeset_revisions ): tool_versions_dict = {} repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) @@ -120,57 +115,24 @@ if metadata: tool_dicts = metadata.get( 'tools', [] ) if index == 0: - # The first changset_revision is a special case because it will have no ancestor - # changeset_revisions in which to match tools. The parent tool id for tools in - # the first changeset_revision will be the "old_id" in the tool config. + # The first changset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools. + # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config. for tool_dict in tool_dicts: tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ] else: for tool_dict in tool_dicts: # We have at least 2 changeset revisions to compare tool guids and tool ids. 
- parent_id = get_parent_id( trans, id, tool_dict[ 'id' ], tool_dict[ 'version' ], tool_dict[ 'guid' ], changeset_revisions[ 0:index ] ) + parent_id = get_parent_id( trans, + id, + tool_dict[ 'id' ], + tool_dict[ 'version' ], + tool_dict[ 'guid' ], + changeset_revisions[ 0:index ] ) tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id if tool_versions_dict: repository_metadata.tool_versions = tool_versions_dict trans.sa_session.add( repository_metadata ) trans.sa_session.flush() -def build_changeset_revision_select_field( trans, repository, selected_value=None, add_id_to_name=True ): - """Build a SelectField whose options are the changeset_rev strings of all downloadable revisions of the received repository.""" - repo = hg.repository( get_configured_ui(), repository.repo_path ) - options = [] - changeset_tups = [] - refresh_on_change_values = [] - for repository_metadata in repository.downloadable_revisions: - changeset_revision = repository_metadata.changeset_revision - ctx = get_changectx_for_changeset( repo, changeset_revision ) - if ctx: - rev = '%04d' % ctx.rev() - label = "%s:%s" % ( str( ctx.rev() ), changeset_revision ) - else: - rev = '-1' - label = "-1:%s" % changeset_revision - changeset_tups.append( ( rev, label, changeset_revision ) ) - refresh_on_change_values.append( changeset_revision ) - # Sort options by the revision label. Even though the downloadable_revisions query sorts by update_time, - # the changeset revisions may not be sorted correctly because setting metadata over time will reset update_time. - for changeset_tup in sorted( changeset_tups ): - # Display the latest revision first. - options.insert( 0, ( changeset_tup[1], changeset_tup[2] ) ) - if add_id_to_name: - name = 'changeset_revision_%d' % repository.id - else: - name = 'changeset_revision' - select_field = SelectField( name=name, - refresh_on_change=True, - refresh_on_change_values=refresh_on_change_values ) - for option_tup in options: - selected = selected_value and option_tup[1] == selected_value - select_field.add_option( option_tup[0], option_tup[1], selected=selected ) - return select_field -def changeset_is_downloadable( metadata_dict ): - # A RepositoryMetadata record will be created if metadata_dict includes only invalid stuff like 'invalid_tools', but in this case - # it won't be downloadable. - return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict def changeset_is_malicious( trans, id, changeset_revision, **kwd ): """Check the malicious flag in repository metadata for a specified change set""" repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) @@ -188,60 +150,12 @@ if user_email in admin_users: return True return False -def check_tool_input_params( trans, repo_dir, tool_config, tool, sample_files, invalid_files ): - """ - Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make - sure the files exist. - """ - can_set_metadata = True - correction_msg = '' - for input_param in tool.input_params: - if isinstance( input_param, tools.parameters.basic.SelectToolParameter ) and input_param.is_dynamic: - # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist. - options = input_param.dynamic_options or input_param.options - if options: - if options.tool_data_table or options.missing_tool_data_table_name: - # Make sure the repository contains a tool_data_table_conf.xml.sample file. 
- sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir ) - if sample_tool_data_table_conf: - error, correction_msg = handle_sample_tool_data_table_conf_file( trans, sample_tool_data_table_conf ) - if error: - can_set_metadata = False - invalid_files.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) ) - else: - options.missing_tool_data_table_name = None - else: - can_set_metadata = False - correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample " - correction_msg += "to the repository that includes the required entry to correct this error.<br/>" - invalid_files.append( ( tool_config, correction_msg ) ) - if options.index_file or options.missing_index_file: - # Make sure the repository contains the required xxx.loc.sample file. - index_file = options.index_file or options.missing_index_file - index_file_name = strip_path( index_file ) - sample_found = False - for sample_file in sample_files: - sample_file_name = strip_path( sample_file ) - if sample_file_name == '%s.sample' % index_file_name: - options.index_file = index_file_name - options.missing_index_file = None - if options.tool_data_table: - options.tool_data_table.missing_index_file = None - sample_found = True - break - if not sample_found: - can_set_metadata = False - correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file ) - correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name ) - invalid_files.append( ( tool_config, correction_msg ) ) - # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - reset_tool_data_tables( trans.app ) - return can_set_metadata, invalid_files def clean_repository_metadata( trans, id, changeset_revisions ): # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions. # We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older # records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later.. - changeset_revisions_checked = [] + changeset_revisions_checked = [] + cleaned_changeset_revisions = [] for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \ .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \ .order_by( trans.model.RepositoryMetadata.table.c.changeset_revision, @@ -251,6 +165,9 @@ if can_delete: trans.sa_session.delete( repository_metadata ) trans.sa_session.flush() + else: + cleaned_changeset_revisions.append( changeset_revision ) + return cleaned_changeset_revisions def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ): # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of # current_changeset_revision which is associated with current_metadata_dict. 
A new repository_metadata record will be created only @@ -369,14 +286,19 @@ return file_path return None def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ): + downloadable = is_downloadable( metadata_dict ) repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) if repository_metadata: repository_metadata.metadata = metadata_dict + repository_metadata.downloadable = downloadable else: - repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict ) - repository_metadata.downloadable = changeset_is_downloadable( metadata_dict ) + repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id, + changeset_revision=changeset_revision, + metadata=metadata_dict, + downloadable=downloadable ) trans.sa_session.add( repository_metadata ) trans.sa_session.flush() + return repository_metadata def generate_clone_url( trans, repository_id ): """Generate the URL for cloning a repository.""" repository = get_repository( trans, repository_id ) @@ -387,69 +309,6 @@ return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name ) else: return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name ) -def generate_metadata_for_changeset_revision( trans, repository_files_dir, repository_clone_url ): - """ - Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip, - the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's - disk files, so the value of repository_files_dir will not always be repository.repo_path (it could be a temporary directory containing a clone). - """ - metadata_dict = {} - invalid_files = [] - invalid_tool_configs = [] - tool_dependencies_config = None - datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repository_files_dir ) - if datatypes_config: - metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict ) - sample_files = get_sample_files_from_disk( repository_files_dir ) - if sample_files: - metadata_dict[ 'sample_files' ] = sample_files - # Find all tool configs and exported workflows. - for root, dirs, files in os.walk( repository_files_dir ): - if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0: - if '.hg' in dirs: - dirs.remove( '.hg' ) - for name in files: - # Find all tool configs. - if name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ): - full_path = os.path.abspath( os.path.join( root, name ) ) - if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ] - or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ): - try: - # Make sure we're looking at a tool config and not a display application config or something else. 
- element_tree = util.parse_xml( full_path ) - element_tree_root = element_tree.getroot() - is_tool = element_tree_root.tag == 'tool' - except Exception, e: - print "Error parsing %s", full_path, ", exception: ", str( e ) - is_tool = False - if is_tool: - try: - tool = trans.app.toolbox.load_tool( full_path ) - tool_config = os.path.join( root, name ) - except Exception, e: - tool = None - invalid_tool_configs.append( name ) - if tool is not None: - can_set_metadata, invalid_files = check_tool_input_params( trans, repository_files_dir, tool_config, tool, sample_files, invalid_files ) - if can_set_metadata: - metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ) - # Find all exported workflows - elif name.endswith( '.ga' ): - relative_path = os.path.join( root, name ) - fp = open( relative_path, 'rb' ) - workflow_text = fp.read() - fp.close() - exported_workflow_dict = from_json_string( workflow_text ) - if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true': - metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ) - if 'tools' in metadata_dict: - # This step must be done after metadata for tools has been defined. - tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repository_files_dir ) - if tool_dependencies_config: - metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict ) - if invalid_tool_configs: - metadata_dict [ 'invalid_tools' ] = invalid_tool_configs - return metadata_dict, invalid_files def generate_tool_guid( trans, repository, tool ): """ Generate a guid for the received tool. The form of the guid is @@ -588,10 +447,23 @@ .first() def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ): """Get metadata for a specified repository change set from the database""" - return trans.sa_session.query( trans.model.RepositoryMetadata ) \ - .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ), - trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \ - .first() + # Make sure there are no duplicate records, and return the single unique record for the changeset_revision. Duplicate records were somehow + # creatd in the past. This may or may not be resolved, so when it is confirmed that the cause of duplicate records has been corrected, tweak + # this method accordingly. + all_metadata_records = trans.sa_session.query( trans.model.RepositoryMetadata ) \ + .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ), + trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \ + .order_by( trans.model.RepositoryMetadata.table.c.update_time.desc() ) \ + .all() + if len( all_metadata_records ) > 1: + # Delete all recrds older than the last one updated. 
+ for repository_metadata in all_metadata_records[ 1: ]: + trans.sa_session.delete( repository_metadata ) + trans.sa_session.flush() + return all_metadata_records[ 0 ] + elif all_metadata_records: + return all_metadata_records[ 0 ] + return None def get_repository_metadata_by_id( trans, id ): """Get repository metadata from the database""" return trans.sa_session.query( trans.model.RepositoryMetadata ).get( trans.security.decode_id( id ) ) @@ -700,6 +572,8 @@ util.send_mail( frm, to, subject, body, trans.app.config ) except Exception, e: log.exception( "An error occurred sending a tool shed repository update alert by email." ) +def is_downloadable( metadata_dict ): + return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict def load_tool( trans, config_file ): """Load a single tool from the file named by `config_file` and return an instance of `Tool`.""" # Parse XML configuration file and get the root element @@ -762,7 +636,7 @@ ctx = get_changectx_for_changeset( repo, changeset_revision ) tool = None message = '' - work_dir = make_tmp_directory() + work_dir = tempfile.mkdtemp() sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir ) if sample_files: trans.app.config.tool_data_path = work_dir @@ -913,7 +787,7 @@ print "Cloning repository revision: ", str( ctx.rev() ) clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) ) print "Generating metadata for changset revision: ", str( ctx.rev() ) - current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, work_dir, repository_clone_url ) + current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, work_dir, repository_clone_url ) if current_metadata_dict: if not metadata_changeset_revision and not metadata_dict: # We're at the first change set in the change log. @@ -935,7 +809,7 @@ elif comparison == 'not equal and not subset': metadata_changeset_revision = ancestor_changeset_revision metadata_dict = ancestor_metadata_dict - create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict ) + repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict ) changeset_revisions.append( metadata_changeset_revision ) ancestor_changeset_revision = current_changeset_revision ancestor_metadata_dict = current_metadata_dict @@ -947,7 +821,7 @@ metadata_changeset_revision = current_changeset_revision metadata_dict = current_metadata_dict # We're at the end of the change log. - create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict ) + repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict ) changeset_revisions.append( metadata_changeset_revision ) ancestor_changeset_revision = None ancestor_metadata_dict = None @@ -958,7 +832,7 @@ metadata_dict = ancestor_metadata_dict if not ctx.children(): # We're at the end of the change log. 
- create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict ) + repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict ) changeset_revisions.append( metadata_changeset_revision ) ancestor_changeset_revision = None ancestor_metadata_dict = None @@ -968,8 +842,9 @@ except: pass # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions. - clean_repository_metadata( trans, id, changeset_revisions ) - add_repository_metadata_tool_versions( trans, id, changeset_revisions ) + cleaned_changeset_revisions = clean_repository_metadata( trans, id, changeset_revisions ) + # Set tool version information for all downloadable changeset revisions. + add_repository_metadata_tool_versions( trans, id, cleaned_changeset_revisions ) def set_repository_metadata( trans, repository, content_alert_str='', **kwd ): """ Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset @@ -980,46 +855,60 @@ repository_clone_url = generate_clone_url( trans, trans.security.encode_id( repository.id ) ) repo_dir = repository.repo_path repo = hg.repository( get_configured_ui(), repo_dir ) - metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, repo_dir, repository_clone_url ) + metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, repo_dir, repository_clone_url ) if metadata_dict: + downloadable = is_downloadable( metadata_dict ) + repository_metadata = None if new_tool_metadata_required( trans, repository, metadata_dict ) or new_workflow_metadata_required( trans, repository, metadata_dict ): # Create a new repository_metadata table row. - repository_metadata = trans.model.RepositoryMetadata( repository.id, repository.tip, metadata_dict ) - trans.sa_session.add( repository_metadata ) - try: - trans.sa_session.flush() - # If this is the first record stored for this repository, see if we need to send any email alerts. - if len( repository.downloadable_revisions ) == 1: - handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False ) - except TypeError, e: - message = "Unable to save metadata for this repository, exception: %s" % str( e ) - status = 'error' + repository_metadata = create_or_update_repository_metadata( trans, + trans.security.encode_id( repository.id ), + repository, + repository.tip, + metadata_dict ) + # If this is the first record stored for this repository, see if we need to send any email alerts. + if len( repository.downloadable_revisions ) == 1: + handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False ) else: repository_metadata = get_latest_repository_metadata( trans, repository.id ) if repository_metadata: + downloadable = is_downloadable( metadata_dict ) # Update the last saved repository_metadata table row. repository_metadata.changeset_revision = repository.tip repository_metadata.metadata = metadata_dict - repository_metadata.downloadable = changeset_is_downloadable( metadata_dict ) + repository_metadata.downloadable = downloadable trans.sa_session.add( repository_metadata ) trans.sa_session.flush() else: # There are no tools in the repository, and we're setting metadata on the repository tip. 
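( create_or_update_repository_metadata() is called throughout this changeset but its definition is not part of the diff; judging only from these call sites, its shape is roughly the sketch below. The body is an inference, not the committed code, and it leans on the is_downloadable() and get_repository_metadata_by_changeset_revision() helpers shown above. )

    def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
        # Update the row for this changeset revision if one exists, otherwise
        # create one, and return it either way so callers can keep a handle on it.
        downloadable = is_downloadable( metadata_dict )
        repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
        if repository_metadata:
            repository_metadata.metadata = metadata_dict
            repository_metadata.downloadable = downloadable
        else:
            repository_metadata = trans.model.RepositoryMetadata( repository.id,
                                                                  changeset_revision,
                                                                  metadata_dict,
                                                                  downloadable=downloadable )
        trans.sa_session.add( repository_metadata )
        trans.sa_session.flush()
        return repository_metadata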
- repository_metadata = trans.model.RepositoryMetadata( repository.id, repository.tip, metadata_dict ) - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() - elif len( repo ) == 1 and not invalid_files: + repository_metadata = create_or_update_repository_metadata( trans, + trans.security.encode_id( repository.id ), + repository, + repository.tip, + metadata_dict ) + if 'tools' in metadata_dict and repository_metadata and status != 'error': + # Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog. + downloadable_changeset_revisions = [ rm.changeset_revision for rm in repository.downloadable_revisions ] + changeset_revisions = [] + for changeset in repo.changelog: + changeset_revision = str( repo.changectx( changeset ) ) + if changeset_revision in downloadable_changeset_revisions: + changeset_revisions.append( changeset_revision ) + # Now append the latest changeset_revision we just updated above. + changeset_revisions.append( repository_metadata.changeset_revision ) + add_repository_metadata_tool_versions( trans, trans.security.encode_id( repository.id ), changeset_revisions ) + elif len( repo ) == 1 and not invalid_file_tups: message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip ) message += "be defined so this revision cannot be automatically installed into a local Galaxy instance." status = "error" - if invalid_files: + if invalid_file_tups: if metadata_dict: message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip ) message += "Correct the following problems if necessary and reset metadata.<br/>" else: message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip ) message += "installed into a local Galaxy instance. 
Correct the following problems and reset metadata.<br/>" - for itc_tup in invalid_files: + for itc_tup in invalid_file_tups: tool_file, exception_msg = itc_tup if exception_msg.find( 'No such file or directory' ) >= 0: exception_items = exception_msg.split() diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/webapps/community/controllers/repository.py --- a/lib/galaxy/webapps/community/controllers/repository.py +++ b/lib/galaxy/webapps/community/controllers/repository.py @@ -9,8 +9,8 @@ from galaxy.web.framework.helpers import time_ago, iff, grids from galaxy.util.json import from_json_string, to_json_string from galaxy.model.orm import * -from galaxy.util.shed_util import create_repo_info_dict, get_changectx_for_changeset, get_configured_ui, get_repository_file_contents, make_tmp_directory, NOT_TOOL_CONFIGS -from galaxy.util.shed_util import open_repository_files_folder, reversed_lower_upper_bounded_changelog, strip_path +from galaxy.util.shed_util import create_repo_info_dict, get_changectx_for_changeset, get_configured_ui, get_repository_file_contents, NOT_TOOL_CONFIGS +from galaxy.util.shed_util import open_repository_files_folder, reversed_lower_upper_bounded_changelog, strip_path, to_html_escaped, update_repository from galaxy.tool_shed.encoding_util import * from common import * @@ -113,7 +113,7 @@ grids.GridColumn.__init__( self, col_name ) def get_value( self, trans, grid, repository ): """Display a SelectField whose options are the changeset_revision strings of all downloadable_revisions of this repository.""" - select_field = build_changeset_revision_select_field( trans, repository ) + select_field = build_changeset_revision_select_field( trans, repository, downloadable_only=False ) if len( select_field.options ) > 1: return select_field.get_html() return repository.revision @@ -268,7 +268,7 @@ grids.GridColumn.__init__( self, col_name ) def get_value( self, trans, grid, repository ): """Display a SelectField whose options are the changeset_revision strings of all download-able revisions of this repository.""" - select_field = build_changeset_revision_select_field( trans, repository ) + select_field = build_changeset_revision_select_field( trans, repository, downloadable_only=True ) if len( select_field.options ) > 1: return select_field.get_html() return repository.revision @@ -1346,19 +1346,14 @@ message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'error' ) webapp = get_webapp( trans, **kwd ) + repository_clone_url = generate_clone_url( trans, repository_id ) repository = get_repository( trans, repository_id ) repo_dir = repository.repo_path repo = hg.repository( get_configured_ui(), repo_dir ) ctx = get_changectx_for_changeset( repo, changeset_revision ) invalid_message = '' - metadata_dict, invalid_files, deleted_sample_files = generate_metadata_for_changeset_revision( trans, - repo, - repository_id, - ctx, - changeset_revision, - repo_dir, - updating_tip=changeset_revision==repository.tip ) - for invalid_file_tup in invalid_files: + metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, repo_dir, repository_clone_url ) + for invalid_file_tup in invalid_file_tups: invalid_tool_config, invalid_msg = invalid_file_tup invalid_tool_config_name = strip_path( invalid_tool_config ) if tool_config == invalid_tool_config_name: @@ -1554,7 +1549,8 @@ changeset_revision_select_field = build_changeset_revision_select_field( trans, repository, 
selected_value=changeset_revision, - add_id_to_name=False ) + add_id_to_name=False, + downloadable_only=False ) revision_label = get_revision_label( trans, repository, changeset_revision ) repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) if repository_metadata: @@ -1657,7 +1653,8 @@ changeset_revision_select_field = build_changeset_revision_select_field( trans, repository, selected_value=changeset_revision, - add_id_to_name=False ) + add_id_to_name=False, + downloadable_only=False ) return trans.fill_template( '/webapps/community/repository/preview_tools_in_changeset.mako', repository=repository, repository_metadata_id=repository_metadata_id, @@ -2128,7 +2125,8 @@ changeset_revision_select_field = build_changeset_revision_select_field( trans, repository, selected_value=changeset_revision, - add_id_to_name=False ) + add_id_to_name=False, + downloadable_only=False ) revision_label = get_revision_label( trans, repository, changeset_revision ) repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) if repository_metadata: @@ -2185,7 +2183,8 @@ changeset_revision_select_field = build_changeset_revision_select_field( trans, repository, selected_value=changeset_revision, - add_id_to_name=False ) + add_id_to_name=False, + downloadable_only=False ) return trans.fill_template( "/webapps/community/repository/view_tool_metadata.mako", repository=repository, tool=tool, @@ -2197,3 +2196,42 @@ webapp=webapp, message=message, status=status ) + +# ----- Utility methods ----- +def build_changeset_revision_select_field( trans, repository, selected_value=None, add_id_to_name=True, downloadable_only=False ): + """Build a SelectField whose options are the changeset_rev strings of all downloadable revisions of the received repository.""" + repo = hg.repository( get_configured_ui(), repository.repo_path ) + options = [] + changeset_tups = [] + refresh_on_change_values = [] + if downloadable_only: + repository_metadata_revisions = repository.downloadable_revisions + else: + repository_metadata_revisions = repository.metadata_revisions + for repository_metadata in repository_metadata_revisions: + changeset_revision = repository_metadata.changeset_revision + ctx = get_changectx_for_changeset( repo, changeset_revision ) + if ctx: + rev = '%04d' % ctx.rev() + label = "%s:%s" % ( str( ctx.rev() ), changeset_revision ) + else: + rev = '-1' + label = "-1:%s" % changeset_revision + changeset_tups.append( ( rev, label, changeset_revision ) ) + refresh_on_change_values.append( changeset_revision ) + # Sort options by the revision label. Even though the downloadable_revisions query sorts by update_time, + # the changeset revisions may not be sorted correctly because setting metadata over time will reset update_time. + for changeset_tup in sorted( changeset_tups ): + # Display the latest revision first. 
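+        # ( The '%04d' zero-padding makes the string sort match numeric order,
+        #   e.g. '0002' < '0010', and inserting each sorted tuple at index 0
+        #   reverses the result, so revisions 1, 2 and 10 are offered as
+        #   10, 2, 1. )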
+ options.insert( 0, ( changeset_tup[1], changeset_tup[2] ) ) + if add_id_to_name: + name = 'changeset_revision_%d' % repository.id + else: + name = 'changeset_revision' + select_field = SelectField( name=name, + refresh_on_change=True, + refresh_on_change_values=refresh_on_change_values ) + for option_tup in options: + selected = selected_value and option_tup[1] == selected_value + select_field.add_option( option_tup[0], option_tup[1], selected=selected ) + return select_field diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/webapps/community/model/__init__.py --- a/lib/galaxy/webapps/community/model/__init__.py +++ b/lib/galaxy/webapps/community/model/__init__.py @@ -166,12 +166,13 @@ fp.close() class RepositoryMetadata( object ): - def __init__( self, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False ): + def __init__( self, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False, downloadable=False ): self.repository_id = repository_id self.changeset_revision = changeset_revision self.metadata = metadata or dict() self.tool_versions = tool_versions or dict() self.malicious = malicious + self.downloadable = downloadable class ItemRatingAssociation( object ): def __init__( self, id=None, user=None, item=None, rating=0, comment='' ): diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/workflow/modules.py --- a/lib/galaxy/workflow/modules.py +++ b/lib/galaxy/workflow/modules.py @@ -49,7 +49,7 @@ return self.name def get_tool_id( self ): return None - def get_tooltip( self ): + def get_tooltip( self, static_path='' ): return None ## ---- Configuration time ----------------------------------------------- @@ -258,8 +258,8 @@ return self.state.encode( self.tool, self.trans.app, secure=secure ) def get_errors( self ): return self.errors - def get_tooltip( self ): - return self.tool.help + def get_tooltip( self, static_path='' ): + return self.tool.help.render( static_path=static_path ) def get_data_inputs( self ): data_inputs = [] def callback( input, value, prefixed_name, prefixed_label ): diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 templates/admin/data_admin/betajob.mako --- a/templates/admin/data_admin/betajob.mako +++ /dev/null @@ -1,35 +0,0 @@ -<%inherit file="/base.mako"/> -<%namespace file="/message.mako" import="render_msg" /> -<%namespace file="/library/common/common.mako" import="common_javascripts" /> - -<%! 
- def inherit(context): - if context.get('use_panels'): - return '/webapps/galaxy/base_panels.mako' - else: - return '/base.mako' -%> -<%inherit file="${inherit(context)}"/> - -<%def name="init()"> -<% - self.has_left_panel=False - self.has_right_panel=False - self.message_box_visible=False - self.active_view="user" - self.overlay_visible=False - self.has_accessible_datasets = False -%> -</%def> -<%def name="stylesheets()"> - ${parent.stylesheets()} - ${h.css( "autocomplete_tagging" )} -</%def> -<%def name="javascripts()"> - ${parent.javascripts()} - ${h.js("jquery.autocomplete", "autocomplete_tagging" )} -</%def> -## -## Override methods from base.mako and base_panels.mako -## -<p class="panel-error-message">This feature requires that enable_beta_job_managers be set to True in your Galaxy configuration.</p> \ No newline at end of file diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 templates/admin/data_admin/generic_error.mako --- /dev/null +++ b/templates/admin/data_admin/generic_error.mako @@ -0,0 +1,35 @@ +<%inherit file="/base.mako"/> +<%namespace file="/message.mako" import="render_msg" /> +<%namespace file="/library/common/common.mako" import="common_javascripts" /> + +<%! + def inherit(context): + if context.get('use_panels'): + return '/webapps/galaxy/base_panels.mako' + else: + return '/base.mako' +%> +<%inherit file="${inherit(context)}"/> + +<%def name="init()"> +<% + self.has_left_panel=False + self.has_right_panel=False + self.message_box_visible=False + self.active_view="user" + self.overlay_visible=False + self.has_accessible_datasets = False +%> +</%def> +<%def name="stylesheets()"> + ${parent.stylesheets()} + ${h.css( "autocomplete_tagging" )} +</%def> +<%def name="javascripts()"> + ${parent.javascripts()} + ${h.js("jquery.autocomplete", "autocomplete_tagging" )} +</%def> +## +## Override methods from base.mako and base_panels.mako +## +<p class="panel-error-message">${message}</p> \ No newline at end of file diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 templates/admin/data_admin/local_data.mako --- a/templates/admin/data_admin/local_data.mako +++ b/templates/admin/data_admin/local_data.mako @@ -44,6 +44,7 @@ td, th { padding-left: 10px; padding-right: 10px; } td.state-color-new { text-decoration: underline; } td.panel-done-message { background-image: none; padding: 0px 10px 0px 10px; } + td.panel-error-message { background-image: none; padding: 0px 10px 0px 10px; } </style><div class="toolForm"> %if message: @@ -52,19 +53,23 @@ <div class="toolFormTitle">Currently tracked builds <a class="action-button" href="${h.url_for( controller='data_admin', action='add_genome' )}">Add new</a></div><div class="toolFormBody"><h2>Locally cached data:</h2> - <h3>NOTE: Indexers queued here will not be reflected in the table until Galaxy is restarted.</h3> + <h3>NOTE: Indexes generated here will not be reflected in the table until Galaxy is restarted.</h3><table id="locfiles"> - <tr><th>Database ID</th><th>Name</th><th>Bowtie</th><th>Bowtie 2</th><th>BWA</th><th>Sam</th><th>Picard</th><th>PerM</th></tr> - %for dbkey in sorted(genomes.keys()): + <tr> + <th>DB Key</th> + <th>Name</th> + %for label in labels: + <th>${labels[label]}</th> + %endfor + </tr> + %for dbkey in sorted(dbkeys): <tr><td>${dbkey}</td> - <td>${genomes[dbkey]['name']}</td> - <td id="${dbkey}-bowtie" class="indexcell ${genomes[dbkey]['bowtie_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" 
data-longname="${genomes[dbkey]['name']}" data-index="bowtie" data-dbkey="${dbkey}">${genomes[dbkey]['bowtie_indexes']['state']}</td> - <td id="${dbkey}-bowtie2" class="indexcell ${genomes[dbkey]['bowtie2_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bowtie2" data-dbkey="${dbkey}">${genomes[dbkey]['bowtie2_indexes']['state']}</td> - <td id="${dbkey}-bwa" class="indexcell ${genomes[dbkey]['bwa_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bwa" data-dbkey="${dbkey}">${genomes[dbkey]['bwa_indexes']['state']}</td> - <td id="${dbkey}-sam" class="indexcell ${genomes[dbkey]['sam_fa_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="sam" data-dbkey="${dbkey}">${genomes[dbkey]['sam_fa_indexes']['state']}</td> - <td id="${dbkey}-picard" class="indexcell ${genomes[dbkey]['srma_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="picard" data-dbkey="${dbkey}">${genomes[dbkey]['srma_indexes']['state']}</td> - <td id="${dbkey}-perm" class="indexcell ${genomes[dbkey]['perm_base_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="perm" data-dbkey="${dbkey}">${genomes[dbkey]['perm_base_indexes']['state']}</td> + <td>${indextable[dbkey]['name']}</td> + %for label in labels: + <td id="${dbkey}-${indexfuncs[label]}" class="indexcell ${styles[indextable[dbkey]['indexes'][label]]}" data-fapath="${indextable[dbkey]['path']}" data-longname="${indextable[dbkey]['name']}" data-index="${indexfuncs[label]}" data-dbkey="${dbkey}">${indextable[dbkey]['indexes'][label]}</td> + %endfor + </tr> %endfor </table> @@ -124,6 +129,7 @@ jsondata["name"] = $('#job-' + jobid).attr('data-name'); jsondata["dbkey"] = $('#job-' + jobid).attr('data-dbkey'); jsondata["indexes"] = $('#job-' + jobid).attr('data-indexes'); + tdid = jq(jsondata["dbkey"] + '-' + jsondata["indexes"]); newhtml = makeNewHTML(jsondata); $('#job-' + jobid).replaceWith(newhtml); if ($.inArray(jsondata["status"], finalstates) == -1) { @@ -133,7 +139,7 @@ }); } if (jsondata["status"] == 'done' || jsondata["status"] == 'ok') { - elem = $('#' + jsondata["dbkey"] + '-' + jsondata["indexes"]); + elem = $(tdid); elem.html('Generated'); elem.attr('class', 'indexcell panel-done-message'); } @@ -156,5 +162,8 @@ } }); }); - + + function jq(id) { + return '#' + id.replace(/(:|\.)/g,'\\$1'); + } </script> \ No newline at end of file diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako --- a/templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako +++ b/templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako @@ -43,14 +43,17 @@ ${render_msg( message, status )} %endif +<div class="warningmessage"> + Resetting metadata may take a while because this process clones each change set in each selected repository's change log to a temporary location on disk. + Wait until this page redirects after clicking the <b>Reset metadata on selected repositories</b> button, as doing anything else will not be helpful. Watch + the tool shed paster log to pass the time if necessary. 
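( In code terms, the cloning this warning describes follows the pattern visible in the shed_util changes above, where work_dir now comes from tempfile.mkdtemp(). The loop below is an illustrative sketch built from names in this diff -- clone_repository(), generate_metadata_for_changeset_revision(), repo -- not the committed implementation, and the shutil.rmtree() cleanup is an assumption. )

    import shutil
    import tempfile

    for changeset in repo.changelog:
        ctx = repo.changectx( changeset )
        # Clone this changeset revision to a temporary location on disk.
        work_dir = tempfile.mkdtemp()
        try:
            clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
            metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, work_dir, repository_clone_url )
        finally:
            # Assumed cleanup; the diff itself does not show one.
            shutil.rmtree( work_dir )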
+</div> + <div class="toolForm"><div class="toolFormTitle">Reset all metadata on each selected repository</div><form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories' )}" method="post" ><div class="form-row"> - Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses. Resetting metadata - may take a while because this process clones each change set in each selected repository's change log to a temporary location on disk. - Wait until this page redirects after clicking the <b>Reset metadata on selected repositories</b> button, as doing anything else will not - be helpful. Watch the tool shed paster log to pass the time if necessary. + Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses. </div><div style="clear: both"></div><div class="form-row"> diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 templates/webapps/community/repository/tool_form.mako --- a/templates/webapps/community/repository/tool_form.mako +++ b/templates/webapps/community/repository/tool_form.mako @@ -186,12 +186,15 @@ <div class="toolHelp"><div class="toolHelpBody"><% + tool_help = tool.help + # Help is Mako template, so render using current static path. + tool_help = tool_help.render( static_path=h.url_for( '/static' ) ) # Convert to unicode to display non-ascii characters. - if type( tool.help ) is not unicode: - tool.help = unicode( tool.help, 'utf-8') + if type( tool_help ) is not unicode: + tool_help = unicode( tool_help, 'utf-8') %> - ${tool.help} - </div> + ${tool_help} + </div></div> %endif %else: diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 test/functional/test_library_templates.py --- a/test/functional/test_library_templates.py +++ b/test/functional/test_library_templates.py @@ -398,7 +398,7 @@ folder_id=self.security.encode_id( folder3.id ), upload_option='import_from_history', hda_ids=self.security.encode_id( hda.id ), - strings_displayed=[ '<input type="hidden" name="%s" value="Option1"/>' % select_field_name ] ) + strings_displayed=[ '<select name="%s" last_selected_value="Option1">' % select_field_name ] ) ldda = get_latest_ldda_by_name( filename ) assert ldda is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda from the database' self.browse_library( cntrller='library_admin', diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 tools/new_operations/operation_filter.py --- a/tools/new_operations/operation_filter.py +++ b/tools/new_operations/operation_filter.py @@ -3,6 +3,9 @@ from galaxy import eggs from galaxy import jobs from galaxy.tools.parameters import DataToolParameter + +from galaxy.jobs.handler import JOB_ERROR + # Older py compatibility try: set() @@ -63,8 +66,8 @@ raise Exception( stderr ) except Exception, exc: - data.blurb = jobs.JOB_ERROR - data.state = jobs.JOB_ERROR + data.blurb = JOB_ERROR + data.state = JOB_ERROR ## def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None): ## pass https://bitbucket.org/galaxy/galaxy-central/changeset/713ee9f0ae6a/ changeset: 713ee9f0ae6a user: dannon date: 2012-08-13 19:22:51 summary: Merge pull request. 
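( The tool_form.mako and lib/galaxy/workflow/modules.py changes in changeset 761c58466d97 above both switch tool help from a raw string to a Mako template rendered with the current static path. A minimal illustration of that render step, with a hypothetical help string standing in for real help text from a tool's XML config: )

    from mako.template import Template

    # Hypothetical help text; real help comes from the tool definition.
    help_template = Template( "<img src='${static_path}/images/example.png'/> See the manual." )
    tool_help = help_template.render( static_path='/static' )
    # Convert to unicode to display non-ascii characters, as tool_form.mako does.
    if type( tool_help ) is not unicode:
        tool_help = unicode( tool_help, 'utf-8' )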
affected #: 5 files diff -r fbefa03698959fc15d06d74ad9a2cf24f6001e63 -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b lib/galaxy/web/api/group_users.py --- /dev/null +++ b/lib/galaxy/web/api/group_users.py @@ -0,0 +1,124 @@ +""" +API operations on Group objects. +""" +import logging +from galaxy.web.base.controller import BaseAPIController, url_for +from galaxy import web + +log = logging.getLogger( __name__ ) + +class GroupUsersAPIController( BaseAPIController ): + + @web.expose_api + @web.require_admin + def index( self, trans, group_id, **kwd ): + """ + GET /api/groups/{encoded_group_id}/users + Displays a collection (list) of users in the group.
+ """ + decoded_group_id = trans.security.decode_id( group_id ) + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + except: + group = None + if not group: + trans.response.status = 400 + return "Invalid group id ( %s ) specified." % str( group_id ) + rval = [] + try: + for uga in group.users: + user = uga.user + encoded_id = trans.security.encode_id( user.id ) + rval.append( dict( id = encoded_id, + email = user.email, + url = url_for( 'group_user', group_id=group_id, id=encoded_id, ) ) ) + except Exception, e: + rval = "Error in group API at listing users" + log.error( rval + ": %s" % str(e) ) + trans.response.status = 500 + return rval + + @web.expose_api + @web.require_admin + def show( self, trans, id, group_id, **kwd ): + """ + GET /api/groups/{encoded_group_id}/users/{encoded_user_id} + Displays information about a group user. + """ + user_id = id + decoded_group_id = trans.security.decode_id( group_id ) + decoded_user_id = trans.security.decode_id( user_id ) + item = None + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id ) + for uga in group.users: + if uga.user == user: + item = dict( id = user_id, + email = user.email, + url = url_for( 'group_user', group_id=group_id, id=user_id) ) # TODO Fix This + if not item: + item = "user %s not in group %s" % (user.email,group.name) + except Exception, e: + item = "Error in group_user API group %s user %s" % (group.name, user.email) + log.error(item + ": %s" % str(e)) + return item + + @web.expose_api + @web.require_admin + def update( self, trans, id, group_id, **kwd ): + """ + PUT /api/groups/{encoded_group_id}/users/{encoded_user_id} + Adds a user to a group + """ + user_id = id + decoded_group_id = trans.security.decode_id( group_id ) + decoded_user_id = trans.security.decode_id( user_id ) + item = None + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id ) + for uga in group.users: + if uga.user == user: + item = dict( id = user_id, + email = user.email, + url = url_for( 'group_user', group_id=group_id, id=user_id) ) + if not item: + uga = trans.app.model.UserGroupAssociation( user, group ) + # Add UserGroupAssociations + trans.sa_session.add( uga ) + trans.sa_session.flush() + item = dict( id = user_id, + email = user.email, + url = url_for( 'group_user', group_id=group_id, id=user_id) ) + except Exception, e: + item = "Error in group_user API Adding user %s to group %s" % (user.email,group.name) + log.error(item + ": %s" % str(e)) + return item + + @web.expose_api + @web.require_admin + def delete( self, trans, id, group_id, **kwd ): + """ + DELETE /api/groups/{encoded_group_id}/users/{encoded_user_id} + Removes a user from a group + """ + user_id = id + decoded_group_id = trans.security.decode_id( group_id ) + decoded_user_id = trans.security.decode_id( user_id ) + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id ) + for uga in group.users: + if uga.user == user: + trans.sa_session.delete( uga ) + trans.sa_session.flush() + item = dict( id = user_id, + email = user.email, + url = url_for( 'group_user', group_id=group_id, id=user_id) ) + if not item: + item = "user %s not in group %s" % (user.email,group.name) + except Exception, e: + item = "Error in 
group_user API Removing user %s from group %s" % (user.email,group.name) + log.error(item + ": %s" % str(e)) + return item diff -r fbefa03698959fc15d06d74ad9a2cf24f6001e63 -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b lib/galaxy/web/api/groups.py --- /dev/null +++ b/lib/galaxy/web/api/groups.py @@ -0,0 +1,128 @@ +""" +API operations on Group objects. +""" +import logging +from galaxy.web.base.controller import BaseAPIController, url_for +from galaxy import web + + +log = logging.getLogger( __name__ ) + + +class GroupAPIController( BaseAPIController ): + + @web.expose_api + @web.require_admin + def index( self, trans, **kwd ): + """ + GET /api/groups + Displays a collection (list) of groups. + """ + rval = [] + for group in trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.deleted == False ): + if trans.user_is_admin(): + item = group.get_api_value( value_mapper={ 'id': trans.security.encode_id } ) + encoded_id = trans.security.encode_id( group.id ) + item['url'] = url_for( 'group', id=encoded_id ) + rval.append( item ) + return rval + + @web.expose_api + def create( self, trans, payload, **kwd ): + """ + POST /api/groups + Creates a new group. + """ + log.info("groups payload%s\n" % (payload)) + if not trans.user_is_admin(): + trans.response.status = 403 + return "You are not authorized to create a new group." + name = payload.get( 'name', None ) + if not name: + trans.response.status = 400 + return "Enter a valid name" + if trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name==name ).first(): + trans.response.status = 400 + return "A group with that name already exists" + + group = trans.app.model.Group( name=name ) + trans.sa_session.add( group ) + user_ids = payload.get( 'user_ids', [] ) + for i in user_ids: + log.info("user_id: %s\n" % (i )) + log.info("%s %s\n" % (i, trans.security.decode_id( i ) )) + users = [ trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( i ) ) for i in user_ids ] + role_ids = payload.get( 'role_ids', [] ) + roles = [ trans.sa_session.query( trans.model.Role ).get( trans.security.decode_id( i ) ) for i in role_ids ] + trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=roles, users=users ) + """ + # Create the UserGroupAssociations + for user in users: + trans.app.security_agent.associate_user_group( user, group ) + # Create the GroupRoleAssociations + for role in roles: + trans.app.security_agent.associate_group_role( group, role ) + """ + trans.sa_session.flush() + encoded_id = trans.security.encode_id( group.id ) + item = group.get_api_value( view='element', value_mapper={ 'id': trans.security.encode_id } ) + item['url'] = url_for( 'group', id=encoded_id ) + return [ item ] + + @web.expose_api + @web.require_admin + def show( self, trans, id, **kwd ): + """ + GET /api/groups/{encoded_group_id} + Displays information about a group. + """ + group_id = id + try: + decoded_group_id = trans.security.decode_id( group_id ) + except TypeError: + trans.response.status = 400 + return "Malformed group id ( %s ) specified, unable to decode." % str( group_id ) + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + except: + group = None + if not group: + trans.response.status = 400 + return "Invalid group id ( %s ) specified." 
% str( group_id ) + item = group.get_api_value( view='element', value_mapper={ 'id': trans.security.encode_id } ) + item['url'] = url_for( 'group', id=group_id ) + item['users_url'] = url_for( 'group_users', group_id=group_id ) + item['roles_url'] = url_for( 'group_roles', group_id=group_id ) + return item + + @web.expose_api + @web.require_admin + def update( self, trans, id, payload, **kwd ): + """ + PUT /api/groups/{encoded_group_id} + Modifies a group. + """ + group_id = id + try: + decoded_group_id = trans.security.decode_id( group_id ) + except TypeError: + trans.response.status = 400 + return "Malformed group id ( %s ) specified, unable to decode." % str( group_id ) + try: + group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id ) + except: + group = None + if not group: + trans.response.status = 400 + return "Invalid group id ( %s ) specified." % str( group_id ) + name = payload.get( 'name', None ) + if name: + group.name = name + trans.sa_session.add(group) + user_ids = payload.get( 'user_ids', [] ) + users = [ trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( i ) ) for i in user_ids ] + role_ids = payload.get( 'role_ids', [] ) + roles = [ trans.sa_session.query( trans.model.Role ).get( trans.security.decode_id( i ) ) for i in role_ids ] + trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=roles, users=users,delete_existing_assocs=False ) + trans.sa_session.flush() + diff -r fbefa03698959fc15d06d74ad9a2cf24f6001e63 -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b lib/galaxy/web/buildapp.py --- a/lib/galaxy/web/buildapp.py +++ b/lib/galaxy/web/buildapp.py @@ -122,6 +122,18 @@ 'permissions', path_prefix='/api/libraries/:library_id', parent_resources=dict( member_name='library', collection_name='libraries' ) ) + webapp.api_mapper.resource( 'user', + 'users', + controller='group_users', + name_prefix='group_', + path_prefix='/api/groups/:group_id', + parent_resources=dict( member_name='group', collection_name='groups' ) ) + webapp.api_mapper.resource( 'role', + 'roles', + controller='group_roles', + name_prefix='group_', + path_prefix='/api/groups/:group_id', + parent_resources=dict( member_name='group', collection_name='groups' ) ) webapp.api_mapper.resource( 'dataset', 'datasets', path_prefix='/api' ) webapp.api_mapper.resource_with_deleted( 'library', 'libraries', path_prefix='/api' ) webapp.api_mapper.resource( 'sample', 'samples', path_prefix='/api' ) @@ -129,6 +141,7 @@ webapp.api_mapper.resource( 'form', 'forms', path_prefix='/api' ) webapp.api_mapper.resource( 'request_type', 'request_types', path_prefix='/api' ) webapp.api_mapper.resource( 'role', 'roles', path_prefix='/api' ) + webapp.api_mapper.resource( 'group', 'groups', path_prefix='/api' ) webapp.api_mapper.resource_with_deleted( 'quota', 'quotas', path_prefix='/api' ) webapp.api_mapper.resource( 'tool', 'tools', path_prefix='/api' ) webapp.api_mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' ) https://bitbucket.org/galaxy/galaxy-central/changeset/966a9b393b85/ changeset: 966a9b393b85 branch: add_requirement_tags_for_plink user: dannon date: 2012-08-13 19:23:54 summary: Branch close affected #: 0 files Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
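( With the resource mappings added to buildapp.py, the new controllers are reachable under /api/groups. A short usage sketch; the host, API key and payload values below are placeholders rather than values from these changesets: )

    import json
    import urllib2

    base = 'http://localhost:8080/api/groups'
    key = 'YOUR_API_KEY'  # placeholder admin API key

    # GET /api/groups -- list all non-deleted groups.
    groups = json.loads( urllib2.urlopen( '%s?key=%s' % ( base, key ) ).read() )

    # POST /api/groups -- create a group; the payload mirrors what
    # GroupAPIController.create() reads: name, user_ids, role_ids.
    payload = json.dumps( dict( name='my group', user_ids=[], role_ids=[] ) )
    headers = { 'Content-Type': 'application/json' }
    request = urllib2.Request( '%s?key=%s' % ( base, key ), payload, headers )
    created = json.loads( urllib2.urlopen( request ).read() )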