galaxy-commits
Threads by month
- ----- 2026 -----
- February
- January
- ----- 2025 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions
commit/galaxy-central: greg: Move ~/lib/galaxy/tool_shed to ~/lib/tool_shed, and move the ~/lib/galaxy/util/shed_util components to ~/lib/tool_shed/util.
by commits-noreply@bitbucket.org 26 Feb '13
by commits-noreply@bitbucket.org 26 Feb '13
26 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6f0050c4e06e/
changeset: 6f0050c4e06e
user: greg
date: 2013-02-26 21:52:51
summary: Move ~/lib/galaxy/tool_shed to ~/lib/tool_shed, and move the ~/lib/galaxy/util/shed_util components to ~/lib/tool_shed/util.
affected #: 77 files
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -3,8 +3,8 @@
from galaxy import config, jobs, util, tools, web
import galaxy.tools.search
import galaxy.tools.data
-import galaxy.tool_shed
-import galaxy.tool_shed.tool_shed_registry
+import tool_shed.galaxy_install
+import tool_shed.tool_shed_registry
from galaxy.web import security
import galaxy.model
import galaxy.datatypes.registry
@@ -41,7 +41,7 @@
db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
# Set up the tool sheds registry
if os.path.isfile( self.config.tool_sheds_config ):
- self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
+ self.tool_shed_registry = tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
else:
self.tool_shed_registry = None
log.debug( 'self.config.tool_sheds_config: %s, self.tool_shed_registry: %s',
@@ -51,7 +51,7 @@
from galaxy.model.migrate.check import create_or_verify_database
create_or_verify_database( db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options, app=self )
# Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
- from galaxy.tool_shed.migrate.check import verify_tools
+ from tool_shed.galaxy_install.migrate.check import verify_tools
verify_tools( self, db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options )
# Object store manager
self.object_store = build_object_store_from_config(self.config)
@@ -64,7 +64,7 @@
object_store = self.object_store,
trace_logger=self.trace_logger )
# Manage installed tool shed repositories.
- self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
+ self.installed_repository_manager = tool_shed.galaxy_install.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
self.datatypes_registry = galaxy.datatypes.registry.Registry()
# Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories. We
@@ -105,7 +105,7 @@
self.data_managers = DataManagers( self )
# If enabled, poll respective tool sheds to see if updates are available for any installed tool shed repositories.
if self.config.get_bool( 'enable_tool_shed_check', False ):
- from tool_shed import update_manager
+ from tool_shed.galaxy_install import update_manager
self.update_manager = update_manager.UpdateManager( self )
# Load proprietary datatype converters and display applications.
self.installed_repository_manager.load_proprietary_converters_and_display_applications()
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3031,6 +3031,10 @@
pass
class ToolShedRepository( object ):
+ api_collection_visible_keys = ( 'id', 'name', 'tool_shed', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
+ 'update_available', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' )
+ api_element_visible_keys = ( 'id', 'name', 'tool_shed', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
+ 'update_available', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' )
installation_status = Bunch( NEW='New',
CLONING='Cloning',
SETTING_TOOL_VERSIONS='Setting tool versions',
@@ -3066,6 +3070,10 @@
self.dist_to_shed = dist_to_shed
self.status = status
self.error_message = error_message
+ def as_dict( self, trans ):
+ tsr_dict = self.get_api_value( view='element' )
+ tsr_dict[ 'id' ] = trans.security.encode_id( self.id )
+ return tsr_dict
def repo_files_directory( self, app ):
repo_path = self.repo_path( app )
if repo_path:
@@ -3153,6 +3161,22 @@
if self.shed_config_filename == shed_tool_conf_dict[ 'config_filename' ]:
return shed_tool_conf_dict
return default
+ def get_api_value( self, view='collection', value_mapper=None ):
+ if value_mapper is None:
+ value_mapper = {}
+ rval = {}
+ try:
+ visible_keys = self.__getattribute__( 'api_' + view + '_visible_keys' )
+ except AttributeError:
+ raise Exception( 'Unknown API view: %s' % view )
+ for key in visible_keys:
+ try:
+ rval[ key ] = self.__getattribute__( key )
+ if key in value_mapper:
+ rval[ key ] = value_mapper.get( key )( rval[ key ] )
+ except AttributeError:
+ rval[ key ] = None
+ return rval
@property
def can_install( self ):
return self.status == self.installation_status.NEW
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py
@@ -0,0 +1,44 @@
+"""
+Migration script to update the migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+def upgrade():
+ print __doc__
+
+ metadata.reflect()
+ # Create the table.
+ try:
+ cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';"
+ db_session.execute( cmd )
+ except Exception, e:
+ log.debug( "Updating migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+ try:
+ cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';"
+ db_session.execute( cmd )
+ except Exception, e:
+ log.debug( "Updating migrate_tools.repository_path column to point to the old location lib/galaxy/tool_shed/migrate failed: %s" % str( e ) )
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/__init__.py
--- a/lib/galaxy/tool_shed/__init__.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-Classes encapsulating the management of repositories installed from Galaxy tool sheds.
-"""
-import os
-import galaxy.util.shed_util
-import galaxy.util.shed_util_common
-from galaxy.model.orm import and_
-
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree, ElementInclude
-
-class InstalledRepositoryManager( object ):
- def __init__( self, app ):
- self.app = app
- self.model = self.app.model
- self.sa_session = self.model.context.current
- self.tool_configs = self.app.config.tool_configs
- if self.app.config.migrated_tools_config not in self.tool_configs:
- self.tool_configs.append( self.app.config.migrated_tools_config )
- self.installed_repository_dicts = []
- def get_repository_install_dir( self, tool_shed_repository ):
- for tool_config in self.tool_configs:
- tree = ElementTree.parse( tool_config )
- root = tree.getroot()
- ElementInclude.include( root )
- tool_path = root.get( 'tool_path', None )
- if tool_path:
- tool_shed = galaxy.util.shed_util_common.clean_tool_shed_url( tool_shed_repository.tool_shed )
- relative_path = os.path.join( tool_path,
- tool_shed,
- 'repos',
- tool_shed_repository.owner,
- tool_shed_repository.name,
- tool_shed_repository.installed_changeset_revision )
- if os.path.exists( relative_path ):
- return relative_path
- return None
- def load_proprietary_datatypes( self ):
- for tool_shed_repository in self.sa_session.query( self.model.ToolShedRepository ) \
- .filter( and_( self.model.ToolShedRepository.table.c.includes_datatypes==True,
- self.model.ToolShedRepository.table.c.deleted==False ) ) \
- .order_by( self.model.ToolShedRepository.table.c.id ):
- relative_install_dir = self.get_repository_install_dir( tool_shed_repository )
- if relative_install_dir:
- installed_repository_dict = galaxy.util.shed_util.load_installed_datatypes( self.app, tool_shed_repository, relative_install_dir )
- if installed_repository_dict:
- self.installed_repository_dicts.append( installed_repository_dict )
- def load_proprietary_converters_and_display_applications( self, deactivate=False ):
- for installed_repository_dict in self.installed_repository_dicts:
- if installed_repository_dict[ 'converter_path' ]:
- galaxy.util.shed_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=deactivate )
- if installed_repository_dict[ 'display_path' ]:
- galaxy.util.shed_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=deactivate )
-
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/common_util.py
--- a/lib/galaxy/tool_shed/common_util.py
+++ /dev/null
@@ -1,93 +0,0 @@
-import os, urllib2
-from galaxy import util
-from galaxy.util.odict import odict
-from galaxy.tool_shed import encoding_util
-
-REPOSITORY_OWNER = 'devteam'
-
-def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ):
- # Get the 000x_tools.xml file associated with the current migrate_tools version number.
- tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
- # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
- migrated_tool_configs_dict = odict()
- tree = util.parse_xml( tools_xml_file_path )
- root = tree.getroot()
- tool_shed = root.get( 'name' )
- tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed )
- # The default behavior is that the tool shed is down.
- tool_shed_accessible = False
- if tool_shed_url:
- for elem in root:
- if elem.tag == 'repository':
- tool_dependencies = []
- tool_dependencies_dict = {}
- repository_name = elem.get( 'name' )
- changeset_revision = elem.get( 'changeset_revision' )
- url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \
- ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
- try:
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- tool_shed_accessible = True
- except Exception, e:
- # Tool shed may be unavailable - we have to set tool_shed_accessible since we're looping.
- tool_shed_accessible = False
- print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
- if tool_shed_accessible:
- if text:
- tool_dependencies_dict = encoding_util.tool_shed_decode( text )
- for dependency_key, requirements_dict in tool_dependencies_dict.items():
- tool_dependency_name = requirements_dict[ 'name' ]
- tool_dependency_version = requirements_dict[ 'version' ]
- tool_dependency_type = requirements_dict[ 'type' ]
- tool_dependency_readme = requirements_dict.get( 'readme', '' )
- tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
- for tool_elem in elem.findall( 'tool' ):
- migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies
- if tool_shed_accessible:
- # Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
- missing_tool_configs_dict = odict()
- for tool_panel_config in tool_panel_configs:
- tree = util.parse_xml( tool_panel_config )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'tool':
- missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
- elif elem.tag == 'section':
- for section_elem in elem:
- if section_elem.tag == 'tool':
- missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
- else:
- exception_msg = '\n\nThe entry for the main Galaxy tool shed at %s is missing from the %s file. ' % ( tool_shed, app.config.tool_sheds_config )
- exception_msg += 'The entry for this tool shed must always be available in this file, so re-add it before attempting to start your Galaxy server.\n'
- raise Exception( exception_msg )
- return tool_shed_accessible, missing_tool_configs_dict
-def check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ):
- file_path = elem.get( 'file', None )
- if file_path:
- path, name = os.path.split( file_path )
- if name in migrated_tool_configs_dict:
- tool_dependencies = migrated_tool_configs_dict[ name ]
- missing_tool_configs_dict[ name ] = tool_dependencies
- return missing_tool_configs_dict
-def get_non_shed_tool_panel_configs( app ):
- # Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml.
- config_filenames = []
- for config_filename in app.config.tool_configs:
- # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
- # <toolbox tool_path="../shed_tools">
- tree = util.parse_xml( config_filename )
- root = tree.getroot()
- tool_path = root.get( 'tool_path', None )
- if tool_path is None:
- config_filenames.append( config_filename )
- return config_filenames
-def get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ):
- search_str = '://%s' % tool_shed
- for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
- if shed_url.find( search_str ) >= 0:
- if shed_url.endswith( '/' ):
- shed_url = shed_url.rstrip( '/' )
- return shed_url
- return None
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/encoding_util.py
--- a/lib/galaxy/tool_shed/encoding_util.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import binascii, logging
-from galaxy.util.hash_util import hmac_new
-from galaxy.util.json import json_fix
-
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( "simplejson" )
-import simplejson
-
-log = logging.getLogger( __name__ )
-
-encoding_sep = '__esep__'
-encoding_sep2 = '__esepii__'
-
-def tool_shed_decode( value ):
- # Extract and verify hash
- a, b = value.split( ":" )
- value = binascii.unhexlify( b )
- test = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value )
- assert a == test
- # Restore from string
- values = None
- try:
- values = simplejson.loads( value )
- except Exception, e:
- log.debug( "Decoding json value from tool shed for value '%s' threw exception: %s" % ( str( value ), str( e ) ) )
- if values is not None:
- try:
- return json_fix( values )
- except Exception, e:
- log.debug( "Fixing decoded json values '%s' from tool shed threw exception: %s" % ( str( values ), str( e ) ) )
- fixed_values = values
- if values is None:
- values = value
- return values
-def tool_shed_encode( val ):
- if isinstance( val, dict ):
- value = simplejson.dumps( val )
- else:
- value = val
- a = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value )
- b = binascii.hexlify( value )
- return "%s:%s" % ( a, b )
\ No newline at end of file
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ /dev/null
@@ -1,372 +0,0 @@
-"""
-Manage automatic installation of tools configured in the xxx.xml files in ~/scripts/migrate_tools (e.g., 0002_tools.xml).
-All of the tools were at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool shed.
-"""
-import os, urllib2, tempfile
-from galaxy import util
-from galaxy.tools import ToolSection
-from galaxy.util.json import from_json_string, to_json_string
-import galaxy.util.shed_util as shed_util
-import galaxy.util.shed_util_common as suc
-from galaxy.util.odict import odict
-from galaxy.tool_shed import common_util
-
-class InstallManager( object ):
- def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ):
- """
- Check tool settings in tool_shed_install_config and install all repositories that are not already installed. The tool
- panel configuration file is the received migrated_tools_config, which is the reserved file named migrated_tools_conf.xml.
- """
- self.app = app
- self.toolbox = self.app.toolbox
- self.migrated_tools_config = migrated_tools_config
- # If install_dependencies is True but tool_dependency_dir is not set, do not attempt to install but print informative error message.
- if install_dependencies and app.config.tool_dependency_dir is None:
- message = 'You are attempting to install tool dependencies but do not have a value for "tool_dependency_dir" set in your universe_wsgi.ini '
- message += 'file. Set this location value to the path where you want tool dependencies installed and rerun the migration script.'
- raise Exception( message )
- # Get the local non-shed related tool panel configs (there can be more than one, and the default name is tool_conf.xml).
- self.proprietary_tool_confs = self.non_shed_tool_panel_configs
- self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems( latest_migration_script_number )
- # Set the location where the repositories will be installed by retrieving the tool_path setting from migrated_tools_config.
- tree = util.parse_xml( migrated_tools_config )
- root = tree.getroot()
- self.tool_path = root.get( 'tool_path' )
- print "Repositories will be installed into configured tool_path location ", str( self.tool_path )
- # Parse tool_shed_install_config to check each of the tools.
- self.tool_shed_install_config = tool_shed_install_config
- tree = util.parse_xml( tool_shed_install_config )
- root = tree.getroot()
- self.tool_shed = suc.clean_tool_shed_url( root.get( 'name' ) )
- self.repository_owner = common_util.REPOSITORY_OWNER
- index, self.shed_config_dict = suc.get_shed_tool_conf_dict( app, self.migrated_tools_config )
- # Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
- # tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
- # The default behavior is that the tool shed is down.
- tool_shed_accessible = False
- tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
- if tool_panel_configs:
- # The missing_tool_configs_dict contents are something like:
- # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
- tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
- else:
- # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
- # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
- # the database.
- tool_shed_accessible = True
- missing_tool_configs_dict = odict()
- if tool_shed_accessible:
- if len( self.proprietary_tool_confs ) == 1:
- plural = ''
- file_names = self.proprietary_tool_confs[ 0 ]
- else:
- plural = 's'
- file_names = ', '.join( self.proprietary_tool_confs )
- if missing_tool_configs_dict:
- for repository_elem in root:
- self.install_repository( repository_elem, install_dependencies )
- else:
- message = "\nNo tools associated with migration stage %s are defined in your " % str( latest_migration_script_number )
- message += "file%s named %s,\nso no repositories will be installed on disk.\n" % ( plural, file_names )
- print message
- else:
- message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % str( latest_migration_script_number )
- message += "Try again later.\n"
- print message
- def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
- if self.shed_config_dict.get( 'tool_path' ):
- relative_install_dir = os.path.join( self.shed_config_dict['tool_path'], relative_install_dir )
- found = False
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- if '.hg' in dirs:
- dirs.remove( '.hg' )
- for name in files:
- if name == tool_config:
- found = True
- break
- if found:
- break
- full_path = str( os.path.abspath( os.path.join( root, name ) ) )
- tool = self.toolbox.load_tool( full_path )
- return suc.generate_tool_guid( repository_clone_url, tool )
- def get_proprietary_tool_panel_elems( self, latest_tool_migration_script_number ):
- # Parse each config in self.proprietary_tool_confs (the default is tool_conf.xml) and generate a list of Elements that are
- # either ToolSection elements or Tool elements. These will be used to generate new entries in the migrated_tools_conf.xml
- # file for the installed tools.
- tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
- # Parse the XML and load the file attributes for later checking against the integrated elements from self.proprietary_tool_confs.
- migrated_tool_configs = []
- tree = util.parse_xml( tools_xml_file_path )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'repository':
- for tool_elem in elem:
- migrated_tool_configs.append( tool_elem.get( 'file' ) )
- # Parse each file in self.proprietary_tool_confs and generate the integrated list of tool panel Elements that contain them.
- tool_panel_elems = []
- for proprietary_tool_conf in self.proprietary_tool_confs:
- tree = util.parse_xml( proprietary_tool_conf )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'tool':
- # Tools outside of sections.
- file_path = elem.get( 'file', None )
- if file_path:
- name = suc.strip_path( file_path )
- if name in migrated_tool_configs:
- if elem not in tool_panel_elems:
- tool_panel_elems.append( elem )
- elif elem.tag == 'section':
- # Tools contained in a section.
- for section_elem in elem:
- if section_elem.tag == 'tool':
- file_path = section_elem.get( 'file', None )
- if file_path:
- name = suc.strip_path( file_path )
- if name in migrated_tool_configs:
- # Append the section, not the tool.
- if elem not in tool_panel_elems:
- tool_panel_elems.append( elem )
- return tool_panel_elems
- def get_containing_tool_sections( self, tool_config ):
- """
- If tool_config is defined somewhere in self.proprietary_tool_panel_elems, return True and a list of ToolSections in which the
- tool is displayed. If the tool is displayed outside of any sections, None is appended to the list.
- """
- tool_sections = []
- is_displayed = False
- for proprietary_tool_panel_elem in self.proprietary_tool_panel_elems:
- if proprietary_tool_panel_elem.tag == 'tool':
- # The proprietary_tool_panel_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
- proprietary_tool_config = proprietary_tool_panel_elem.get( 'file' )
- proprietary_name = suc.strip_path( proprietary_tool_config )
- if tool_config == proprietary_name:
- # The tool is loaded outside of any sections.
- tool_sections.append( None )
- if not is_displayed:
- is_displayed = True
- if proprietary_tool_panel_elem.tag == 'section':
- # The proprietary_tool_panel_elem looks something like <section name="EMBOSS" id="EMBOSSLite">.
- for section_elem in proprietary_tool_panel_elem:
- if section_elem.tag == 'tool':
- # The section_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
- proprietary_tool_config = section_elem.get( 'file' )
- proprietary_name = suc.strip_path( proprietary_tool_config )
- if tool_config == proprietary_name:
- # The tool is loaded inside of the section_elem.
- tool_sections.append( ToolSection( proprietary_tool_panel_elem ) )
- if not is_displayed:
- is_displayed = True
- return is_displayed, tool_sections
- def handle_repository_contents( self, tool_shed_repository, repository_clone_url, relative_install_dir, repository_elem, install_dependencies ):
- """Generate the metadata for the installed tool shed repository, among other things."""
- tool_panel_dict_for_display = odict()
- if self.tool_path:
- repo_install_dir = os.path.join( self.tool_path, relative_install_dir )
- else:
- repo_install_dir = relative_install_dir
- for tool_elem in repository_elem:
- # The tool_elem looks something like this: <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
- tool_config = tool_elem.get( 'file' )
- guid = self.get_guid( repository_clone_url, relative_install_dir, tool_config )
- # See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems.
- is_displayed, tool_sections = self.get_containing_tool_sections( tool_config )
- if is_displayed:
- tool_panel_dict_for_tool_config = shed_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
- for k, v in tool_panel_dict_for_tool_config.items():
- tool_panel_dict_for_display[ k ] = v
- else:
- print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' \
- % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) )
- metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=self.app,
- repository=tool_shed_repository,
- changeset_revision=tool_shed_repository.changeset_revision,
- repository_clone_url=repository_clone_url,
- shed_config_dict = self.shed_config_dict,
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=True )
- tool_shed_repository.metadata = metadata_dict
- self.app.sa_session.add( tool_shed_repository )
- self.app.sa_session.flush()
- if 'tool_dependencies' in metadata_dict:
- # All tool_dependency objects must be created before the tools are processed even if no tool dependencies will be installed.
- tool_dependencies = shed_util.create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True )
- else:
- tool_dependencies = None
- if 'tools' in metadata_dict:
- sample_files = metadata_dict.get( 'sample_files', [] )
- sample_files = [ str( s ) for s in sample_files ]
- tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
- shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
- sample_files_copied = [ s for s in tool_index_sample_files ]
- repository_tools_tups = suc.get_repository_tools_tups( self.app, metadata_dict )
- if repository_tools_tups:
- # Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = shed_util.handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups )
- # Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( self.app,
- self.tool_path,
- sample_files,
- repository_tools_tups,
- sample_files_copied )
- # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- shed_util.copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied )
- if install_dependencies and tool_dependencies and 'tool_dependencies' in metadata_dict:
- # Install tool dependencies.
- shed_util.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
- # Get the tool_dependencies.xml file from disk.
- tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
- installed_tool_dependencies = shed_util.handle_tool_dependencies( app=self.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_dependencies )
- for installed_tool_dependency in installed_tool_dependencies:
- if installed_tool_dependency.status == self.app.model.ToolDependency.installation_status.ERROR:
- print '\nThe following error occurred from the InstallManager while installing tool dependency ', installed_tool_dependency.name, ':'
- print installed_tool_dependency.error_message, '\n\n'
- shed_util.add_to_tool_panel( self.app,
- tool_shed_repository.name,
- repository_clone_url,
- tool_shed_repository.installed_changeset_revision,
- repository_tools_tups,
- self.repository_owner,
- self.migrated_tools_config,
- tool_panel_dict=tool_panel_dict_for_display,
- new_install=True )
- if 'datatypes' in metadata_dict:
- tool_shed_repository.status = self.app.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
- if not tool_shed_repository.includes_datatypes:
- tool_shed_repository.includes_datatypes = True
- self.app.sa_session.add( tool_shed_repository )
- self.app.sa_session.flush()
- work_dir = tempfile.mkdtemp()
- datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )
- # Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
- # after this installation completes.
- converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
- if converter_path or display_path:
- # Create a dictionary of tool shed repository related information.
- repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
- name=tool_shed_repository.name,
- owner=self.repository_owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
- if converter_path:
- # Load proprietary datatype converters
- self.app.datatypes_registry.load_datatype_converters( self.toolbox, installed_repository_dict=repository_dict )
- if display_path:
- # Load proprietary datatype display applications
- self.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
- def install_repository( self, repository_elem, install_dependencies ):
- # Install a single repository, loading contained tools into the tool panel.
- name = repository_elem.get( 'name' )
- description = repository_elem.get( 'description' )
- installed_changeset_revision = repository_elem.get( 'changeset_revision' )
- # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
- relative_clone_dir = os.path.join( self.tool_shed, 'repos', self.repository_owner, name, installed_changeset_revision )
- clone_dir = os.path.join( self.tool_path, relative_clone_dir )
- if self.__isinstalled( clone_dir ):
- print "Skipping automatic install of repository '", name, "' because it has already been installed in location ", clone_dir
- else:
- tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
- repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
- relative_install_dir = os.path.join( relative_clone_dir, name )
- install_dir = os.path.join( clone_dir, name )
- ctx_rev = suc.get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision )
- tool_shed_repository = suc.create_or_update_tool_shed_repository( app=self.app,
- name=name,
- description=description,
- installed_changeset_revision=installed_changeset_revision,
- ctx_rev=ctx_rev,
- repository_clone_url=repository_clone_url,
- metadata_dict={},
- status=self.app.model.ToolShedRepository.installation_status.NEW,
- current_changeset_revision=None,
- owner=self.repository_owner,
- dist_to_shed=True )
- shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
- cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
- if cloned_ok:
- self.handle_repository_contents( tool_shed_repository=tool_shed_repository,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- repository_elem=repository_elem,
- install_dependencies=install_dependencies )
- self.app.sa_session.refresh( tool_shed_repository )
- metadata_dict = tool_shed_repository.metadata
- if 'tools' in metadata_dict:
- shed_util.update_tool_shed_repository_status( self.app,
- tool_shed_repository,
- self.app.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
- # Get the tool_versions from the tool shed for each tool in the installed change set.
- url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
- ( tool_shed_url, tool_shed_repository.name, self.repository_owner, installed_changeset_revision )
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- if text:
- tool_version_dicts = from_json_string( text )
- shed_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
- else:
- # Set the tool versions since they seem to be missing for this repository in the tool shed.
- # CRITICAL NOTE: These default settings may not properly handle all parent/child associations.
- for tool_dict in metadata_dict[ 'tools' ]:
- flush_needed = False
- tool_id = tool_dict[ 'guid' ]
- old_tool_id = tool_dict[ 'id' ]
- tool_version = tool_dict[ 'version' ]
- tool_version_using_old_id = shed_util.get_tool_version( self.app, old_tool_id )
- tool_version_using_guid = shed_util.get_tool_version( self.app, tool_id )
- if not tool_version_using_old_id:
- tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id,
- tool_shed_repository=tool_shed_repository )
- self.app.sa_session.add( tool_version_using_old_id )
- self.app.sa_session.flush()
- if not tool_version_using_guid:
- tool_version_using_guid = self.app.model.ToolVersion( tool_id=tool_id,
- tool_shed_repository=tool_shed_repository )
- self.app.sa_session.add( tool_version_using_guid )
- self.app.sa_session.flush()
- # Associate the two versions as parent / child.
- tool_version_association = shed_util.get_tool_version_association( self.app,
- tool_version_using_old_id,
- tool_version_using_guid )
- if not tool_version_association:
- tool_version_association = self.app.model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
- parent_id=tool_version_using_old_id.id )
- self.app.sa_session.add( tool_version_association )
- self.app.sa_session.flush()
- shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
- @property
- def non_shed_tool_panel_configs( self ):
- return common_util.get_non_shed_tool_panel_configs( self.app )
- def __get_url_from_tool_shed( self, tool_shed ):
- # The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like:
- # http://toolshed.g2.bx.psu.edu/
- for shed_name, shed_url in self.app.tool_shed_registry.tool_sheds.items():
- if shed_url.find( tool_shed ) >= 0:
- if shed_url.endswith( '/' ):
- shed_url = shed_url.rstrip( '/' )
- return shed_url
- # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
- return None
- def __isinstalled( self, clone_dir ):
- full_path = os.path.abspath( clone_dir )
- if os.path.exists( full_path ):
- for root, dirs, files in os.walk( full_path ):
- if '.hg' in dirs:
- # Assume that the repository has been installed if we find a .hg directory.
- return True
- return False
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/check.py
--- a/lib/galaxy/tool_shed/migrate/check.py
+++ /dev/null
@@ -1,171 +0,0 @@
-import sys, os, logging, subprocess
-from galaxy import eggs
-import pkg_resources
-pkg_resources.require( "sqlalchemy-migrate" )
-
-from migrate.versioning import repository, schema
-from sqlalchemy import *
-from galaxy.util.odict import odict
-from galaxy.tool_shed import common_util
-
-log = logging.getLogger( __name__ )
-
-# Path relative to galaxy
-migrate_repository_directory = os.path.dirname( __file__ ).replace( os.getcwd() + os.path.sep, '', 1 )
-migrate_repository = repository.Repository( migrate_repository_directory )
-dialect_to_egg = {
- "sqlite" : "pysqlite>=2",
- "postgres" : "psycopg2",
- "mysql" : "MySQL_python"
-}
-
-def verify_tools( app, url, galaxy_config_file, engine_options={} ):
- # Check the value in the migrate_tools.version database table column to verify that the number is in
- # sync with the number of version scripts in ~/lib/galaxy/tools/migrate/versions.
- dialect = ( url.split( ':', 1 ) )[0]
- try:
- egg = dialect_to_egg[ dialect ]
- try:
- pkg_resources.require( egg )
- log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
- except:
- # If the module is in the path elsewhere (i.e. non-egg), it'll still load.
- log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
- except KeyError:
- # Let this go, it could possibly work with db's we don't support
- log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
- # Create engine and metadata
- engine = create_engine( url, **engine_options )
- meta = MetaData( bind=engine )
- # The migrate_tools table was created in database version script 0092_add_migrate_tools_table.py.
- version_table = Table( "migrate_tools", meta, autoload=True )
- # Verify that the code and the database are in sync.
- db_schema = schema.ControlledSchema( engine, migrate_repository )
- latest_tool_migration_script_number = migrate_repository.versions.latest
- if latest_tool_migration_script_number != db_schema.version:
- # The default behavior is that the tool shed is down.
- tool_shed_accessible = False
- if app.new_installation:
- # New installations will not be missing tools, so we don't need to worry about them.
- missing_tool_configs_dict = odict()
- else:
- tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
- if tool_panel_configs:
- # The missing_tool_configs_dict contents are something like:
- # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
- tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number )
- else:
- # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
- # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
- # the database.
- tool_shed_accessible = True
- missing_tool_configs_dict = odict()
- have_tool_dependencies = False
- for k, v in missing_tool_configs_dict.items():
- if v:
- have_tool_dependencies = True
- break
- config_arg = ''
- if os.path.abspath( os.path.join( os.getcwd(), 'universe_wsgi.ini' ) ) != galaxy_config_file:
- config_arg = ' -c %s' % galaxy_config_file.replace( os.path.abspath( os.getcwd() ), '.' )
- if not app.config.running_functional_tests:
- if tool_shed_accessible:
- # Automatically update the value of the migrate_tools.version database table column.
- cmd = 'sh manage_tools.sh%s upgrade' % config_arg
- proc = subprocess.Popen( args=cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
- return_code = proc.wait()
- output = proc.stdout.read( 32768 )
- if return_code != 0:
- raise Exception( "Error attempting to update the value of migrate_tools.version: %s" % output )
- elif missing_tool_configs_dict:
- if len( tool_panel_configs ) == 1:
- plural = ''
- tool_panel_config_file_names = tool_panel_configs[ 0 ]
- else:
- plural = 's'
- tool_panel_config_file_names = ', '.join( tool_panel_configs )
- msg = "\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
- msg += "\n\nThe list of files at the end of this message refers to tools that are configured to load into the tool panel for\n"
- msg += "this Galaxy instance, but have been removed from the Galaxy distribution. These tools and their dependencies can be\n"
- msg += "automatically installed from the Galaxy tool shed at http://toolshed.g2.bx.psu.edu.\n\n"
- msg += "To skip this process, attempt to start your Galaxy server again (e.g., sh run.sh or whatever you use). If you do this,\n"
- msg += "be aware that these tools will no longer be available in your Galaxy tool panel, and entries for each of them should\n"
- msg += "be removed from your file%s named %s.\n\n" % ( plural, tool_panel_config_file_names )
- msg += "CRITICAL NOTE IF YOU PLAN TO INSTALL\n"
- msg += "The location in which the tool repositories will be installed is the value of the 'tool_path' attribute in the <tool>\n"
- msg += 'tag of the file named ./migrated_tool_conf.xml (i.e., <toolbox tool_path="../shed_tools">). The default location\n'
- msg += "setting is '../shed_tools', which may be problematic for some cluster environments, so make sure to change it before\n"
- msg += "you execute the installation process if appropriate. The configured location must be outside of the Galaxy installation\n"
- msg += "directory or it must be in a sub-directory protected by a properly configured .hgignore file if the directory is within\n"
- msg += "the Galaxy installation directory hierarchy. This is because tool shed repositories will be installed using mercurial's\n"
- msg += "clone feature, which creates .hg directories and associated mercurial repository files. Not having .hgignore properly\n"
- msg += "configured could result in undesired behavior when modifying or updating your local Galaxy instance or the tool shed\n"
- msg += "repositories if they are in directories that pose conflicts. See mercurial's .hgignore documentation at the following\n"
- msg += "URL for details.\n\nhttp://mercurial.selenic.com/wiki/.hgignore\n\n"
- if have_tool_dependencies:
- msg += "The following tool dependencies can also optionally be installed (see the option flag in the command below). If you\n"
- msg += "choose to install them (recommended), they will be installed within the location specified by the 'tool_dependency_dir'\n"
-            msg += "setting in your main Galaxy configuration file (e.g., universe_wsgi.ini).\n"
- processed_tool_dependencies = []
- for missing_tool_config, tool_dependencies in missing_tool_configs_dict.items():
- for tool_dependencies_tup in tool_dependencies:
- if tool_dependencies_tup not in processed_tool_dependencies:
- msg += "------------------------------------\n"
- msg += "Tool Dependency\n"
- msg += "------------------------------------\n"
- msg += "Name: %s, Version: %s, Type: %s\n" % ( tool_dependencies_tup[ 0 ],
- tool_dependencies_tup[ 1 ],
- tool_dependencies_tup[ 2 ] )
- if tool_dependencies_tup[ 3 ]:
- msg += "Requirements and installation information:\n"
- msg += "%s\n" % tool_dependencies_tup[ 3 ]
- else:
- msg += "\n"
- msg += "------------------------------------\n"
- processed_tool_dependencies.append( tool_dependencies_tup )
- msg += "\n"
- msg += "%s" % output.replace( 'done', '' )
- msg += "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv\n"
- msg += "sh ./scripts/migrate_tools/%04d_tools.sh\n" % latest_tool_migration_script_number
- msg += "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n"
- if have_tool_dependencies:
- msg += "The tool dependencies listed above will be installed along with the repositories if you add the 'install_dependencies'\n"
- msg += "option to the above command like this:\n\n"
- msg += "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv\n"
- msg += "sh ./scripts/migrate_tools/%04d_tools.sh install_dependencies\n" % latest_tool_migration_script_number
- msg += "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n"
- msg += "Tool dependencies can be installed after the repositories have been installed as well.\n\n"
- msg += "After the installation process finishes, you can start your Galaxy server. As part of this installation process,\n"
- msg += "entries for each of the following tool config files will be added to the file named ./migrated_tool_conf.xml, so these\n"
- msg += "tools will continue to be loaded into your tool panel. Because of this, existing entries for these files should be\n"
- msg += "removed from your file%s named %s, but only after the installation process finishes.\n\n" % ( plural, tool_panel_config_file_names )
- for missing_tool_config, tool_dependencies in missing_tool_configs_dict.items():
- msg += "%s\n" % missing_tool_config
- msg += "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n"
- raise Exception( msg )
- else:
- log.debug( "The main Galaxy tool shed is not currently available, so skipped tool migration %s until next server startup" % db_schema.version )
- else:
- log.info( "At migrate_tools version %d" % db_schema.version )
-
-def migrate_to_current_version( engine, schema ):
- # Changes to get to current version.
- changeset = schema.changeset( None )
- for ver, change in changeset:
- nextver = ver + changeset.step
- log.info( 'Installing tools from version %s -> %s... ' % ( ver, nextver ) )
- old_stdout = sys.stdout
- class FakeStdout( object ):
- def __init__( self ):
- self.buffer = []
- def write( self, s ):
- self.buffer.append( s )
- def flush( self ):
- pass
- sys.stdout = FakeStdout()
- try:
- schema.runchange( ver, change, changeset.step )
- finally:
- for message in "".join( sys.stdout.buffer ).split( "\n" ):
- log.info( message )
- sys.stdout = old_stdout
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/common.py
--- a/lib/galaxy/tool_shed/migrate/common.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import sys, os, ConfigParser
-import galaxy.config
-import galaxy.datatypes.registry
-from galaxy import tools
-from galaxy.tools.data import *
-import galaxy.model.mapping
-import galaxy.tools.search
-from galaxy.objectstore import build_object_store_from_config
-from galaxy.tool_shed.common_util import *
-import galaxy.tool_shed.tool_shed_registry
-from galaxy.tool_shed import install_manager
-
-class MigrateToolsApplication( object ):
- """Encapsulates the state of a basic Galaxy Universe application in order to initiate the Install Manager"""
- def __init__( self, tools_migration_config ):
- install_dependencies = 'install_dependencies' in sys.argv
- galaxy_config_file = 'universe_wsgi.ini'
- if '-c' in sys.argv:
- pos = sys.argv.index( '-c' )
- sys.argv.pop( pos )
- galaxy_config_file = sys.argv.pop( pos )
- if not os.path.exists( galaxy_config_file ):
- print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % galaxy_config_file
- sys.exit( 1 )
- config_parser = ConfigParser.ConfigParser( { 'here':os.getcwd() } )
- config_parser.read( galaxy_config_file )
- galaxy_config_dict = {}
- for key, value in config_parser.items( "app:main" ):
- galaxy_config_dict[ key ] = value
- self.config = galaxy.config.Configuration( **galaxy_config_dict )
- if not self.config.database_connection:
- self.config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
- self.config.update_integrated_tool_panel = True
- self.object_store = build_object_store_from_config( self.config )
- # Setup the database engine and ORM
- self.model = galaxy.model.mapping.init( self.config.file_path,
- self.config.database_connection,
- engine_options={},
- create_tables=False,
- object_store=self.object_store )
- # Create an empty datatypes registry.
- self.datatypes_registry = galaxy.datatypes.registry.Registry()
- # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
- self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
- # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
- self.tool_data_tables = ToolDataTableManager( tool_data_path=self.config.tool_data_path,
- config_filename=self.config.tool_data_table_config_path )
- # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
- self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
- tool_data_path=self.tool_data_tables.tool_data_path,
- from_shed_config=True )
- # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
- tool_configs = self.config.tool_configs
- if self.config.migrated_tools_config not in tool_configs:
- tool_configs.append( self.config.migrated_tools_config )
- self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
- # Search support for tools
- self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
- # Set up the tool sheds registry.
- if os.path.isfile( self.config.tool_sheds_config ):
- self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
- else:
- self.tool_shed_registry = None
- # Get the latest tool migration script number to send to the Install manager.
- latest_migration_script_number = int( tools_migration_config.split( '_' )[ 0 ] )
- # The value of migrated_tools_config is migrated_tools_conf.xml, and is reserved for containing only those tools that have been
- # eliminated from the distribution and moved to the tool shed. A side-effect of instantiating the InstallManager is the automatic
- # installation of all appropriate tool shed repositories.
- self.install_manager = install_manager.InstallManager( app=self,
- latest_migration_script_number=latest_migration_script_number,
- tool_shed_install_config=os.path.join( self.config.root,
- 'scripts',
- 'migrate_tools',
- tools_migration_config ),
- migrated_tools_config=self.config.migrated_tools_config,
- install_dependencies=install_dependencies )
- @property
- def sa_session( self ):
- return self.model.context.current
- def shutdown( self ):
- self.object_store.shutdown()
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/migrate.cfg
--- a/lib/galaxy/tool_shed/migrate/migrate.cfg
+++ /dev/null
@@ -1,20 +0,0 @@
-[db_settings]
-# Used to identify which repository this database is versioned under.
-# You can use the name of your project.
-repository_id=GalaxyTools
-
-# The name of the database table used to track the schema version.
-# This name shouldn't already be used by your project.
-# If this is changed once a database is under version control, you'll need to
-# change the table name in each database too.
-version_table=migrate_tools
-
-# When committing a change script, Migrate will attempt to generate the
-# sql for all supported databases; normally, if one of them fails - probably
-# because you don't have that database installed - it is ignored and the
-# commit continues, perhaps ending successfully.
-# Databases in this list MUST compile successfully during a commit, or the
-# entire commit will fail. List the databases your application will actually
-# be using to ensure your updates to that database work properly.
-# This must be a list; example: ['postgres','sqlite']
-required_dbs=[]
\ No newline at end of file
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0001_tools.py
--- a/lib/galaxy/tool_shed/migrate/versions/0001_tools.py
+++ /dev/null
@@ -1,9 +0,0 @@
-"""
-Initialize the version column of the migrate_tools database table to 1. No tool migrations are handled in this version.
-"""
-import sys
-
-def upgrade():
- print __doc__
-def downgrade():
- pass
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0002_tools.py
--- a/lib/galaxy/tool_shed/migrate/versions/0002_tools.py
+++ /dev/null
@@ -1,13 +0,0 @@
-"""
-The Emboss 5.0.0 tools have been eliminated from the distribution and the Emboss datatypes have been removed from
-datatypes_conf.xml.sample. You should remove the Emboss datatypes from your version of datatypes_conf.xml. The
-repositories named emboss_5 and emboss_datatypes from the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu
-will be installed into your local Galaxy instance at the location discussed above by running the following command.
-"""
-
-import sys
-
-def upgrade():
- print __doc__
-def downgrade():
- pass
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0003_tools.py
--- a/lib/galaxy/tool_shed/migrate/versions/0003_tools.py
+++ /dev/null
@@ -1,12 +0,0 @@
-"""
-The freebayes tool has been eliminated from the distribution. The repository named freebayes from the main
-Galaxy tool shed at http://toolshed.g2.bx.psu.edu will be installed into your local Galaxy instance at the
-location discussed above by running the following command.
-"""
-
-import sys
-
-def upgrade():
- print __doc__
-def downgrade():
- pass
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0004_tools.py
--- a/lib/galaxy/tool_shed/migrate/versions/0004_tools.py
+++ /dev/null
@@ -1,14 +0,0 @@
-"""
-The NCBI BLAST+ tools have been eliminated from the distribution. The tools and
-datatypes are now available in repositories named ncbi_blast_plus and
-blast_datatypes, in the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu.
-These repositories will be installed into your local Galaxy instance at the
-location discussed above by running the following command.
-"""
-
-import sys
-
-def upgrade():
- print __doc__
-def downgrade():
- pass
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0005_tools.py
--- a/lib/galaxy/tool_shed/migrate/versions/0005_tools.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-The tools "Map with BWA for Illumina" and "Map with BWA for SOLiD" have
-been eliminated from the distribution. The tools are now available
-in the repository named bwa_wrappers from the main Galaxy tool shed at
-http://toolshed.g2.bx.psu.edu, and will be installed into your local
-Galaxy instance at the location discussed above by running the following
-command.
-"""
-
-import sys
-
-def upgrade():
- print __doc__
-def downgrade():
- pass
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0006_tools.py
--- a/lib/galaxy/tool_shed/migrate/versions/0006_tools.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""
-The following tools have been eliminated from the distribution:
-FASTQ to BAM, SAM to FASTQ, BAM Index Statistics, Estimate Library
-Complexity, Insertion size metrics for PAIRED data, SAM/BAM Hybrid
-Selection Metrics, bam/sam Cleaning, Add or Replace Groups, Replace
-SAM/BAM Header, Paired Read Mate Fixer, Mark Duplicate reads,
-SAM/BAM Alignment Summary Metrics, SAM/BAM GC Bias Metrics, and
-Reorder SAM/BAM. The tools are now available in the repository
-named picard from the main Galaxy tool shed at
-http://toolshed.g2.bx.psu.edu, and will be installed into your
-local Galaxy instance at the location discussed above by running
-the following command.
-"""
-
-import sys
-
-def upgrade():
- print __doc__
-def downgrade():
- pass
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/migrate/versions/0007_tools.py
--- a/lib/galaxy/tool_shed/migrate/versions/0007_tools.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""
-The following tools have been eliminated from the distribution:
-Map with Bowtie for Illumina, Map with Bowtie for SOLiD, Lastz,
-and Lastz paired reads. The tools are now available in the
-repositories named bowtie_wrappers, bowtie_color_wrappers, lastz,
-and lastz_paired_reads from the main Galaxy tool shed at
-http://toolshed.g2.bx.psu.edu, and will be installed into your
-local Galaxy instance at the location discussed above by running
-the following command.
-"""
-
-import sys
-
-def upgrade():
- print __doc__
-def downgrade():
- pass
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/tool_dependencies/common_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py
+++ /dev/null
@@ -1,149 +0,0 @@
-import os, shutil, tarfile, urllib2, zipfile
-from galaxy.datatypes import checkers
-
-def create_env_var_dict( elem, tool_dependency_install_dir=None, tool_shed_repository_install_dir=None ):
- env_var_name = elem.get( 'name', 'PATH' )
- env_var_action = elem.get( 'action', 'prepend_to' )
- env_var_text = None
- if elem.text and elem.text.find( 'REPOSITORY_INSTALL_DIR' ) >= 0:
- if tool_shed_repository_install_dir and elem.text.find( '$REPOSITORY_INSTALL_DIR' ) != -1:
- env_var_text = elem.text.replace( '$REPOSITORY_INSTALL_DIR', tool_shed_repository_install_dir )
- return dict( name=env_var_name, action=env_var_action, value=env_var_text )
- else:
- env_var_text = elem.text.replace( '$REPOSITORY_INSTALL_DIR', tool_dependency_install_dir )
- return dict( name=env_var_name, action=env_var_action, value=env_var_text )
- if elem.text and elem.text.find( 'INSTALL_DIR' ) >= 0:
- if tool_dependency_install_dir:
- env_var_text = elem.text.replace( '$INSTALL_DIR', tool_dependency_install_dir )
- return dict( name=env_var_name, action=env_var_action, value=env_var_text )
- else:
- env_var_text = elem.text.replace( '$INSTALL_DIR', tool_shed_repository_install_dir )
- return dict( name=env_var_name, action=env_var_action, value=env_var_text )
- if elem.text:
- # Allow for environment variables that contain neither REPOSITORY_INSTALL_DIR nor INSTALL_DIR since there may be command line
- # parameters that are tuned for a Galaxy instance. Allowing them to be set in one location rather than being hard coded into
- # each tool config is the best approach. For example:
- # <environment_variable name="GATK2_SITE_OPTIONS" action="set_to">
- # "--num_threads 4 --num_cpu_threads_per_data_thread 3 --phone_home STANDARD"
- # </environment_variable>
- return dict( name=env_var_name, action=env_var_action, value=elem.text)
- return None
-def create_or_update_env_shell_file( install_dir, env_var_dict ):
- env_var_name = env_var_dict[ 'name' ]
- env_var_action = env_var_dict[ 'action' ]
- env_var_value = env_var_dict[ 'value' ]
- if env_var_action == 'prepend_to':
- changed_value = '%s:$%s' % ( env_var_value, env_var_name )
- elif env_var_action == 'set_to':
- changed_value = '%s' % env_var_value
- elif env_var_action == 'append_to':
- changed_value = '$%s:%s' % ( env_var_name, env_var_value )
- env_shell_file_path = '%s/env.sh' % install_dir
- if os.path.exists( env_shell_file_path ):
- write_action = '>>'
- else:
- write_action = '>'
- cmd = "echo '%s=%s; export %s' %s %s;chmod +x %s" % ( env_var_name,
- changed_value,
- env_var_name,
- write_action,
- env_shell_file_path,
- env_shell_file_path )
- return cmd
-def extract_tar( file_name, file_path ):
- if isgzip( file_name ) or isbz2( file_name ):
- # Open for reading with transparent compression.
- tar = tarfile.open( file_name, 'r:*' )
- else:
- tar = tarfile.open( file_name )
- tar.extractall( path=file_path )
- tar.close()
-def extract_zip( archive_path, extraction_path ):
- # TODO: change this method to use zipfile.Zipfile.extractall() when we stop supporting Python 2.5.
- if not zipfile_ok( archive_path ):
- return False
- zip_archive = zipfile.ZipFile( archive_path, 'r' )
- for name in zip_archive.namelist():
- uncompressed_path = os.path.join( extraction_path, name )
- if uncompressed_path.endswith( '/' ):
- if not os.path.isdir( uncompressed_path ):
- os.makedirs( uncompressed_path )
- else:
- file( uncompressed_path, 'wb' ).write( zip_archive.read( name ) )
- zip_archive.close()
- return True
-def isbz2( file_path ):
- return checkers.is_bz2( file_path )
-def isgzip( file_path ):
- return checkers.is_gzip( file_path )
-def istar( file_path ):
- return tarfile.is_tarfile( file_path )
-def iszip( file_path ):
- return checkers.check_zip( file_path )
-def make_directory( full_path ):
- if not os.path.exists( full_path ):
- os.makedirs( full_path )
-def move_directory_files( current_dir, source_dir, destination_dir ):
- source_directory = os.path.abspath( os.path.join( current_dir, source_dir ) )
- destination_directory = os.path.join( destination_dir )
- if not os.path.isdir( destination_directory ):
- os.makedirs( destination_directory )
- for file_name in os.listdir( source_directory ):
- source_file = os.path.join( source_directory, file_name )
- destination_file = os.path.join( destination_directory, file_name )
- shutil.move( source_file, destination_file )
-def move_file( current_dir, source, destination_dir ):
- source_file = os.path.abspath( os.path.join( current_dir, source ) )
- destination_directory = os.path.join( destination_dir )
- if not os.path.isdir( destination_directory ):
- os.makedirs( destination_directory )
- shutil.move( source_file, destination_directory )
-def tar_extraction_directory( file_path, file_name ):
- """Try to return the correct extraction directory."""
- file_name = file_name.strip()
- extensions = [ '.tar.gz', '.tgz', '.tar.bz2', '.tar', '.zip' ]
- for extension in extensions:
- if file_name.find( extension ) > 0:
- dir_name = file_name[ :-len( extension ) ]
- if os.path.exists( os.path.abspath( os.path.join( file_path, dir_name ) ) ):
- return dir_name
- if os.path.exists( os.path.abspath( os.path.join( file_path, file_name ) ) ):
- return os.path.abspath( file_path )
- raise ValueError( 'Could not find path to file %s' % os.path.abspath( os.path.join( file_path, file_name ) ) )
-def url_download( install_dir, downloaded_file_name, download_url ):
- file_path = os.path.join( install_dir, downloaded_file_name )
- src = None
- dst = None
- try:
- src = urllib2.urlopen( download_url )
- data = src.read()
- dst = open( file_path,'wb' )
- dst.write( data )
- except:
- if src:
- src.close()
- if dst:
- dst.close()
- return os.path.abspath( file_path )
-def zip_extraction_directory( file_path, file_name ):
- """Try to return the correct extraction directory."""
- files = [ filename for filename in os.listdir( file_path ) if not filename.endswith( '.zip' ) ]
- if len( files ) > 1:
- return os.path.abspath( file_path )
- elif len( files ) == 1:
-            # If there is only one file it should be a directory.
- if os.path.isdir( os.path.join( file_path, files[ 0 ] ) ):
- return os.path.abspath( os.path.join( file_path, files[ 0 ] ) )
- raise ValueError( 'Could not find directory for the extracted file %s' % os.path.abspath( os.path.join( file_path, file_name ) ) )
-def zipfile_ok( path_to_archive ):
- """
- This function is a bit pedantic and not functionally necessary. It checks whether there is no file pointing outside of the extraction,
- because ZipFile.extractall() has some potential security holes. See python zipfile documentation for more details.
- """
- basename = os.path.realpath( os.path.dirname( path_to_archive ) )
- zip_archive = zipfile.ZipFile( path_to_archive )
- for member in zip_archive.namelist():
- member_path = os.path.realpath( os.path.join( basename, member ) )
- if not member_path.startswith( basename ):
- return False
- return True
diff -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 -r 6f0050c4e06ef6f5fdce3f7326280c07c63ef62e lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# For Python 2.5
-from __future__ import with_statement
-
-import os, shutil, tempfile
-from contextlib import contextmanager
-import common_util
-
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require('ssh' )
-pkg_resources.require( 'Fabric' )
-
-from fabric.api import env, lcd, local, settings
-
-INSTALLATION_LOG = 'INSTALLATION.log'
-
-def check_fabric_version():
- version = env.version
- if int( version.split( "." )[ 0 ] ) < 1:
- raise NotImplementedError( "Install Fabric version 1.0 or later." )
-def set_galaxy_environment( galaxy_user, tool_dependency_dir, host='localhost', shell='/bin/bash -l -c' ):
- """General Galaxy environment configuration"""
- env.user = galaxy_user
- env.install_dir = tool_dependency_dir
- env.host_string = host
- env.shell = shell
- env.use_sudo = False
- env.safe_cmd = local
- return env
-@contextmanager
-def make_tmp_dir():
- work_dir = tempfile.mkdtemp()
- yield work_dir
- if os.path.exists( work_dir ):
- local( 'rm -rf %s' % work_dir )
-def handle_command( app, tool_dependency, install_dir, cmd ):
- sa_session = app.model.context.current
- output = local( cmd, capture=True )
- log_results( cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
- if output.return_code:
- tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
- tool_dependency.error_message = str( output.stderr )
- sa_session.add( tool_dependency )
- sa_session.flush()
- return output.return_code
-def install_and_build_package( app, tool_dependency, actions_dict ):
- """Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
- sa_session = app.model.context.current
- install_dir = actions_dict[ 'install_dir' ]
- package_name = actions_dict[ 'package_name' ]
- actions = actions_dict.get( 'actions', None )
- filtered_actions = []
- if actions:
- with make_tmp_dir() as work_dir:
- with lcd( work_dir ):
- # The first action in the list of actions will be the one that defines the installation process. There
- # are currently only two supported processes; download_by_url and clone via a "shell_command" action type.
- action_type, action_dict = actions[ 0 ]
- if action_type == 'download_by_url':
- # Eliminate the download_by_url action so remaining actions can be processed correctly.
- filtered_actions = actions[ 1: ]
- url = action_dict[ 'url' ]
- if 'target_filename' in action_dict:
- downloaded_filename = action_dict[ 'target_filename' ]
- else:
- downloaded_filename = os.path.split( url )[ -1 ]
- downloaded_file_path = common_util.url_download( work_dir, downloaded_filename, url )
- if common_util.istar( downloaded_file_path ):
- # <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1…</action>
- common_util.extract_tar( downloaded_file_path, work_dir )
- dir = common_util.tar_extraction_directory( work_dir, downloaded_filename )
- elif common_util.iszip( downloaded_file_path ):
- # <action type="download_by_url">http://downloads.sourceforge.net/project/picard/picard-tools/1.56/picard-to…</action>
- zip_archive_extracted = common_util.extract_zip( downloaded_file_path, work_dir )
- dir = common_util.zip_extraction_directory( work_dir, downloaded_filename )
- else:
- dir = work_dir
- elif action_type == 'shell_command':
- # <action type="shell_command">git clone --recursive git://github.com/ekg/freebayes.git</action>
- # Eliminate the shell_command clone action so remaining actions can be processed correctly.
- filtered_actions = actions[ 1: ]
- return_code = handle_command( app, tool_dependency, install_dir, action_dict[ 'command' ] )
- if return_code:
- return
- dir = package_name
- else:
- # We're handling a complex repository dependency where we only have a set_environment tag set.
- # <action type="set_environment">
- # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
- # </action>
- filtered_actions = [ a for a in actions ]
- dir = install_dir
- if not os.path.exists( dir ):
- os.makedirs( dir )
- # The package has been down-loaded, so we can now perform all of the actions defined for building it.
- with lcd( dir ):
- for action_tup in filtered_actions:
- action_type, action_dict = action_tup
- current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
- if action_type == 'make_directory':
- common_util.make_directory( full_path=action_dict[ 'full_path' ] )
- elif action_type == 'move_directory_files':
- common_util.move_directory_files( current_dir=current_dir,
- source_dir=os.path.join( action_dict[ 'source_directory' ] ),
- destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) )
- elif action_type == 'move_file':
- # TODO: Remove this hack that resets current_dir so that the pre-compiled bwa binary can be found.
- # current_dir = '/Users/gvk/workspaces_2008/bwa/bwa-0.5.9'
- common_util.move_file( current_dir=current_dir,
- source=os.path.join( action_dict[ 'source' ] ),
- destination_dir=os.path.join( action_dict[ 'destination' ] ) )
- elif action_type == 'set_environment':
- # Currently the only action supported in this category is "environment_variable".
- env_var_dicts = action_dict[ 'environment_variable' ]
- for env_var_dict in env_var_dicts:
- cmd = common_util.create_or_update_env_shell_file( install_dir, env_var_dict )
- return_code = handle_command( app, tool_dependency, install_dir, cmd )
- if return_code:
- return
- elif action_type == 'shell_command':
- with settings( warn_only=True ):
- return_code = handle_command( app, tool_dependency, install_dir, action_dict[ 'command' ] )
- if return_code:
- return
-def log_results( command, fabric_AttributeString, file_path ):
- """
- Write attributes of fabric.operations._AttributeString (which is the output of executing command using fabric's local() method)
- to a specified log file.
- """
- if os.path.exists( file_path ):
- logfile = open( file_path, 'ab' )
- else:
- logfile = open( file_path, 'wb' )
- logfile.write( "\n#############################################\n" )
- logfile.write( '%s\nSTDOUT\n' % command )
- logfile.write( str( fabric_AttributeString.stdout ) )
- logfile.write( "\n#############################################\n" )
- logfile.write( "\n#############################################\n" )
- logfile.write( '%s\nSTDERR\n' % command )
- logfile.write( str( fabric_AttributeString.stderr ) )
- logfile.write( "\n#############################################\n" )
- logfile.close()
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: natefoo: Python 2.5 compatibility fix for lwr client updates.
by commits-noreply@bitbucket.org 26 Feb '13
by commits-noreply@bitbucket.org 26 Feb '13
26 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e9e09d1ab517/
changeset: e9e09d1ab517
user: natefoo
date: 2013-02-26 15:33:31
summary: Python 2.5 compatibility fix for lwr client updates.
affected #: 1 file
diff -r 993c232d34cda550a5605f87d7416467d4e2c33f -r e9e09d1ab51752db592eee71032ec5fc33f6ee57 lib/galaxy/jobs/runners/lwr_client/transport/standard.py
--- a/lib/galaxy/jobs/runners/lwr_client/transport/standard.py
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/standard.py
@@ -1,6 +1,7 @@
"""
LWR HTTP Client layer based on Python Standard Library (urllib2)
"""
+from __future__ import with_statement
import mmap
import urllib2
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Handle new repository_metadata columns when resetting metadata on repositories in the tool shed.
by commits-noreply@bitbucket.org 25 Feb '13
by commits-noreply@bitbucket.org 25 Feb '13
25 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/993c232d34cd/
changeset: 993c232d34cd
user: greg
date: 2013-02-25 22:30:40
summary: Handle new repository_metadata columns when resetting metadata on repositories in the tool shed.
affected #: 3 files
diff -r ecbfab5f9f1b070bda03520700335d800b8fc761 -r 993c232d34cda550a5605f87d7416467d4e2c33f lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -34,8 +34,6 @@
REPOSITORY_DATA_MANAGER_CONFIG_FILENAME = "data_manager_conf.xml"
MAX_CONTENT_SIZE = 32768
NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'repository_dependencies.xml', 'tool_dependencies.xml', REPOSITORY_DATA_MANAGER_CONFIG_FILENAME ]
-GALAXY_ADMIN_TOOL_SHED_CONTROLLER = 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER'
-TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER'
TOOL_TYPES_NOT_IN_TOOL_PANEL = [ 'manage_data' ]
VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}&<>" )
@@ -484,10 +482,11 @@
repository_dependency_id=repository_dependency.id )
trans.sa_session.add( rrda )
trans.sa_session.flush()
-def build_repository_ids_select_field( trans, cntrller, name='repository_ids', multiple=True, display='checkboxes' ):
+def build_repository_ids_select_field( trans, name='repository_ids', multiple=True, display='checkboxes' ):
"""Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata."""
repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
- if cntrller == TOOL_SHED_ADMIN_CONTROLLER:
+ if trans.webapp.name == 'community':
+ # We're in the tool shed.
for repository in trans.sa_session.query( trans.model.Repository ) \
.filter( trans.model.Repository.table.c.deleted == False ) \
.order_by( trans.model.Repository.table.c.name,
@@ -496,7 +495,8 @@
option_label = '%s (%s)' % ( repository.name, owner )
option_value = '%s' % trans.security.encode_id( repository.id )
repositories_select_field.add_option( option_label, option_value )
- elif cntrller == GALAXY_ADMIN_TOOL_SHED_CONTROLLER:
+ else:
+ # We're in Galaxy.
for repository in trans.sa_session.query( trans.model.ToolShedRepository ) \
.filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \
.order_by( trans.model.ToolShedRepository.table.c.name,
@@ -899,6 +899,7 @@
if not os.path.exists( os.path.join( dest_path, copied_file ) ):
shutil.copy( full_source_path, os.path.join( dest_path, copied_file ) )
def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
+ """Create or update a repository_metadata record in the tool shed."""
downloadable = is_downloadable( metadata_dict )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
@@ -909,6 +910,11 @@
changeset_revision=changeset_revision,
metadata=metadata_dict,
downloadable=downloadable )
+ # Always set the default values for the following columns. When resetting all metadata on a repository, this will reset the values.
+ repository_metadata.tools_functionally_correct = False
+ repository_metadata.do_not_test = False
+ repository_metadata.time_last_tested = None
+ repository_metadata.tool_test_errors = None
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
return repository_metadata
@@ -2949,7 +2955,7 @@
return True
return False
def is_downloadable( metadata_dict ):
- # NOTE: although repository README files are considered Galaxy utilities, they have no effect on determining if a revision is instakllable.
+ # NOTE: although repository README files are considered Galaxy utilities, they have no effect on determining if a revision is installable.
# See the comments in the compare_readme_files() method.
if 'datatypes' in metadata_dict:
# We have proprietary datatypes.
@@ -3503,7 +3509,8 @@
def reset_all_metadata_on_repository_in_tool_shed( trans, id ):
"""Reset all metadata on a single repository in a tool shed."""
def reset_all_tool_versions( trans, id, repo ):
- changeset_revisions = []
+ """Reset tool version lineage for those changeset revisions that include valid tools."""
+ changeset_revisions_that_contain_tools = []
for changeset in repo.changelog:
changeset_revision = str( repo.changectx( changeset ) )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -3511,10 +3518,10 @@
metadata = repository_metadata.metadata
if metadata:
if metadata.get( 'tools', None ):
- changeset_revisions.append( changeset_revision )
- # The list of changeset_revisions is now filtered to contain only those that are downloadable and contain tools.
+ changeset_revisions_that_contain_tools.append( changeset_revision )
+ # The list of changeset_revisions_that_contain_tools is now filtered to contain only those that are downloadable and contain tools.
# If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
- for index, changeset_revision in enumerate( changeset_revisions ):
+ for index, changeset_revision in enumerate( changeset_revisions_that_contain_tools ):
tool_versions_dict = {}
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
metadata = repository_metadata.metadata
@@ -3531,7 +3538,7 @@
tool_dict[ 'id' ],
tool_dict[ 'version' ],
tool_dict[ 'guid' ],
- changeset_revisions[ 0:index ] )
+ changeset_revisions_that_contain_tools[ 0:index ] )
tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
if tool_versions_dict:
repository_metadata.tool_versions = tool_versions_dict
@@ -3556,7 +3563,7 @@
work_dir = tempfile.mkdtemp()
current_changeset_revision = str( repo.changectx( changeset ) )
ctx = repo.changectx( changeset )
- log.debug( "Cloning repository revision: %s", str( ctx.rev() ) )
+ log.debug( "Cloning repository changeset revision: %s", str( ctx.rev() ) )
cloned_ok, error_message = clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
if cloned_ok:
log.debug( "Generating metadata for changeset revision: %s", str( ctx.rev() ) )
@@ -3626,9 +3633,11 @@
reset_tool_data_tables( trans.app )
return invalid_file_tups, metadata_dict
def reset_metadata_on_selected_repositories( trans, **kwd ):
- # This method is called from both Galaxy and the Tool Shed, so the cntrller param is required.
+ """
+ Inspect the repository changelog to reset metadata for all appropriate changeset revisions. This method is called from both Galaxy and the
+ Tool Shed.
+ """
repository_ids = util.listify( kwd.get( 'repository_ids', None ) )
- CONTROLLER = kwd[ 'CONTROLLER' ]
message = ''
status = 'done'
if repository_ids:
@@ -3636,10 +3645,12 @@
unsuccessful_count = 0
for repository_id in repository_ids:
try:
- if CONTROLLER == 'TOOL_SHED_ADMIN_CONTROLLER':
+ if trans.webapp.name == 'community':
+ # We're in the tool shed.
repository = get_repository_in_tool_shed( trans, repository_id )
invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
- elif CONTROLLER == 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER':
+ else:
+ # We're in Galaxy.
repository = get_installed_tool_shed_repository( trans, repository_id )
invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans, repository_id )
if invalid_file_tups:
diff -r ecbfab5f9f1b070bda03520700335d800b8fc761 -r 993c232d34cda550a5605f87d7416467d4e2c33f lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -721,12 +721,11 @@
@web.require_admin
def reset_metadata_on_selected_repositories_in_tool_shed( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- kwd[ 'CONTROLLER' ] = suc.TOOL_SHED_ADMIN_CONTROLLER
message, status = suc.reset_metadata_on_selected_repositories( trans, **kwd )
else:
message = util.restore_text( kwd.get( 'message', '' ) )
status = kwd.get( 'status', 'done' )
- repositories_select_field = suc.build_repository_ids_select_field( trans, suc.TOOL_SHED_ADMIN_CONTROLLER )
+ repositories_select_field = suc.build_repository_ids_select_field( trans )
return trans.fill_template( '/webapps/community/admin/reset_metadata_on_selected_repositories.mako',
repositories_select_field=repositories_select_field,
message=message,
diff -r ecbfab5f9f1b070bda03520700335d800b8fc761 -r 993c232d34cda550a5605f87d7416467d4e2c33f lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1711,12 +1711,11 @@
@web.require_admin
def reset_metadata_on_selected_installed_repositories( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- kwd[ 'CONTROLLER' ] = suc.GALAXY_ADMIN_TOOL_SHED_CONTROLLER
message, status = suc.reset_metadata_on_selected_repositories( trans, **kwd )
else:
message = util.restore_text( kwd.get( 'message', '' ) )
status = kwd.get( 'status', 'done' )
- repositories_select_field = suc.build_repository_ids_select_field( trans, suc.GALAXY_ADMIN_TOOL_SHED_CONTROLLER )
+ repositories_select_field = suc.build_repository_ids_select_field( trans )
return trans.fill_template( '/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako',
repositories_select_field=repositories_select_field,
message=message,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/dd4ecbbbbab3/
changeset: dd4ecbbbbab3
user: jmchilton
date: 2013-02-07 21:18:33
summary: Update lwr_client through https://bitbucket.org/jmchilton/lwr/commits/3034b5cb789a6c96b005b838542c6e7….
UChicago reported some issues with the use of mmap in the LWR client for large files. To get around this, I have implemented an optional alternative transport layer for the LWR client that is backed by pycurl instead of urllib2. This can be enabled by setting the environment variable LWR_CURL_TRANSPORT=1 for the Galaxy process. If LWR_CURL_TRANSPORT is set, the python pycurl package must be installed.
affected #: 4 files
diff -r ce9789a35356da2b2ee4ae723506d5af57a0ce69 -r dd4ecbbbbab33020fb5ff482fc302f092824e514 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -5,15 +5,15 @@
This module contains logic for interfacing with an external LWR server.
"""
-import mmap
import os
import re
import time
import urllib
-import urllib2
import simplejson
+from transport import get_transport
+
class JobInputs(object):
"""
@@ -254,6 +254,18 @@
return self.job_inputs.rewritten_command_line
+class parseJson(object):
+
+ def __init__(self):
+ pass
+
+ def __call__(self, func):
+ def replacement(*args, **kwargs):
+ response = func(*args, **kwargs)
+ return simplejson.loads(response)
+ return replacement
+
+
class Client(object):
"""
Objects of this client class perform low-level communication with a remote LWR server.
@@ -283,9 +295,7 @@
self.remote_host = remote_host
self.job_id = job_id
self.private_key = private_key
-
- def _url_open(self, request, data):
- return urllib2.urlopen(request, data)
+ self.transport = get_transport()
def __build_url(self, command, args):
if self.private_key:
@@ -294,29 +304,20 @@
url = self.remote_host + command + "?" + data
return url
- def __raw_execute(self, command, args={}, data=None):
+ def __raw_execute(self, command, args={}, data=None, input_path=None, output_path=None):
url = self.__build_url(command, args)
- request = urllib2.Request(url=url, data=data)
- response = self._url_open(request, data)
+ response = self.transport.execute(url, data=data, input_path=input_path, output_path=output_path)
return response
- def __raw_execute_and_parse(self, command, args={}, data=None):
- response = self.__raw_execute(command, args, data)
- return simplejson.loads(response.read())
-
+ @parseJson()
def __upload_file(self, action, path, name=None, contents=None):
- input = open(path, 'rb')
- try:
- mmapped_input = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
- return self.__upload_contents(action, path, mmapped_input, name)
- finally:
- input.close()
-
- def __upload_contents(self, action, path, contents, name=None):
if not name:
name = os.path.basename(path)
args = {"job_id": self.job_id, "name": name}
- return self.__raw_execute_and_parse(action, args, contents)
+ input_path = path
+ if contents:
+ input_path = None
+ return self.__raw_execute(action, args, contents, input_path)
def upload_tool_file(self, path):
"""
@@ -364,7 +365,7 @@
contents : str
Rewritten contents of the config file to upload.
"""
- return self.__upload_contents("upload_config_file", path, contents)
+ return self.__upload_file("upload_config_file", path, contents=contents)
def upload_working_directory_file(self, path):
"""
@@ -378,9 +379,10 @@
"""
return self.__upload_file("upload_working_directory_file", path)
+ @parseJson()
def _get_output_type(self, name):
- return self.__raw_execute_and_parse("get_output_type", {"name": name,
- "job_id": self.job_id})
+ return self.__raw_execute("get_output_type", {"name": name,
+ "job_id": self.job_id})
def download_work_dir_output(self, source, working_directory, output_path):
"""
@@ -414,25 +416,19 @@
name = os.path.basename(path)
output_type = self._get_output_type(name)
if output_type == "direct":
- output = open(path, "wb")
+ output_path = path
elif output_type == "task":
- output = open(os.path.join(working_directory, name), "wb")
+ output_path = os.path.join(working_directory, name)
else:
raise Exception("No remote output found for dataset with path %s" % path)
- self.__raw_download_output(name, self.job_id, output_type, output)
+ self.__raw_download_output(name, self.job_id, output_type, output_path)
- def __raw_download_output(self, name, job_id, output_type, output_file):
- response = self.__raw_execute("download_output", {"name": name,
- "job_id": self.job_id,
- "output_type": output_type})
- try:
- while True:
- buffer = response.read(1024)
- if buffer == "":
- break
- output_file.write(buffer)
- finally:
- output_file.close()
+ def __raw_download_output(self, name, job_id, output_type, output_path):
+ self.__raw_execute("download_output",
+ {"name": name,
+ "job_id": self.job_id,
+ "output_type": output_type},
+ output_path=output_path)
def launch(self, command_line):
"""
@@ -463,11 +459,12 @@
return complete_response
time.sleep(1)
+ @parseJson()
def raw_check_complete(self):
"""
Get check_complete response from the remote server.
"""
- check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id": self.job_id})
+ check_complete_response = self.__raw_execute("check_complete", {"job_id": self.job_id})
return check_complete_response
def check_complete(self):
@@ -482,11 +479,12 @@
"""
self.__raw_execute("clean", {"job_id": self.job_id})
+ @parseJson()
def setup(self):
"""
Setup remote LWR server to run this job.
"""
- return self.__raw_execute_and_parse("setup", {"job_id": self.job_id})
+ return self.__raw_execute("setup", {"job_id": self.job_id})
def _read(path):
diff -r ce9789a35356da2b2ee4ae723506d5af57a0ce69 -r dd4ecbbbbab33020fb5ff482fc302f092824e514 lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
@@ -0,0 +1,16 @@
+from standard import Urllib2Transport
+from curl import PycurlTransport
+import os
+
+
+def get_transport(os_module=os):
+ use_curl = os_module.getenv('LWR_CURL_TRANSPORT', "0")
+ ## If LWR_CURL_TRANSPORT is unset or set to 0, use default,
+ ## else use curl.
+ if use_curl.isdigit() and not int(use_curl):
+ return Urllib2Transport()
+ else:
+ return PycurlTransport()
+
+
+__all__ = [get_transport]
diff -r ce9789a35356da2b2ee4ae723506d5af57a0ce69 -r dd4ecbbbbab33020fb5ff482fc302f092824e514 lib/galaxy/jobs/runners/lwr_client/transport/curl.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/curl.py
@@ -0,0 +1,42 @@
+from cStringIO import StringIO
+try:
+ from pycurl import Curl
+except:
+ pass
+from os.path import getsize
+
+
+PYCURL_UNAVAILABLE_MESSAGE = \
+ "You are attempting to use the Pycurl version of the LWR client but pycurl is unavailable."
+
+
+class PycurlTransport(object):
+
+ def execute(self, url, data=None, input_path=None, output_path=None):
+ buf = self._open_output(output_path)
+ try:
+ c = self._new_curl_object()
+ c.setopt(c.URL, url.encode('ascii'))
+ c.setopt(c.WRITEFUNCTION, buf.write)
+ if input_path:
+ c.setopt(c.UPLOAD, 1)
+ c.setopt(c.READFUNCTION, open(input_path, 'rb').read)
+ filesize = getsize(input_path)
+ c.setopt(c.INFILESIZE, filesize)
+ if data:
+ c.setopt(c.POST, 1)
+ c.setopt(c.POSTFIELDS, data)
+ c.perform()
+ if not output_path:
+ return buf.getvalue()
+ finally:
+ buf.close()
+
+ def _new_curl_object(self):
+ try:
+ return Curl()
+ except NameError:
+ raise ImportError(PYCURL_UNAVAILABLE_MESSAGE)
+
+ def _open_output(self, output_path):
+ return open(output_path, 'wb') if output_path else StringIO()
diff -r ce9789a35356da2b2ee4ae723506d5af57a0ce69 -r dd4ecbbbbab33020fb5ff482fc302f092824e514 lib/galaxy/jobs/runners/lwr_client/transport/standard.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/standard.py
@@ -0,0 +1,33 @@
+"""
+LWR HTTP Client layer based on Python Standard Library (urllib2)
+"""
+import mmap
+import urllib2
+
+
+class Urllib2Transport(object):
+
+ def _url_open(self, request, data):
+ return urllib2.urlopen(request, data)
+
+ def execute(self, url, data=None, input_path=None, output_path=None):
+ request = urllib2.Request(url=url, data=data)
+ input = None
+ try:
+ if input_path:
+ input = open(input_path, 'rb')
+ data = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
+ response = self._url_open(request, data)
+ finally:
+ if input:
+ input.close()
+ if output_path:
+ with open(output_path, 'wb') as output:
+ while True:
+ buffer = response.read(1024)
+ if buffer == "":
+ break
+ output.write(buffer)
+ return response
+ else:
+ return response.read()
https://bitbucket.org/galaxy/galaxy-central/commits/ecbfab5f9f1b/
changeset: ecbfab5f9f1b
user: natefoo
date: 2013-02-25 21:16:30
summary: Merged in jmchilton/galaxy-central-lwr (pull request #118)
Implement optional, alternative pycurl backend for LWR client.
affected #: 4 files
diff -r fa34924860aaa282fe3c3021a257f2523848a6e6 -r ecbfab5f9f1b070bda03520700335d800b8fc761 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -5,15 +5,15 @@
This module contains logic for interfacing with an external LWR server.
"""
-import mmap
import os
import re
import time
import urllib
-import urllib2
import simplejson
+from transport import get_transport
+
class JobInputs(object):
"""
@@ -254,6 +254,18 @@
return self.job_inputs.rewritten_command_line
+class parseJson(object):
+
+ def __init__(self):
+ pass
+
+ def __call__(self, func):
+ def replacement(*args, **kwargs):
+ response = func(*args, **kwargs)
+ return simplejson.loads(response)
+ return replacement
+
+
class Client(object):
"""
Objects of this client class perform low-level communication with a remote LWR server.
@@ -283,9 +295,7 @@
self.remote_host = remote_host
self.job_id = job_id
self.private_key = private_key
-
- def _url_open(self, request, data):
- return urllib2.urlopen(request, data)
+ self.transport = get_transport()
def __build_url(self, command, args):
if self.private_key:
@@ -294,29 +304,20 @@
url = self.remote_host + command + "?" + data
return url
- def __raw_execute(self, command, args={}, data=None):
+ def __raw_execute(self, command, args={}, data=None, input_path=None, output_path=None):
url = self.__build_url(command, args)
- request = urllib2.Request(url=url, data=data)
- response = self._url_open(request, data)
+ response = self.transport.execute(url, data=data, input_path=input_path, output_path=output_path)
return response
- def __raw_execute_and_parse(self, command, args={}, data=None):
- response = self.__raw_execute(command, args, data)
- return simplejson.loads(response.read())
-
+ @parseJson()
def __upload_file(self, action, path, name=None, contents=None):
- input = open(path, 'rb')
- try:
- mmapped_input = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
- return self.__upload_contents(action, path, mmapped_input, name)
- finally:
- input.close()
-
- def __upload_contents(self, action, path, contents, name=None):
if not name:
name = os.path.basename(path)
args = {"job_id": self.job_id, "name": name}
- return self.__raw_execute_and_parse(action, args, contents)
+ input_path = path
+ if contents:
+ input_path = None
+ return self.__raw_execute(action, args, contents, input_path)
def upload_tool_file(self, path):
"""
@@ -364,7 +365,7 @@
contents : str
Rewritten contents of the config file to upload.
"""
- return self.__upload_contents("upload_config_file", path, contents)
+ return self.__upload_file("upload_config_file", path, contents=contents)
def upload_working_directory_file(self, path):
"""
@@ -378,9 +379,10 @@
"""
return self.__upload_file("upload_working_directory_file", path)
+ @parseJson()
def _get_output_type(self, name):
- return self.__raw_execute_and_parse("get_output_type", {"name": name,
- "job_id": self.job_id})
+ return self.__raw_execute("get_output_type", {"name": name,
+ "job_id": self.job_id})
def download_work_dir_output(self, source, working_directory, output_path):
"""
@@ -414,25 +416,19 @@
name = os.path.basename(path)
output_type = self._get_output_type(name)
if output_type == "direct":
- output = open(path, "wb")
+ output_path = path
elif output_type == "task":
- output = open(os.path.join(working_directory, name), "wb")
+ output_path = os.path.join(working_directory, name)
else:
raise Exception("No remote output found for dataset with path %s" % path)
- self.__raw_download_output(name, self.job_id, output_type, output)
+ self.__raw_download_output(name, self.job_id, output_type, output_path)
- def __raw_download_output(self, name, job_id, output_type, output_file):
- response = self.__raw_execute("download_output", {"name": name,
- "job_id": self.job_id,
- "output_type": output_type})
- try:
- while True:
- buffer = response.read(1024)
- if buffer == "":
- break
- output_file.write(buffer)
- finally:
- output_file.close()
+ def __raw_download_output(self, name, job_id, output_type, output_path):
+ self.__raw_execute("download_output",
+ {"name": name,
+ "job_id": self.job_id,
+ "output_type": output_type},
+ output_path=output_path)
def launch(self, command_line):
"""
@@ -463,11 +459,12 @@
return complete_response
time.sleep(1)
+ @parseJson()
def raw_check_complete(self):
"""
Get check_complete response from the remote server.
"""
- check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id": self.job_id})
+ check_complete_response = self.__raw_execute("check_complete", {"job_id": self.job_id})
return check_complete_response
def check_complete(self):
@@ -482,11 +479,12 @@
"""
self.__raw_execute("clean", {"job_id": self.job_id})
+ @parseJson()
def setup(self):
"""
Setup remote LWR server to run this job.
"""
- return self.__raw_execute_and_parse("setup", {"job_id": self.job_id})
+ return self.__raw_execute("setup", {"job_id": self.job_id})
def _read(path):
diff -r fa34924860aaa282fe3c3021a257f2523848a6e6 -r ecbfab5f9f1b070bda03520700335d800b8fc761 lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
@@ -0,0 +1,16 @@
+from standard import Urllib2Transport
+from curl import PycurlTransport
+import os
+
+
+def get_transport(os_module=os):
+ use_curl = os_module.getenv('LWR_CURL_TRANSPORT', "0")
+ ## If LWR_CURL_TRANSPORT is unset or set to 0, use default,
+ ## else use curl.
+ if use_curl.isdigit() and not int(use_curl):
+ return Urllib2Transport()
+ else:
+ return PycurlTransport()
+
+
+__all__ = [get_transport]
diff -r fa34924860aaa282fe3c3021a257f2523848a6e6 -r ecbfab5f9f1b070bda03520700335d800b8fc761 lib/galaxy/jobs/runners/lwr_client/transport/curl.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/curl.py
@@ -0,0 +1,42 @@
+from cStringIO import StringIO
+try:
+ from pycurl import Curl
+except:
+ pass
+from os.path import getsize
+
+
+PYCURL_UNAVAILABLE_MESSAGE = \
+ "You are attempting to use the Pycurl version of the LWR client by pycurl is unavailable."
+
+
+class PycurlTransport(object):
+
+ def execute(self, url, data=None, input_path=None, output_path=None):
+ buf = self._open_output(output_path)
+ try:
+ c = self._new_curl_object()
+ c.setopt(c.URL, url.encode('ascii'))
+ c.setopt(c.WRITEFUNCTION, buf.write)
+ if input_path:
+ c.setopt(c.UPLOAD, 1)
+ c.setopt(c.READFUNCTION, open(input_path, 'rb').read)
+ filesize = getsize(input_path)
+ c.setopt(c.INFILESIZE, filesize)
+ if data:
+ c.setopt(c.POST, 1)
+ c.setopt(c.POSTFIELDS, data)
+ c.perform()
+ if not output_path:
+ return buf.getvalue()
+ finally:
+ buf.close()
+
+ def _new_curl_object(self):
+ try:
+ return Curl()
+ except NameError:
+ raise ImportError(PYCURL_UNAVAILABLE_MESSAGE)
+
+ def _open_output(self, output_path):
+ return open(output_path, 'wb') if output_path else StringIO()
diff -r fa34924860aaa282fe3c3021a257f2523848a6e6 -r ecbfab5f9f1b070bda03520700335d800b8fc761 lib/galaxy/jobs/runners/lwr_client/transport/standard.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/standard.py
@@ -0,0 +1,33 @@
+"""
+LWR HTTP Client layer based on Python Standard Library (urllib2)
+"""
+import mmap
+import urllib2
+
+
+class Urllib2Transport(object):
+
+ def _url_open(self, request, data):
+ return urllib2.urlopen(request, data)
+
+ def execute(self, url, data=None, input_path=None, output_path=None):
+ request = urllib2.Request(url=url, data=data)
+ input = None
+ try:
+ if input_path:
+ input = open(input_path, 'rb')
+ data = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
+ response = self._url_open(request, data)
+ finally:
+ if input:
+ input.close()
+ if output_path:
+ with open(output_path, 'wb') as output:
+ while True:
+ buffer = response.read(1024)
+ if buffer == "":
+ break
+ output.write(buffer)
+ return response
+ else:
+ return response.read()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/26f7825cc871/
changeset: 26f7825cc871
user: jmchilton
date: 2013-02-23 08:14:22
summary: Refactor code related to expanding multi inputs to more easily allow for switching between matched and product mode in the same workflow execution.
affected #: 1 file
diff -r 1c2e1625dd8a419aa1d335ddc19ba34857c05fc4 -r 26f7825cc871c752d1e79bbe8984857d67f1eb0e lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -2106,22 +2106,27 @@
def _expand_multiple_inputs(kwargs, mode):
- (input_combos, multi_inputs) = _build_input_combos(kwargs, mode)
+ (single_inputs, matched_multi_inputs, multiplied_multi_inputs) = \
+ _split_inputs(kwargs, mode)
+
+ # Build up every combination of inputs to be run together.
+ #input_combos = [single_inputs]
+ input_combos = _extend_with_matched_combos(single_inputs, matched_multi_inputs)
+ input_combos = _extend_with_multiplied_combos(input_combos, multiplied_multi_inputs)
+
+ # Input name that are multiply specified
+ multi_input_keys = \
+ matched_multi_inputs.keys() + multiplied_multi_inputs.keys()
+
for input_combo in input_combos:
for key, value in input_combo.iteritems():
kwargs[key] = value
- yield (kwargs, multi_inputs.keys())
+ yield (kwargs, multi_input_keys)
-def _build_input_combos(kwargs, mode):
- if mode == "product":
- return _build_input_combos_product(kwargs)
- else: # mode == "matched"
- return _build_input_combos_matched(kwargs)
-def _build_input_combos_matched(kwargs):
- (single_inputs, multi_inputs) = _split_inputs(kwargs)
+def _extend_with_matched_combos(single_inputs, multi_inputs):
if len(multi_inputs) == 0:
- return ([{}], {})
+ return [single_inputs]
matched_multi_inputs = []
@@ -2139,11 +2144,12 @@
raise Exception("Failed to match up multi-select inputs, must select equal number of data files in each multiselect")
for index, value in enumerate(multi_input_values):
matched_multi_inputs[index][multi_input_key] = value
- return (matched_multi_inputs, multi_inputs)
+ return matched_multi_inputs
-def _build_input_combos_product(kwargs):
- (single_inputs, multi_inputs) = _split_inputs(kwargs)
- combos = [single_inputs]
+
+def _extend_with_multiplied_combos(input_combos, multi_inputs):
+ combos = input_combos
+
for multi_input_key, multi_input_value in multi_inputs.iteritems():
iter_combos = []
@@ -2152,14 +2158,18 @@
iter_combos.append(_copy_and_extend_inputs(combo, multi_input_key, input_value))
combos = iter_combos
- return (combos, multi_inputs)
+ return combos
+
def _copy_and_extend_inputs(inputs, key, value):
new_inputs = dict(inputs)
new_inputs[key] = value
return new_inputs
-def _split_inputs(kwargs):
+
+def _split_inputs(kwargs, mode):
+ """
+ """
input_keys = filter(lambda a: a.endswith('|input'), kwargs)
single_inputs = {}
multi_inputs = {}
@@ -2169,4 +2179,10 @@
multi_inputs[input_key] = input_val
else:
single_inputs[input_key] = input_val
- return (single_inputs, multi_inputs)
+ matched_multi_inputs = {}
+ multiplied_multi_inputs = {}
+ if mode == "product":
+ multiplied_multi_inputs = multi_inputs
+ else:
+ matched_multi_inputs = multi_inputs
+ return (single_inputs, matched_multi_inputs, multiplied_multi_inputs)
https://bitbucket.org/galaxy/galaxy-central/commits/6f30725d5973/
changeset: 6f30725d5973
user: jmchilton
date: 2013-02-23 08:14:22
summary: Implement UI multi batch workflow inputs.
affected #: 4 files
diff -r 26f7825cc871c752d1e79bbe8984857d67f1eb0e -r 6f30725d59732a351ca9b4f6fd7bc14f43c8a4a1 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -2110,7 +2110,6 @@
_split_inputs(kwargs, mode)
# Build up every combination of inputs to be run together.
- #input_combos = [single_inputs]
input_combos = _extend_with_matched_combos(single_inputs, matched_multi_inputs)
input_combos = _extend_with_multiplied_combos(input_combos, multiplied_multi_inputs)
@@ -2172,17 +2171,18 @@
"""
input_keys = filter(lambda a: a.endswith('|input'), kwargs)
single_inputs = {}
- multi_inputs = {}
+ matched_multi_inputs = {}
+ multiplied_multi_inputs = {}
for input_key in input_keys:
input_val = kwargs[input_key]
if isinstance(input_val, list):
- multi_inputs[input_key] = input_val
+ input_base = input_key[:-len("|input")]
+ mode_key = "%s|multi_mode" % input_base
+ mode = kwargs.get(mode_key, "matched")
+ if mode == "matched":
+ matched_multi_inputs[input_key] = input_val
+ else:
+ multiplied_multi_inputs[input_key] = input_val
else:
single_inputs[input_key] = input_val
- matched_multi_inputs = {}
- multiplied_multi_inputs = {}
- if mode == "product":
- multiplied_multi_inputs = multi_inputs
- else:
- matched_multi_inputs = multi_inputs
return (single_inputs, matched_multi_inputs, multiplied_multi_inputs)
diff -r 26f7825cc871c752d1e79bbe8984857d67f1eb0e -r 6f30725d59732a351ca9b4f6fd7bc14f43c8a4a1 static/images/silk/link.png
Binary file static/images/silk/link.png has changed
diff -r 26f7825cc871c752d1e79bbe8984857d67f1eb0e -r 6f30725d59732a351ca9b4f6fd7bc14f43c8a4a1 static/images/silk/link_break.png
Binary file static/images/silk/link_break.png has changed
diff -r 26f7825cc871c752d1e79bbe8984857d67f1eb0e -r 6f30725d59732a351ca9b4f6fd7bc14f43c8a4a1 templates/webapps/galaxy/workflow/run.mako
--- a/templates/webapps/galaxy/workflow/run.mako
+++ b/templates/webapps/galaxy/workflow/run.mako
@@ -33,12 +33,12 @@
} else {
select.val($('option:last', select).val());
}
+ select.siblings('img').hide();
select.removeAttr('multiple').removeAttr('size');
placeholder = 'type to filter';
} else {
- // Comment out the following line to multiple batch input workflows in UI.
- $('.multiinput').addClass('disabled');
$('.multiinput', select.closest('.form-row')).removeClass('disabled');
+ select.siblings('img').show();
select.attr('multiple', 'multiple').attr('size', 8);
placeholder = 'type to filter, [enter] to select all';
}
@@ -126,6 +126,57 @@
select.after(filter);
select.width(new_width);
});
+
+ // Augment hidden fields with icons.
+ // http://stackoverflow.com/a/2088430
+ var imgOn='${h.url_for("/static/images/silk/link.png")}';
+ var imgOff='${h.url_for("/static/images/silk/link_break.png")}';
+ $(function(){
+ $(".multi-mode").each(function(){
+ if($(this).val() == "matched") {
+ $(this).before($(document.createElement("img"))
+ .attr({src:imgOn,title:'Checkbox', id:$(this).attr("id")})
+ .css("display", $(this).css("display"))
+ .addClass("chkBoxImg"));
+ } else {
+ $(this).before($(document.createElement("img"))
+ .attr({src:imgOff, title:'Checkbox',id:$(this).attr("id")})
+ .css("display", $(this).css("display"))
+ .addClass("chkBoxImg"));
+ }
+ });
+ $("img.chkBoxImg").click(function(){
+ i= $(this).siblings("input[type=hidden]");
+ s=$(this).attr("src");
+ if(s==imgOn) {
+ $(this).attr("src",imgOff);
+ $(i).val("product");
+ } else {
+ $(this).attr("src",imgOn);
+ $(i).val("matched");
+ }
+ });
+ });
+ $("#tool_form").submit(function(e) {
+ var matchLength = -1;
+ $('span.multiinput_wrap select[name*="|input"]').each(function() {
+ var value = $(this).val();
+ if(value instanceof Array) {
+ // Multi-value
+ if($(this).siblings("input[type=hidden]").val() == "matched") {
+ var length = $(this).val().length;
+ if(matchLength == -1) {
+ matchLength = length;
+ } else if(length != matchLength) {
+ e.preventDefault();
+ alert("Linked inputs must be submitted in equal number.");
+ return false;
+ }
+ }
+ }
+ });
+ return true;
+ });
});
</script></%def>
@@ -260,6 +311,7 @@
%if step.type == 'data_input':
##Input Dataset Step, wrap for multiinput.
<span class='multiinput_wrap'>
+ <input class="multi-mode" type="hidden" name="${str(step.id)}|multi_mode" id="${str(step.id)}|multi_mode" value="matched" />
${param.get_html_field( t, value, other_values ).get_html( str(step.id) + "|" + prefix )}
</span>
%else:
@@ -340,30 +392,6 @@
<form id="tool_form" name="tool_form" method="POST">
## <input type="hidden" name="workflow_name" value="${h.to_unicode( workflow.name ) | h}" />
-<!-- TODO: Implement UI for selecting between product and matched mode
- for batch workflows in multiple inputs are selected for 2 or more
- params.
-
- 1) Delete this line above: $('.multiinput').addClass('disabled');
- 2) Allow user to select between product and matched mode.
-
- If user selected 5 inputs for one param and 5 inputs for another
- in matched mode that will be run the workflow 5 times matching
- each input and in product mode it will run the workflow 25 times
- with every combination of input pairs. If user selects 6 inputs
- for one param and 4 for another, in product mode 24 workflows
- will run and in matched mode the submission will fail.
-
- In matched mode the inputs are matched from top to bottom
- regardless of the order they are actually select in. This
- behavior is I assume the desired behavior but I have only tested
- it in chrome, care should be taken to test behavior on other
- browsers and augment UI to ensure numbers of inputs matches
- up.
--->
-<input type="hidden" name="multiple_input_mode" value="matched" /><!-- product or matched -->
-
-
%if wf_parms:
<div class="metadataForm"><div class="metadataFormTitle">Workflow Parameters</div>
https://bitbucket.org/galaxy/galaxy-central/commits/fa34924860aa/
changeset: fa34924860aa
user: jmchilton
date: 2013-02-23 08:14:22
summary: Touch up UI related to multi input batch mode - move icon up by the multi document icon, switch from icon to CSS span, cleanup variable names.
affected #: 1 file
diff -r 6f30725d59732a351ca9b4f6fd7bc14f43c8a4a1 -r fa34924860aaa282fe3c3021a257f2523848a6e6 templates/webapps/galaxy/workflow/run.mako
--- a/templates/webapps/galaxy/workflow/run.mako
+++ b/templates/webapps/galaxy/workflow/run.mako
@@ -1,5 +1,11 @@
<%inherit file="/base.mako"/>
+<style>
+/* TODO: Move this block into base.less? base.css? Someone more familiar with GUI should move this. */
+.icon-button.link {background:url(../images/silk/link.png) no-repeat;cursor:pointer;float:none;display:inline-block;margin-left:10px;}
+.icon-button.link-broken {background:url(../images/silk/link_break.png) no-repeat;cursor:pointer;float:none;display:inline-block;margin-left:10px;}
+</style>
+
<%def name="javascripts()">
${parent.javascripts()}
${h.js( "libs/jquery/jquery.autocomplete" )}
@@ -33,12 +39,12 @@
} else {
select.val($('option:last', select).val());
}
- select.siblings('img').hide();
+ select.closest('.form-row').children('label').children('span.mode-icon').hide();
select.removeAttr('multiple').removeAttr('size');
placeholder = 'type to filter';
} else {
$('.multiinput', select.closest('.form-row')).removeClass('disabled');
- select.siblings('img').show();
+ select.closest('.form-row').children('label').children('span.mode-icon').show();
select.attr('multiple', 'multiple').attr('size', 8);
placeholder = 'type to filter, [enter] to select all';
}
@@ -129,30 +135,25 @@
// Augment hidden fields with icons.
// http://stackoverflow.com/a/2088430
- var imgOn='${h.url_for("/static/images/silk/link.png")}';
- var imgOff='${h.url_for("/static/images/silk/link_break.png")}';
$(function(){
$(".multi-mode").each(function(){
if($(this).val() == "matched") {
- $(this).before($(document.createElement("img"))
- .attr({src:imgOn,title:'Checkbox', id:$(this).attr("id")})
- .css("display", $(this).css("display"))
- .addClass("chkBoxImg"));
+ $(this).closest('.form-row').children('label').append($('<span class="icon-button link mode-icon"></span>')
+ .attr({id:$(this).attr("id")})
+ .css("display", $(this).css("display")));
} else {
- $(this).before($(document.createElement("img"))
- .attr({src:imgOff, title:'Checkbox',id:$(this).attr("id")})
- .css("display", $(this).css("display"))
- .addClass("chkBoxImg"));
+ $(this).closest('.form-row').children('label').append($('<span class="icon-button link-broken mode-icon"></span>')
+ .attr({id:$(this).attr("id")})
+ .css("display", $(this).css("display")));
}
});
- $("img.chkBoxImg").click(function(){
- i= $(this).siblings("input[type=hidden]");
- s=$(this).attr("src");
- if(s==imgOn) {
- $(this).attr("src",imgOff);
+ $("span.mode-icon").click(function(){
+ i= $(this).closest('.form-row').find("input[type=hidden]");
+ if($(this).hasClass("link")) {
+ $(this).removeClass("link").addClass("link-broken");
$(i).val("product");
} else {
- $(this).attr("src",imgOn);
+ $(this).removeClass("link-broken").addClass("link");
$(i).val("matched");
}
});
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1c2e1625dd8a/
changeset: 1c2e1625dd8a
user: dan
date: 2013-02-22 23:03:49
summary: Fix for displaying error messages on DataToolParameter where optional=True.
affected #: 1 file
diff -r 574e22b584eb7ef8b6168902347473ed3adeccac -r 1c2e1625dd8a419aa1d335ddc19ba34857c05fc4 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1555,7 +1555,7 @@
# although, this should never be called in workflow mode right?
if trans.workflow_building_mode:
return None
- if not value:
+ if not value and not self.optional:
raise ValueError( "History does not include a dataset of the required format / build" )
if value in [None, "None"]:
return None
https://bitbucket.org/galaxy/galaxy-central/commits/c177960e4ed6/
changeset: c177960e4ed6
branch: stable
user: dan
date: 2013-02-22 23:03:49
summary: Fix for displaying error messages on DataToolParameter where optional=True.
affected #: 1 file
diff -r 31b09605fcc313b3e93efb927ac328bfcc42ad82 -r c177960e4ed61925a8b6c858e1f3f8d54c93cb37 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1552,7 +1552,7 @@
# although, this should never be called in workflow mode right?
if trans.workflow_building_mode:
return None
- if not value:
+ if not value and not self.optional:
raise ValueError( "History does not include a dataset of the required format / build" )
if value in [None, "None"]:
return None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Enhance filtering in the repository revisions api.
by commits-noreply@bitbucket.org 22 Feb '13
by commits-noreply@bitbucket.org 22 Feb '13
22 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/574e22b584eb/
changeset: 574e22b584eb
user: greg
date: 2013-02-22 22:41:56
summary: Enhance filtering in the repository revisions api.
affected #: 2 files
diff -r ed6104097dc9b9c519f71a392225e7ed45bfede3 -r 574e22b584eb7ef8b6168902347473ed3adeccac lib/galaxy/webapps/community/api/repository_revision_contents.py
--- a/lib/galaxy/webapps/community/api/repository_revision_contents.py
+++ b/lib/galaxy/webapps/community/api/repository_revision_contents.py
@@ -11,9 +11,11 @@
log = logging.getLogger( __name__ )
def default_value_mapper( trans, repository_metadata ):
- return { 'id' : trans.security.encode_id( repository_metadata.id ),
- 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ),
- 'time_last_tested' : time_ago( repository_metadata.time_last_tested ) }
+ value_mapper = { 'id' : trans.security.encode_id( repository_metadata.id ),
+ 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ) }
+ if repository_metadata.time_last_tested:
+ value_mapper[ 'time_last_tested' ] = time_ago( repository_metadata.time_last_tested )
+ return value_mapper
class RepositoryRevisionContentsController( BaseAPIController ):
@web.expose_api
diff -r ed6104097dc9b9c519f71a392225e7ed45bfede3 -r 574e22b584eb7ef8b6168902347473ed3adeccac lib/galaxy/webapps/community/api/repository_revisions.py
--- a/lib/galaxy/webapps/community/api/repository_revisions.py
+++ b/lib/galaxy/webapps/community/api/repository_revisions.py
@@ -2,6 +2,7 @@
from galaxy.web.framework.helpers import time_ago
import galaxy.util.shed_util_common as suc
from galaxy import web, util
+from galaxy.model.orm import and_, or_
from galaxy.web.base.controller import BaseAPIController
from galaxy.web.framework.helpers import is_true
@@ -13,29 +14,56 @@
log = logging.getLogger( __name__ )
def default_value_mapper( trans, repository_metadata ):
- return { 'id' : trans.security.encode_id( repository_metadata.id ),
- 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ),
- 'time_last_tested' : time_ago( repository_metadata.time_last_tested ) }
+ value_mapper = { 'id' : trans.security.encode_id( repository_metadata.id ),
+ 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ) }
+ if repository_metadata.time_last_tested:
+ value_mapper[ 'time_last_tested' ] = time_ago( repository_metadata.time_last_tested )
+ return value_mapper
class RepositoryRevisionsController( BaseAPIController ):
"""RESTful controller for interactions with tool shed repository revisions."""
@web.expose_api
- def index( self, trans, downloadable=True, **kwd ):
+ def index( self, trans, **kwd ):
"""
GET /api/repository_revisions
Displays a collection (list) of repository revisions.
"""
rval = []
- downloadable = util.string_as_bool( downloadable )
+ # Build up an anded clause list of filters.
+ clause_list = []
+ # Filter by downloadable if received.
+ downloadable = kwd.get( 'downloadable', None )
+ if downloadable is not None:
+ clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.string_as_bool( downloadable ) )
+ # Filter by tools_functionally_correct if received.
+ tools_functionally_correct = kwd.get( 'tools_functionally_correct', None )
+ if tools_functionally_correct is not None:
+ clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.string_as_bool( tools_functionally_correct ) )
+ # Filter by do_not_test if received.
+ do_not_test = kwd.get( 'do_not_test', None )
+ if do_not_test is not None:
+ clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.string_as_bool( do_not_test ) )
+ # Filter by must_include_tools if received.
+ must_include_tools = kwd.get( 'must_include_tools', False )
try:
query = trans.sa_session.query( trans.app.model.RepositoryMetadata ) \
- .filter( trans.app.model.RepositoryMetadata.table.c.downloadable == downloadable ) \
+ .filter( and_( *clause_list ) ) \
.order_by( trans.app.model.RepositoryMetadata.table.c.repository_id ) \
.all()
for repository_metadata in query:
- item = repository_metadata.get_api_value( view='collection', value_mapper=default_value_mapper( trans, repository_metadata ) )
- item[ 'url' ] = web.url_for( 'repository_revision', id=trans.security.encode_id( repository_metadata.id ) )
- rval.append( item )
+ if must_include_tools:
+ metadata = repository_metadata.metadata
+ if 'tools' in metadata:
+ ok_to_return = True
+ else:
+ ok_to_return = False
+ else:
+ ok_to_return = True
+ if ok_to_return:
+ item = repository_metadata.get_api_value( view='collection',
+ value_mapper=default_value_mapper( trans, repository_metadata ) )
+ item[ 'url' ] = web.url_for( 'repository_revision', id=trans.security.encode_id( repository_metadata.id ) )
+ rval.append( item )
except Exception, e:
rval = "Error in the Tool Shed repository_revisions API in index: " + str( e )
log.error( rval + ": %s" % str( e ) )
@@ -49,7 +77,8 @@
"""
try:
repository_metadata = suc.get_repository_metadata_by_id( trans, id )
- repository_data = repository_metadata.get_api_value( view='element', value_mapper=default_value_mapper( trans, repository_metadata ) )
+ repository_data = repository_metadata.get_api_value( view='element',
+ value_mapper=default_value_mapper( trans, repository_metadata ) )
repository_data[ 'contents_url' ] = web.url_for( 'repository_revision_contents', repository_metadata_id=id )
except Exception, e:
message = "Error in the Tool Shed repository_revisions API in show: %s" % str( e )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fix for deleting and undeleting repositories in the tool shed: if deleting, all installable revisions are marked as not installable, and if undeleting, all revisions are inspected and those determined to be installable are marked accordingly.
by commits-noreply@bitbucket.org 22 Feb '13
by commits-noreply@bitbucket.org 22 Feb '13
22 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ed6104097dc9/
changeset: ed6104097dc9
user: greg
date: 2013-02-22 21:19:47
summary: Fix for deleting and undeleting repositories in the tool shed: if deleting, all installable revisions are marked as not installable, and if undeleting, all revisions are inspected and those determined to be installable are marked accordingly.
affected #: 1 file
diff -r 3bdb4291e5e666c4d4b86e184b4599e61b847864 -r ed6104097dc9b9c519f71a392225e7ed45bfede3 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -586,12 +586,17 @@
deleted_repositories = ""
for repository_id in ids:
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- if not repository.deleted:
- repository.deleted = True
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- count += 1
- deleted_repositories += " %s " % repository.name
+ if repository:
+ if not repository.deleted:
+ # Mark all installable repository_metadata records as not installable.
+ for repository_metadata in repository.downloadable_revisions:
+ repository_metadata.downloadable = False
+ trans.sa_session.add( repository_metadata )
+ repository.deleted = True
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ count += 1
+ deleted_repositories += " %s " % repository.name
if count:
message = "Deleted %d %s: %s" % ( count, inflector.cond_plural( len( ids ), "repository" ), deleted_repositories )
else:
@@ -740,12 +745,20 @@
undeleted_repositories = ""
for repository_id in ids:
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- if repository.deleted:
- repository.deleted = False
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- count += 1
- undeleted_repositories += " %s" % repository.name
+ if repository:
+ if repository.deleted:
+ # Inspect all repository_metadata records to determine those that are installable, and mark them accordingly.
+ for repository_metadata in repository.metadata_revisions:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if suc.is_downloadable( metadata ):
+ repository_metadata.downloadable = True
+ trans.sa_session.add( repository_metadata )
+ repository.deleted = False
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ count += 1
+ undeleted_repositories += " %s" % repository.name
if count:
message = "Undeleted %d %s: %s" % ( count, inflector.cond_plural( count, "repository" ), undeleted_repositories )
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: carlfeberhard: Fix to new-style display_application url mapping/formation; display_applications/link_generator: clean up
by commits-noreply@bitbucket.org 22 Feb '13
by commits-noreply@bitbucket.org 22 Feb '13
22 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3bdb4291e5e6/
changeset: 3bdb4291e5e6
user: carlfeberhard
date: 2013-02-22 20:58:50
summary: Fix to new-style display_application url mapping/formation; display_applications/link_generator: clean up
affected #: 2 files
diff -r 5f10f61335fbcfb7982a879edc80325f5a73402f -r 3bdb4291e5e666c4d4b86e184b4599e61b847864 lib/galaxy/datatypes/display_applications/link_generator.py
--- a/lib/galaxy/datatypes/display_applications/link_generator.py
+++ b/lib/galaxy/datatypes/display_applications/link_generator.py
@@ -1,4 +1,6 @@
-"""Separating Transaction based elements of display applications from datatypes.
+"""Classes to generate links for display applications.
+
+Separating Transaction based elements of display applications from datatypes.
"""
import urllib
@@ -10,19 +12,22 @@
from galaxy import util
from galaxy.web import url_for
-
from galaxy.datatypes.interval import Interval, Gff, Wiggle, CustomTrack
+#TODO: Ideally, these classes would be instantiated in the trans (or some other semi-persistant fixture)
+# Currently, these are instantiated per HDA which is not the best solution
+
+#TODO: these could be extended to handle file_function and parse/contain the builds.txt files
def get_display_app_link_generator( display_app_name ):
"""Returns an instance of the proper link generator class
based on the display_app_name or DisplayAppLinkGenerator
- if the name is unrecognized.
+ if the display_app_name is unrecognized.
"""
if display_app_name == 'ucsc':
return UCSCDisplayAppLinkGenerator()
- if display_app_name == 'gbrowse':
+ elif display_app_name == 'gbrowse':
return GBrowseDisplayAppLinkGenerator()
return DisplayAppLinkGenerator()
@@ -58,9 +63,10 @@
class UCSCDisplayAppLinkGenerator( DisplayAppLinkGenerator ):
- """Class for UCSC display application link generators.
+ """Class for generating links to display data in the
+ UCSC genome browser.
- This class returns UCSC main and test links for the following datatypes:
+ This class returns links for the following datatypes and their subclasses:
Interval, Wiggle, Gff, CustomTrack
"""
def __init__( self ):
@@ -69,7 +75,6 @@
def _link_function_from_datatype( self, datatype ):
"""Dispatch to proper link generating function based on datatype.
"""
- # they're all the same
if( ( isinstance( datatype, Interval ) )
or ( isinstance( datatype, Wiggle ) )
or ( isinstance( datatype, Gff ) )
@@ -83,8 +88,6 @@
and content of dataset.
"""
# this is a refactor of Interval.ucsc_links, GFF.ucsc_links, Wiggle.ucsc_links, and CustomTrack.ucsc_links
- # ...which are all the same function
-
#TODO: app vars can be moved into init (and base_url as well)
chrom, start, stop = dataset.datatype.get_estimated_display_viewport( dataset )
if chrom is None:
@@ -107,10 +110,11 @@
class GBrowseDisplayAppLinkGenerator( DisplayAppLinkGenerator ):
- """Class for UCSC display application link generators.
+ """Class for generating links to display data in the
+ GBrowse genome browser.
- This class returns UCSC main and test links for the following datatypes:
- Interval, Wiggle, Gff, CustomTrack
+ This class returns links for the following datatypes and their subclasses:
+ Gff, Wiggle
"""
def __init__( self ):
self.display_app_name = 'gbrowse'
@@ -118,7 +122,6 @@
def _link_function_from_datatype( self, datatype ):
"""Dispatch to proper link generating function based on datatype.
"""
- # they're all the same
if( ( isinstance( datatype, Gff ) )
or ( isinstance( datatype, Wiggle ) ) ):
return self.gbrowse_links
diff -r 5f10f61335fbcfb7982a879edc80325f5a73402f -r 3bdb4291e5e666c4d4b86e184b4599e61b847864 lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -244,7 +244,7 @@
def get_display_app_url( display_app_link, hda, trans ):
web_url_for = routes.URLGenerator( trans.webapp.mapper, trans.environ )
dataset_hash, user_hash = util.encode_dataset_user( trans, hda, None )
- return web_url_for( controller='/dataset',
+ return web_url_for( controller='dataset',
action="display_application",
dataset_id=dataset_hash,
user_id=user_hash,
@@ -280,4 +280,3 @@
display_apps.append( dict( label=hda.datatype.get_display_label( display_app_name ), links=app_links ) )
return display_apps
-
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Enhancements to the tool shed api to enable the currently most important updates for repository_metadata records.
by commits-noreply@bitbucket.org 22 Feb '13
by commits-noreply@bitbucket.org 22 Feb '13
22 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5f10f61335fb/
changeset: 5f10f61335fb
user: greg
date: 2013-02-22 20:35:34
summary: Enhancements to the tool shed api to enable the currently most important updates for repository_metadata records.
affected #: 7 files
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f lib/galaxy/webapps/community/api/repositories.py
--- a/lib/galaxy/webapps/community/api/repositories.py
+++ b/lib/galaxy/webapps/community/api/repositories.py
@@ -32,7 +32,7 @@
item[ 'url' ] = web.url_for( 'repository_contents', repository_id=trans.security.encode_id( repository.id ) )
rval.append( item )
except Exception, e:
- message = "Error in the Tool Shed API at index: %s" % str( e )
+ message = "Error in the Tool Shed repositories API in index: %s" % str( e )
log.error( message, exc_info=True )
trans.response.status = 500
return message
@@ -50,8 +50,8 @@
repository_data = repository.get_api_value( view='element', value_mapper=value_mapper )
repository_data[ 'contents_url' ] = web.url_for( 'repository_contents', repository_id=id )
except Exception, e:
- message = "Error in the Tool Shed API at show: %s" % str( e )
+ message = "Error in the Tool Shed repositories API in show: %s" % str( e )
log.error( message, exc_info=True )
trans.response.status = 500
return message
- return repository_data
\ No newline at end of file
+ return repository_data
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f lib/galaxy/webapps/community/api/repository_contents.py
--- a/lib/galaxy/webapps/community/api/repository_contents.py
+++ b/lib/galaxy/webapps/community/api/repository_contents.py
@@ -11,21 +11,24 @@
class RepositoryContentsController( BaseAPIController ):
@web.expose_api
- def index( self, trans, repository_id, **kwd ):
+ def index( self, trans, **kwd ):
"""
- GET /api/repositories/{encoded_repsository_id}/contents
- Displays a collection (list) of repository contents.
+ GET /api/repositories/{encoded_repository_id}/contents
+ Displays a collection (dictionary) of repository contents.
:param repository_id: an encoded id string of the `Repository` to inspect
"""
rval = []
+ repository_id = kwd.get( 'repository_id', None )
try:
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- repository_dict = repository.as_dict( trans )
+ value_mapper={ 'id' : repository_id,
+ 'user_id' : trans.security.encode_id( repository.user_id ) }
+ repository_dict = repository.as_dict( value_mapper )
repository_dict[ 'url' ] = web.url_for( 'repository_contents', repository_id=repository_id )
rval.append( repository_dict )
except Exception, e:
- message = "Error in repository_contents API: %s" % str( e )
+ message = "Error in the Tool Shed repository_contents API in index: %s" % str( e )
log.error( message, exc_info=True )
trans.response.status = 500
return message
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f lib/galaxy/webapps/community/api/repository_revision_contents.py
--- /dev/null
+++ b/lib/galaxy/webapps/community/api/repository_revision_contents.py
@@ -0,0 +1,39 @@
+import logging
+from galaxy.web.framework.helpers import time_ago
+import galaxy.util.shed_util_common as suc
+from galaxy import web
+from galaxy.web.base.controller import BaseAPIController
+
+import pkg_resources
+pkg_resources.require( "Routes" )
+import routes
+
+log = logging.getLogger( __name__ )
+
+def default_value_mapper( trans, repository_metadata ):
+ return { 'id' : trans.security.encode_id( repository_metadata.id ),
+ 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ),
+ 'time_last_tested' : time_ago( repository_metadata.time_last_tested ) }
+
+class RepositoryRevisionContentsController( BaseAPIController ):
+ @web.expose_api
+ def index( self, trans, **kwd ):
+ """
+ GET /api/repository_revisions/{encoded_repository_metadata_id}/contents
+ Displays a collection (dictionary) of repository_metadata contents.
+
+ :param repository_metadata_id: an encoded id string of the `RepositoryMetadata` to inspect
+ """
+ rval = []
+ repository_metadata_id = kwd.get( 'repository_metadata_id', None )
+ try:
+ repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_dict = repository_metadata.as_dict( value_mapper=default_value_mapper( trans, repository_metadata ) )
+ repository_dict[ 'url' ] = web.url_for( 'repository_revision_contents', repository_metadata_id=repository_metadata_id )
+ rval.append( repository_dict )
+ except Exception, e:
+ message = "Error in the Tool Shed repository_revision_contents API in index: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+ return rval
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f lib/galaxy/webapps/community/api/repository_revisions.py
--- a/lib/galaxy/webapps/community/api/repository_revisions.py
+++ b/lib/galaxy/webapps/community/api/repository_revisions.py
@@ -1,3 +1,5 @@
+import datetime
+from galaxy.web.framework.helpers import time_ago
import galaxy.util.shed_util_common as suc
from galaxy import web, util
from galaxy.web.base.controller import BaseAPIController
@@ -10,6 +12,11 @@
log = logging.getLogger( __name__ )
+def default_value_mapper( trans, repository_metadata ):
+ return { 'id' : trans.security.encode_id( repository_metadata.id ),
+ 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ),
+ 'time_last_tested' : time_ago( repository_metadata.time_last_tested ) }
+
class RepositoryRevisionsController( BaseAPIController ):
"""RESTful controller for interactions with tool shed repository revisions."""
@web.expose_api
@@ -26,13 +33,56 @@
.order_by( trans.app.model.RepositoryMetadata.table.c.repository_id ) \
.all()
for repository_metadata in query:
- value_mapper={ 'id' : trans.security.encode_id( repository_metadata.id ),
- 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ) }
- item = repository_metadata.get_api_value( view='collection', value_mapper=value_mapper )
+ item = repository_metadata.get_api_value( view='collection', value_mapper=default_value_mapper( trans, repository_metadata ) )
item[ 'url' ] = web.url_for( 'repository_revision', id=trans.security.encode_id( repository_metadata.id ) )
rval.append( item )
except Exception, e:
- rval = "Error in repository_revisions API at index: " + str( e )
+ rval = "Error in the Tool Shed repository_revisions API in index: " + str( e )
log.error( rval + ": %s" % str( e ) )
trans.response.status = 500
return rval
+ @web.expose_api
+ def show( self, trans, id, **kwd ):
+ """
+ GET /api/repository_revisions/{encoded_repository_metadata_id}
+ Displays information about a repository_metadata record in the Tool Shed.
+ """
+ try:
+ repository_metadata = suc.get_repository_metadata_by_id( trans, id )
+ repository_data = repository_metadata.get_api_value( view='element', value_mapper=default_value_mapper( trans, repository_metadata ) )
+ repository_data[ 'contents_url' ] = web.url_for( 'repository_revision_contents', repository_metadata_id=id )
+ except Exception, e:
+ message = "Error in the Tool Shed repository_revisions API in show: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+ return repository_data
+ @web.expose_api
+ def update( self, trans, payload, **kwd ):
+ """
+ PUT /api/repository_revisions/{encoded_repository_metadata_id}/{payload}
+ Updates the value of specified columns of the repository_metadata table based on the key / value pairs in payload.
+ """
+ repository_metadata_id = kwd.get( 'id', None )
+ try:
+ repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ flush_needed = False
+ for key, new_value in payload.items():
+ if hasattr( repository_metadata, key ):
+ old_value = getattr( repository_metadata, key )
+ setattr( repository_metadata, key, new_value )
+ if key in [ 'tools_functionally_correct', 'time_last_tested' ]:
+ # Automatically update repository_metadata.time_last_tested.
+ repository_metadata.time_last_tested = datetime.datetime.utcnow()
+ flush_needed = True
+ if flush_needed:
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ except Exception, e:
+ message = "Error in the Tool Shed repository_revisions API in update: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+ item = repository_metadata.as_dict( value_mapper=default_value_mapper( trans, repository_metadata ) )
+ item[ 'url' ] = web.url_for( 'repository_revision', id=repository_metadata_id )
+ return [ item ]
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f lib/galaxy/webapps/community/buildapp.py
--- a/lib/galaxy/webapps/community/buildapp.py
+++ b/lib/galaxy/webapps/community/buildapp.py
@@ -77,6 +77,12 @@
name_prefix='repository_',
path_prefix='/api/repositories/:repository_id',
parent_resources=dict( member_name='repository', collection_name='repositories' ) )
+ webapp.api_mapper.resource( 'content',
+ 'contents',
+ controller='repository_revision_contents',
+ name_prefix='repository_revision_',
+ path_prefix='/api/repository_revisions/:repository_metadata_id',
+ parent_resources=dict( member_name='repository_revision', collection_name='repository_revisions' ) )
webapp.api_mapper.resource( 'repository', 'repositories', path_prefix='/api' )
webapp.api_mapper.resource( 'repository_revision', 'repository_revisions', path_prefix='/api' )
webapp.finalize_config()
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py
+++ b/lib/galaxy/webapps/community/model/__init__.py
@@ -133,11 +133,8 @@
self.email_alerts = email_alerts
self.times_downloaded = times_downloaded
self.deprecated = deprecated
- def as_dict( self, trans ):
- value_mapper={ 'id' : trans.security.encode_id( self.id ),
- 'user_id' : trans.security.encode_id( self.user_id ) }
- repository_dict = self.get_api_value( view='element', value_mapper=value_mapper )
- return repository_dict
+ def as_dict( self, value_mapper=None ):
+ return self.get_api_value( view='element', value_mapper=value_mapper )
def get_api_value( self, view='collection', value_mapper=None ):
if value_mapper is None:
value_mapper = {}
@@ -194,10 +191,13 @@
fp.close()
class RepositoryMetadata( object, APIItem ):
- api_collection_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable' )
- api_element_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable' )
- def __init__( self, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False, downloadable=False,
+ api_collection_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable', 'tools_functionally_correct',
+ 'do_not_test', 'time_last_tested', 'tool_test_errors' )
+ api_element_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable', 'tools_functionally_correct',
+ 'do_not_test', 'time_last_tested', 'tool_test_errors' )
+ def __init__( self, id=None, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False, downloadable=False,
tools_functionally_correct=False, do_not_test=False, time_last_tested=None, tool_test_errors=None ):
+ self.id = id
self.repository_id = repository_id
self.changeset_revision = changeset_revision
self.metadata = metadata or dict()
@@ -208,6 +208,8 @@
self.do_not_test = do_not_test
self.time_last_tested = time_last_tested
self.tool_test_errors = tool_test_errors
+ def as_dict( self, value_mapper=None ):
+ return self.get_api_value( view='element', value_mapper=value_mapper )
def get_api_value( self, view='collection', value_mapper=None ):
if value_mapper is None:
value_mapper = {}
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f scripts/api/tool_shed_repository_revision_update.py
--- /dev/null
+++ b/scripts/api/tool_shed_repository_revision_update.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+"""
+PUT/update script to update appropriate values in a repository_metadata table record in the Tool Shed.
+
+usage: tool_shed_repository_revision_update.py key url key1=value1 key2=value2 ...
+"""
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import update
+
+import pkg_resources
+pkg_resources.require( "simplejson" )
+
+import simplejson
+
+to_json_string = simplejson.dumps
+from_json_string = simplejson.loads
+
+data = {}
+for key, value in [ kwarg.split( '=', 1 ) for kwarg in sys.argv[ 3: ] ]:
+ """
+ This example script will properly handle updating the value of one or more of the following RepositoryMetadata attributes:
+ tools_functionally_correct, do_not_test, tool_test_errors
+ """
+ if key in [ 'tools_functionally_correct', 'do_not_test' ]:
+ if str( value ).lower() in [ 'true', 'yes', 'on' ]:
+ new_value = True
+ else:
+ new_value = False
+ elif key in [ 'tool_test_errors' ]:
+ new_value = from_json_string( value )
+ else:
+ new_value = str( value )
+ data[ key ] = new_value
+
+update( sys.argv[ 1 ], sys.argv[ 2 ], data )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0