galaxy-commits
December 2013
- 1 participant
- 207 discussions
commit/galaxy-central: greg: Fix typo in my last commit - thanks John!
by commits-noreply@bitbucket.org 02 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f1729c6dcba8/
Changeset: f1729c6dcba8
User: greg
Date: 2013-12-03 03:35:00
Summary: Fix typo in my last commit - thanks John!
Affected #: 1 file
diff -r cac7a527140e097e6e7f1f5d0fdcf588a8183ece -r f1729c6dcba86603a9e471688df633251511a22b lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1533,7 +1533,7 @@
if file_name:
if os.path.exists( file_name ):
try:
- os.remove( dir )
+ os.remove( file_name )
except:
pass
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
02 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/cac7a527140e/
Changeset: cac7a527140e
User: greg
Date: 2013-12-03 02:36:59
Summary: Tweak changeset 5b8c91e4183664c90f4d95699ab971e674ffd67d, which created symlinks to a required tool dependency's env.sh file for dependent tool dependencies being installed. That approach was better than the previous one, which parsed the required repository's tool_dependencies.xml file to create the env.sh file. This changeset uses a slightly different approach: it creates an env.sh file for the dependent tool dependency whose first line "sources" the required tool dependency's env.sh file. This allows the dependent env.sh file to be managed by its tool dependency (e.g., its tool dependency may require additional env.sh paths beyond those contained in the required tool dependency's env.sh file). This changeset also handles syncing the database with the file system when tool dependencies are being installed while handling a complex repository dependency definition; this process used to be handled only when not dealing with a complex repository dependency definition. This fix should correct problems encountered by the Tool Shed's install and test framework which were introduced in 5b8c91e4183664c90f4d95699ab971e674ffd67d.
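The env.sh "sourcing" approach described above can be pictured with a minimal, self-contained sketch. This is not the changeset's code (which uses fabric_util.EnvFileBuilder with action="source", visible in the diff below); the helper name and the exact shell line written are illustrative assumptions.

import os

def write_dependent_env_sh(dependent_install_dir, required_env_sh_path):
    # Hypothetical helper: create an env.sh whose first line sources the
    # required tool dependency's env.sh. It does not matter whether the
    # required env.sh exists yet; the guard keeps the file safe to source
    # before the required repository finishes installing.
    if not os.path.exists(dependent_install_dir):
        os.makedirs(dependent_install_dir)
    env_sh_path = os.path.join(dependent_install_dir, 'env.sh')
    with open(env_sh_path, 'w') as env_sh:
        env_sh.write('if [ -f "%s" ] ; then . "%s" ; fi\n'
                     % (required_env_sh_path, required_env_sh_path))
    return env_sh_path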
Affected #: 5 files
diff -r ef33472f0aa296e55fd811357e09366f9cee5467 -r cac7a527140e097e6e7f1f5d0fdcf588a8183ece lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3471,46 +3471,6 @@
def can_reinstall_or_activate( self ):
return self.deleted
- @property
- def installing( self ):
- """
- Used to determine if tool dependencies can denote this repository as
- installing.
- """
- return self.status not in [ self.installation_status.DEACTIVATED,
- self.installation_status.UNINSTALLED,
- self.installation_status.ERROR,
- self.installation_status.INSTALLED,
- self.installation_status.NEW,
- ]
-
- @property
- def installation_complete( self ):
- """
- Used to determine if tool dependency installations can proceed.
- Installed artifacts must be available on disk.
- """
- return self.status in [ self.installation_status.DEACTIVATED,
- self.installation_status.INSTALLED,
- ]
-
- def to_dict( self, view='collection', value_mapper=None ):
- if value_mapper is None:
- value_mapper = {}
- rval = {}
- try:
- visible_keys = self.__getattribute__( 'dict_' + view + '_visible_keys' )
- except AttributeError:
- raise Exception( 'Unknown API view: %s' % view )
- for key in visible_keys:
- try:
- rval[ key ] = self.__getattribute__( key )
- if key in value_mapper:
- rval[ key ] = value_mapper.get( key, rval[ key ] )
- except AttributeError:
- rval[ key ] = None
- return rval
-
def get_shed_config_filename( self ):
shed_config_filename = None
if self.metadata:
@@ -3659,7 +3619,8 @@
"""Return the repository's tool dependencies that are currently installed."""
installed_dependencies = []
for tool_dependency in self.tool_dependencies:
- if tool_dependency.status in [ ToolDependency.installation_status.INSTALLED, ToolDependency.installation_status.ERROR ]:
+ if tool_dependency.status in [ ToolDependency.installation_status.INSTALLED,
+ ToolDependency.installation_status.ERROR ]:
installed_dependencies.append( tool_dependency )
return installed_dependencies
@@ -3670,6 +3631,11 @@
return False
@property
+ def is_installed( self ):
+ return self.status in [ self.installation_status.DEACTIVATED,
+ self.installation_status.INSTALLED ]
+
+ @property
def is_latest_installable_revision( self ):
if self.tool_shed_status:
return galaxy.util.asbool( self.tool_shed_status.get( 'latest_installable_revision', False ) )
@@ -3804,6 +3770,23 @@
shed_config_filename = property( get_shed_config_filename, set_shed_config_filename )
+ def to_dict( self, view='collection', value_mapper=None ):
+ if value_mapper is None:
+ value_mapper = {}
+ rval = {}
+ try:
+ visible_keys = self.__getattribute__( 'dict_' + view + '_visible_keys' )
+ except AttributeError:
+ raise Exception( 'Unknown API view: %s' % view )
+ for key in visible_keys:
+ try:
+ rval[ key ] = self.__getattribute__( key )
+ if key in value_mapper:
+ rval[ key ] = value_mapper.get( key, rval[ key ] )
+ except AttributeError:
+ rval[ key ] = None
+ return rval
+
@property
def tool_dependencies_being_installed( self ):
dependencies_being_installed = []
diff -r ef33472f0aa296e55fd811357e09366f9cee5467 -r cac7a527140e097e6e7f1f5d0fdcf588a8183ece lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -40,14 +40,22 @@
raise Exception( message )
return None
-
-def find_complex_dependency_package( app, dependent_install_dir, required_install_dir, tool_shed_repository, required_repository, package_name, package_version, tool_dependencies_config ):
+def create_tool_dependency_with_initialized_env_sh_file( app, dependent_install_dir, tool_shed_repository, required_repository, package_name,
+ package_version, tool_dependencies_config ):
"""
+ Create or get a tool_dependency record that is defined by the received package_name and package_version. An env.sh file will be
+ created for the tool_dependency in the received dependent_install_dir.
"""
+ #The received required_repository refers to a tool_shed_repository record that is defined as a complex repository dependency for this
+ # tool_dependency. The required_repository may or may not be currently installed (it doesn't matter). If it is installed, it is
+ # associated with a tool_dependency that has an env.sh file that this new tool_dependency must be able to locate and "source". If it
+ # is not installed, we can still determine where that env.sh file will be, so we'll initialize this new tool_dependency's env.sh file
+ # in either case. If the require repository end up with an installation error, this new tool dependency will still be fine because its
+ # containing repository will be defined as missing dependencies.
tool_dependencies = []
if not os.path.exists( dependent_install_dir ):
os.makedirs( dependent_install_dir )
- env_file = None
+ required_tool_dependency_env_file_path = None
if tool_dependencies_config:
required_td_tree, error_message = xml_util.parse_xml( tool_dependencies_config )
if required_td_tree:
@@ -58,51 +66,52 @@
# <package name="bwa" version="0.5.9">
required_td_package_name = required_td_elem.get( 'name', None )
required_td_package_version = required_td_elem.get( 'version', None )
+ # Check the database to see if we have a record for the required tool dependency (we may not which is ok). If we
+ # find a record, we need to see if it is in an error state and if so handle it appropriately.
+ required_tool_dependency = \
+ tool_dependency_util.get_tool_dependency_by_name_version_type_repository( app,
+ required_repository,
+ required_td_package_name,
+ required_td_package_version,
+ 'package' )
if required_td_package_name == package_name and required_td_package_version == package_version:
- tool_dependency = tool_dependency_util.create_or_update_tool_dependency( app=app,
- tool_shed_repository=tool_shed_repository,
- name=package_name,
- version=package_version,
- type='package',
- status=app.model.ToolDependency.installation_status.NEVER_INSTALLED,
- set_status=True )
-
- if required_repository.installing:
- tool_dependency = tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=app.model.ToolDependency.installation_status.INSTALLING )
- # What happens if dependent repository fails or is
- # uninstalled during this process.
- env_file = required_repository_package_env( app, package_name, package_version, required_repository )
- if required_repository.installation_complete:
- if not os.path.exists( env_file ):
- error_message = 'env.sh file %s for package %s in dependendent repository could not be found. Required repository has status %s.' % ( package_name, env_file, required_repository.status )
- tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app,
+ # Get or create a database tool_dependency record with which the installed package on disk will be associated.
+ tool_dependency = \
+ tool_dependency_util.create_or_update_tool_dependency( app=app,
+ tool_shed_repository=tool_shed_repository,
+ name=package_name,
+ version=package_version,
+ type='package',
+ status=app.model.ToolDependency.installation_status.NEVER_INSTALLED,
+ set_status=True )
+ # Create an env.sh file for the tool_dependency whose first line will source the env.sh file located in
+ # the path defined by required_tool_dependency_env_file_path. It doesn't matter if the required env.sh
+ # file currently exists..
+ required_tool_dependency_env_file_path = \
+ tool_dependency_util.get_required_repository_package_env_sh_path( app, package_name, package_version, required_repository )
+ env_file_builder = fabric_util.EnvFileBuilder( tool_dependency.installation_directory( app ) )
+ env_file_builder.append_line( action="source", value=required_tool_dependency_env_file_path )
+ return_code = env_file_builder.return_code
+ if return_code:
+ error_message = 'Error defining env.sh file for package %s, return_code: %s' % ( str( package_name, str( return_code ) ) )
+ tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app,
tool_dependency,
error_message,
remove_installation_path=False )
- else:
- tool_dependency = tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=app.model.ToolDependency.installation_status.INSTALLED )
+ elif required_tool_dependency.in_error_state:
+ error_message = "This tool dependency's required tool dependency %s has status %s." % \
+ ( str( required_tool_dependency.name ), str( required_tool_dependency.status ) )
+ tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app,
+ tool_dependency,
+ error_message,
+ remove_installation_path=False )
else:
- # Ekk - handling tool depednencies for a dependent
- # repository that is not installed.
- #
- # Go ahead a return the env.sh file - Galaxy will
- # proceed to create an invalid symbolic link.
- # This is subtle-ly different than the previous
- # behavior which would have recreated an env.sh
- # from the the required repository's
- # tool_dependencies.xml but since it was not
- # installed all of the values inside would be
- # invalid path modifications. Either way, this file
- # is junk until the required repository is
- # installed properly.
- pass
+ tool_dependency = \
+ tool_dependency_util.set_tool_dependency_attributes( app,
+ tool_dependency=tool_dependency,
+ status=app.model.ToolDependency.installation_status.INSTALLED )
tool_dependencies.append( tool_dependency )
- return tool_dependencies, env_file
-
+ return tool_dependencies
def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ):
"""Return the absolute path to a specified disk file contained in a repository."""
@@ -157,6 +166,12 @@
def handle_complex_repository_dependency_for_package( app, elem, package_name, package_version, tool_shed_repository ):
+ """
+ Inspect the repository defined by a complex repository dependency definition and take certain steps to enable installation
+ of the received package name and version to proceed. The received elem is the <repository> tag set which defines the complex
+ repository dependency. The received tool_shed_repository is the installed tool shed repository for which the tool dependency
+ defined by the received package_name and package_version is being installed.
+ """
handled_tool_dependencies = []
tool_shed = elem.attrib[ 'toolshed' ]
required_repository_name = elem.attrib[ 'name' ]
@@ -179,7 +194,7 @@
tool_dependency_type='package',
tool_dependency_name=package_name,
tool_dependency_version=package_version )
- # Define the this dependent repository's tool dependency installation directory that will contain the env.sh file with a path to the
+ # Define this dependent repository's tool dependency installation directory that will contain the env.sh file with a path to the
# required repository's installed tool dependency package.
dependent_install_dir = tool_dependency_util.get_tool_dependency_install_dir( app=app,
repository_name=tool_shed_repository.name,
@@ -188,58 +203,58 @@
tool_dependency_type='package',
tool_dependency_name=package_name,
tool_dependency_version=package_version )
- # Set this dependent repository's tool dependency env.sh file with a path to the required repository's installed tool dependency package.
- # We can get everything we need from the discovered installed required_repository.
- if required_repository.installation_complete:
- if not os.path.exists( required_repository_package_install_dir ):
- print 'Missing required tool dependency directory %s' % str( required_repository_package_install_dir )
- repo_files_dir = required_repository.repo_files_directory( app )
- tool_dependencies_config = get_absolute_path_to_file_in_repository( repo_files_dir, 'tool_dependencies.xml' )
- if tool_dependencies_config:
- config_to_use = tool_dependencies_config
+ if os.path.exists( dependent_install_dir ):
+ # Notice that we'll throw away the following tool_dependency if it can be installed.
+ tool_dependency, can_install_tool_dependency = tool_dependency_util.sync_database_with_file_system( app,
+ tool_shed_repository,
+ package_name,
+ package_version,
+ dependent_install_dir,
+ tool_dependency_type='package' )
+ else:
+ can_install_tool_dependency = True
+ if can_install_tool_dependency:
+ # Set this dependent repository's tool dependency env.sh file with a path to the required repository's installed tool dependency package.
+ # We can get everything we need from the discovered installed required_repository.
+ if required_repository.is_installed:
+ if not os.path.exists( required_repository_package_install_dir ):
+ print 'Missing required tool dependency directory %s' % str( required_repository_package_install_dir )
+ repo_files_dir = required_repository.repo_files_directory( app )
+ tool_dependencies_config = get_absolute_path_to_file_in_repository( repo_files_dir, 'tool_dependencies.xml' )
+ if tool_dependencies_config:
+ config_to_use = tool_dependencies_config
+ else:
+ message = "Unable to locate required tool_dependencies.xml file for revision %s of installed repository %s owned by %s." % \
+ ( str( required_repository.changeset_revision ), str( required_repository.name ), str( required_repository.owner ) )
+ raise Exception( message )
else:
- message = "Unable to locate required tool_dependencies.xml file for revision %s of installed repository %s owned by %s." % \
- ( str( required_repository.changeset_revision ), str( required_repository.name ), str( required_repository.owner ) )
- raise Exception( message )
+ # Make a call to the tool shed to get the changeset revision to which the current value of required_repository_changeset_revision
+ # should be updated if it's not current.
+ text = get_updated_changeset_revisions_from_tool_shed( app=app,
+ tool_shed_url=tool_shed,
+ name=required_repository_name,
+ owner=required_repository_owner,
+ changeset_revision=required_repository_changeset_revision )
+ if text:
+ updated_changeset_revisions = listify( text )
+ # The list of changeset revisions is in reverse order, so the newest will be first.
+ required_repository_changeset_revision = updated_changeset_revisions[ 0 ]
+ # Make a call to the tool shed to get the required repository's tool_dependencies.xml file.
+ tmp_filename = create_temporary_tool_dependencies_config( app,
+ tool_shed,
+ required_repository_name,
+ required_repository_owner,
+ required_repository_changeset_revision )
+ config_to_use = tmp_filename
+ handled_tool_dependencies = create_tool_dependency_with_initialized_env_sh_file( app=app,
+ dependent_install_dir=dependent_install_dir,
+ tool_shed_repository=tool_shed_repository,
+ required_repository=required_repository,
+ package_name=package_name,
+ package_version=package_version,
+ tool_dependencies_config=config_to_use )
+ suc.remove_file( tmp_filename )
else:
- # Make a call to the tool shed to get the changeset revision to which the current value of required_repository_changeset_revision
- # should be updated if it's not current.
- text = get_updated_changeset_revisions_from_tool_shed( app=app,
- tool_shed_url=tool_shed,
- name=required_repository_name,
- owner=required_repository_owner,
- changeset_revision=required_repository_changeset_revision )
- if text:
- updated_changeset_revisions = listify( text )
- # The list of changeset revisions is in reverse order, so the newest will be first.
- required_repository_changeset_revision = updated_changeset_revisions[ 0 ]
- # Make a call to the tool shed to get the required repository's tool_dependencies.xml file.
- tmp_filename = create_temporary_tool_dependencies_config( app,
- tool_shed,
- required_repository_name,
- required_repository_owner,
- required_repository_changeset_revision )
- config_to_use = tmp_filename
-
- tool_dependencies, package_env_sh_file = find_complex_dependency_package(
- app=app,
- dependent_install_dir=dependent_install_dir,
- required_install_dir=required_repository_package_install_dir,
- tool_shed_repository=tool_shed_repository,
- required_repository=required_repository,
- package_name=package_name,
- package_version=package_version,
- tool_dependencies_config=config_to_use
- )
- if package_env_sh_file:
- os.symlink( package_env_sh_file, os.path.join( dependent_install_dir, "env.sh" ) )
- if tmp_filename:
- try:
- os.remove( tmp_filename )
- except:
- pass
- for tool_dependency in tool_dependencies:
- tool_dependency = __mark_tool_dependency_installed( app, tool_dependency)
handled_tool_dependencies.append( tool_dependency )
else:
message = "Unable to locate required tool shed repository named %s owned by %s with revision %s." % \
@@ -247,7 +262,6 @@
raise Exception( message )
return handled_tool_dependencies
-
def install_and_build_package_via_fabric( app, tool_dependency, actions_dict ):
sa_session = app.model.context.current
try:
@@ -262,10 +276,9 @@
tool_dependency,
error_message,
remove_installation_path=False )
- tool_dependency = __mark_tool_dependency_installed( app, tool_dependency)
+ tool_dependency = tool_dependency_util.mark_tool_dependency_installed( app, tool_dependency )
return tool_dependency
-
def install_package( app, elem, tool_shed_repository, tool_dependencies=None ):
# The value of tool_dependencies is a partial or full list of ToolDependency records associated with the tool_shed_repository.
sa_session = app.model.context.current
@@ -298,57 +311,14 @@
tool_dependency_version=package_version )
can_install_tool_dependency = True
if os.path.exists( install_dir ):
- # The tool dependency installation directory exists, check for the presence of fabric_util.INSTALLATION_LOG,
- # which indicates a successful installation. This currently assumes that the tool dependency record will
- # always exist in the database.
- tool_dependency = tool_dependency_util.get_tool_dependency_by_name_version_type_repository( app,
- tool_shed_repository,
- package_name,
- package_version,
- 'package' )
- if tool_dependency.status == app.model.ToolDependency.installation_status.INSTALLING:
- # The tool dependency is in an Installing state, so we don't want to do anything to it. If the tool
- # dependency is being installed by someone else, we don't want to interfere with that. This assumes
- # the installation by "someone else" is not hung in an Installing state, which is a weakness if that
- # "someone else" never repaired it.
- log.debug( 'Skipping installation of tool dependency %s version %s because it has a status of %s' % \
- ( str( tool_dependency.name ), str( tool_dependency.version ), str( tool_dependency.status ) ) )
- can_install_tool_dependency = False
- else:
- tool_dependency_installation_directory_contents = os.listdir( install_dir )
- if fabric_util.INSTALLATION_LOG in tool_dependency_installation_directory_contents:
- # Since this tool dependency's installation directory contains an installation log, we consider it to be
- # installed. In some cases the record may be missing from the database due to some activity outside of
- # the control of the Tool Shed. Since a new record was created for it and we don't know the state of the
- # files on disk, we will set it to an error state (unless we are running Tool Shed functional tests - see
- # below).
- log.debug( 'Skipping installation of tool dependency %s version %s because it is installed in %s' % \
- ( str( tool_dependency.name ), str( tool_dependency.version ), str( install_dir ) ) )
- can_install_tool_dependency = False
- if app.config.running_functional_tests:
- # If we are running functional tests, the state will be set to Installed because previously compiled
- # tool dependencies are not deleted by default, from the "install and test" framework..
- tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
- else:
- error_message = 'The installation directory for this tool dependency had contents, but the database had no record. '
- error_message += 'The installation log may show this tool dependency to be correctly installed, but due to the '
- error_message += 'missing database record, it is automatically set to Error.'
- tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
- tool_dependency.error_message = error_message
- else:
- error_message = '\nInstallation path %s for tool dependency %s version %s exists, but the expected file %s' % \
- ( str( install_dir ), str( package_name ), str( package_version ), str( fabric_util.INSTALLATION_LOG ) )
- error_message += ' is missing. This indicates an installation error so the tool dependency is being'
- error_message += ' prepared for re-installation.'
- print error_message
- tool_dependency.status = app.model.ToolDependency.installation_status.NEVER_INSTALLED
- try:
- shutil.rmtree( install_dir )
- except Exception, e:
- log.exception( 'Unable to remove installation path %s.', install_dir )
- can_install_tool_dependency = True
- sa_session.add( tool_dependency )
- sa_session.flush()
+ # Notice that we'll throw away the following tool_dependency if it can be installed.
+ tool_dependency, can_install_tool_dependency = \
+ tool_dependency_util.sync_database_with_file_system( app,
+ tool_shed_repository,
+ package_name,
+ package_version,
+ install_dir,
+ tool_dependency_type='package' )
else:
can_install_tool_dependency = True
if can_install_tool_dependency:
@@ -775,23 +745,6 @@
tool_dependency = install_and_build_package_via_fabric( app, tool_dependency, actions_dict )
return tool_dependency
-
-# TODO: Move to tool_dependency_util?
-def required_repository_package_env( app, package_name, package_version, required_repository ):
- """
- Return path to env.sh file in required repository if the required repository has been installed.
- """
- env_sh_file_dir = tool_dependency_util.get_tool_dependency_install_dir( app=app,
- repository_name=required_repository.name,
- repository_owner=required_repository.owner,
- repository_changeset_revision=required_repository.installed_changeset_revision,
- tool_dependency_type='package',
- tool_dependency_name=package_name,
- tool_dependency_version=package_version )
- env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
- return env_sh_file_path
-
-
def run_proprietary_fabric_method( app, elem, proprietary_fabfile_path, install_dir, package_name=None, **kwd ):
"""
TODO: Handle this using the fabric api.
@@ -846,10 +799,7 @@
tmp_stderr = open( tmp_name, 'rb' )
message = '%s\n' % str( tmp_stderr.read() )
tmp_stderr.close()
- try:
- os.remove( tmp_name )
- except:
- pass
+ suc.remove_file( tmp_name )
return returncode, message
def set_environment( app, elem, tool_shed_repository, attr_tups_of_dependencies_for_install ):
@@ -935,17 +885,3 @@
for arg in args:
parts.append( arg.strip( '/' ) )
return '/'.join( parts )
-
-
-# TODO: Move to tool_dependency_util?
-def __mark_tool_dependency_installed( app, tool_dependency ):
- if tool_dependency.status not in [ app.model.ToolDependency.installation_status.ERROR,
- app.model.ToolDependency.installation_status.INSTALLED ]:
- log.debug( 'Changing status for tool dependency %s from %s to %s.' % \
- ( str( tool_dependency.name ), str( tool_dependency.status ), str( app.model.ToolDependency.installation_status.INSTALLED ) ) )
- tool_dependency = tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=app.model.ToolDependency.installation_status.INSTALLED,
- error_message=None,
- remove_from_disk=False )
- return tool_dependency
diff -r ef33472f0aa296e55fd811357e09366f9cee5467 -r cac7a527140e097e6e7f1f5d0fdcf588a8183ece lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -458,6 +458,7 @@
if index is not None:
tool_dependency = tool_dependencies[ index ]
if tool_dependency.can_install:
+ # The database record is currently in a state that allows us to install the package on the file system.
try:
dependencies_ignored = app.toolbox.dependency_manager and not app.toolbox.dependency_manager.uses_tool_shed_dependencies()
if dependencies_ignored:
diff -r ef33472f0aa296e55fd811357e09366f9cee5467 -r cac7a527140e097e6e7f1f5d0fdcf588a8183ece lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1521,11 +1521,21 @@
def remove_dir( dir ):
"""Attempt to remove a directory from disk."""
- if os.path.exists( dir ):
- try:
- shutil.rmtree( dir )
- except:
- pass
+ if dir:
+ if os.path.exists( dir ):
+ try:
+ shutil.rmtree( dir )
+ except:
+ pass
+
+def remove_file( file_name ):
+ """Attempt to remove a file from disk."""
+ if file_name:
+ if os.path.exists( file_name ):
+ try:
+ os.remove( dir )
+ except:
+ pass
def repository_was_previously_installed( trans, tool_shed_url, repository_name, repo_info_tuple ):
"""
diff -r ef33472f0aa296e55fd811357e09366f9cee5467 -r cac7a527140e097e6e7f1f5d0fdcf588a8183ece lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -7,6 +7,7 @@
import tool_shed.util.shed_util_common as suc
import tool_shed.repository_types.util as rt_util
from tool_shed.util import xml_util
+from tool_shed.galaxy_install.tool_dependencies import fabric_util
log = logging.getLogger( __name__ )
@@ -276,6 +277,18 @@
platform_dict[ 'architecture' ] = machine.lower()
return platform_dict
+def get_required_repository_package_env_sh_path( app, package_name, package_version, required_repository ):
+ """Return path to env.sh file in required repository if the required repository has been installed."""
+ env_sh_file_dir = get_tool_dependency_install_dir( app=app,
+ repository_name=required_repository.name,
+ repository_owner=required_repository.owner,
+ repository_changeset_revision=required_repository.installed_changeset_revision,
+ tool_dependency_type='package',
+ tool_dependency_name=package_name,
+ tool_dependency_version=package_version )
+ env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
+ return env_sh_file_path
+
def get_tool_dependency( trans, id ):
"""Get a tool_dependency from the database via id"""
return trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) )
@@ -368,6 +381,18 @@
sa_session.flush()
return tool_dependency
+def mark_tool_dependency_installed( app, tool_dependency ):
+ if tool_dependency.status not in [ app.model.ToolDependency.installation_status.ERROR,
+ app.model.ToolDependency.installation_status.INSTALLED ]:
+ log.debug( 'Changing status for tool dependency %s from %s to %s.' % \
+ ( str( tool_dependency.name ), str( tool_dependency.status ), str( app.model.ToolDependency.installation_status.INSTALLED ) ) )
+ tool_dependency = set_tool_dependency_attributes( app,
+ tool_dependency=tool_dependency,
+ status=app.model.ToolDependency.installation_status.INSTALLED,
+ error_message=None,
+ remove_from_disk=False )
+ return tool_dependency
+
def merge_missing_tool_dependencies_to_installed_container( containers_dict ):
""" Merge the list of missing tool dependencies into the list of installed tool dependencies."""
missing_td_container_root = containers_dict.get( 'missing_tool_dependencies', None )
@@ -491,6 +516,72 @@
sa_session.flush()
return tool_dependency
+def sync_database_with_file_system( app, tool_shed_repository, tool_dependency_name, tool_dependency_version, tool_dependency_install_dir,
+ tool_dependency_type='package' ):
+ """
+ The installation directory defined by the received tool_dependency_install_dir exists, so check for the presence
+ of fabric_util.INSTALLATION_LOG. If the files exists, we'll assume the tool dependency is installed, but not
+ necessarily successfully (it could be in an error state on disk. However, we can justifiably assume here that no
+ matter the state, an associated database record will exist.
+ """
+ # This method should be reached very rarely. It implies that either the Galaxy environment became corrupted (i.e.,
+ # the database records for installed tool dependencies is not synchronized with tool dependencies on disk) or the Tool
+ # Shed's install and test framework is running.
+ sa_session = app.model.context.current
+ can_install_tool_dependency = False
+ tool_dependency = get_tool_dependency_by_name_version_type_repository( app,
+ tool_shed_repository,
+ tool_dependency_name,
+ tool_dependency_version,
+ tool_dependency_type )
+ if tool_dependency.status == app.model.ToolDependency.installation_status.INSTALLING:
+ # The tool dependency is in an Installing state, so we don't want to do anything to it. If the tool
+ # dependency is being installed by someone else, we don't want to interfere with that. This assumes
+ # the installation by "someone else" is not hung in an Installing state, which is a weakness if that
+ # "someone else" never repaired it.
+ log.debug( 'Skipping installation of tool dependency %s version %s because it has a status of %s' % \
+ ( str( tool_dependency.name ), str( tool_dependency.version ), str( tool_dependency.status ) ) )
+ else:
+ # We have a pre-existing installation directory on the file system, but our associated database record is
+ # in a state that allowed us to arrive here - see the comment in common_install_util.handle_tool_dependencies().
+ # At this point, we'll inspect the installation directory to see if we have a "proper installation" and
+ # if so, synchronize the database record rather than reinstalling the dependency if we're "running_functional_tests".
+ # If we're not "running_functional_tests, we'll set the tool dependency's installation status to ERROR.
+ tool_dependency_installation_directory_contents = os.listdir( tool_dependency_install_dir )
+ if fabric_util.INSTALLATION_LOG in tool_dependency_installation_directory_contents:
+ # Since this tool dependency's installation directory contains an installation log, we consider it to be
+ # installed. In some cases the record may be missing from the database due to some activity outside of
+ # the control of the Tool Shed. Since a new record was created for it and we don't know the state of the
+ # files on disk, we will set it to an error state (unless we are running Tool Shed functional tests - see
+ # below).
+ log.debug( 'Skipping installation of tool dependency %s version %s because it is installed in %s' % \
+ ( str( tool_dependency.name ), str( tool_dependency.version ), str( tool_dependency_install_dir ) ) )
+ if app.config.running_functional_tests:
+ # If we are running functional tests, the state will be set to Installed because previously compiled
+ # tool dependencies are not deleted by default, from the "install and test" framework..
+ tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
+ else:
+ error_message = 'The installation directory for this tool dependency had contents, but the database had no record. '
+ error_message += 'The installation log may show this tool dependency to be correctly installed, but due to the '
+ error_message += 'missing database record, it is automatically set to Error.'
+ tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
+ tool_dependency.error_message = error_message
+ else:
+ error_message = '\nInstallation path %s for tool dependency %s version %s exists, but the expected file %s' % \
+ ( str( tool_dependency_install_dir ),
+ str( tool_dependency_name ),
+ str( tool_dependency_version ),
+ str( fabric_util.INSTALLATION_LOG ) )
+ error_message += ' is missing. This indicates an installation error so the tool dependency is being'
+ error_message += ' prepared for re-installation.'
+ print error_message
+ tool_dependency.status = app.model.ToolDependency.installation_status.NEVER_INSTALLED
+ suc.remove_dir( tool_dependency_install_dir )
+ can_install_tool_dependency = True
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ return tool_dependency, can_install_tool_dependency
+
def tool_dependency_is_orphan( type, name, version, tools ):
"""
Determine if the combination of the received type, name and version is defined in the <requirement> tag for at least one tool in the
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0b2ecfd14689/
Changeset: 0b2ecfd14689
User: jmchilton
Date: 2013-12-02 21:57:00
Summary: Tell nose functions in lib/galaxy/tools/test.py are not unit tests.
Affected #: 1 file
diff -r cf0d0001893cabb08a01a6d8938f3aa24be4cada -r 0b2ecfd1468979ea94b00c12eaafcac6f330b37e lib/galaxy/tools/test.py
--- a/lib/galaxy/tools/test.py
+++ b/lib/galaxy/tools/test.py
@@ -4,6 +4,10 @@
from parameters import basic
from parameters import grouping
from galaxy.util import string_as_bool
+try:
+ from nose.tools import nottest
+except ImportError:
+ nottest = lambda x: x
import logging
log = logging.getLogger( __name__ )
@@ -14,6 +18,7 @@
DEFAULT_MAX_SECS = 120
+@nottest
def parse_tests_elem(tool, tests_elem):
"""
Build ToolTestBuilder objects for each "<test>" elements and
@@ -196,6 +201,7 @@
return require_file( name, value, extra, self.required_files )
+@nottest
def test_data_iter( required_files ):
for fname, extra in required_files:
data_dict = dict(
https://bitbucket.org/galaxy/galaxy-central/commits/ef33472f0aa2/
Changeset: ef33472f0aa2
User: jmchilton
Date: 2013-12-02 21:57:00
Summary: Fix test case in test/unit/test_tool_deps.py which fails if modulecmd is unavailable.
Affected #: 1 file
diff -r 0b2ecfd1468979ea94b00c12eaafcac6f330b37e -r ef33472f0aa296e55fd811357e09366f9cee5467 test/unit/test_tool_deps.py
--- a/test/unit/test_tool_deps.py
+++ b/test/unit/test_tool_deps.py
@@ -293,7 +293,7 @@
def test_config_module_defaults():
with __parse_resolvers('''<dependency_resolvers>
- <modules />
+ <modules prefetch="false" /></dependency_resolvers>
''') as dependency_resolvers:
module_resolver = dependency_resolvers[0]
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Fix for missing eggs.require in lib/galaxy/exceptions/__init__.py.
by commits-noreply@bitbucket.org 02 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/cf0d0001893c/
Changeset: cf0d0001893c
User: dan
Date: 2013-12-02 20:48:30
Summary: Fix for missing eggs.require in lib/galaxy/exceptions/__init__.py.
Affected #: 1 file
diff -r cd94f9721265741d448a225df511141190049f34 -r cf0d0001893cabb08a01a6d8938f3aa24be4cada lib/galaxy/exceptions/__init__.py
--- a/lib/galaxy/exceptions/__init__.py
+++ b/lib/galaxy/exceptions/__init__.py
@@ -1,6 +1,10 @@
"""
Custom exceptions for Galaxy
"""
+
+from galaxy import eggs
+eggs.require( "Paste" )
+
from paste import httpexceptions
class MessageException( Exception ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/44acfec11d87/
Changeset: 44acfec11d87
User: jmchilton
Date: 2013-11-27 17:12:48
Summary: Patch from Peter Cock. Allow overriding composite type main file in archives.
Affected #: 1 file
diff -r 372ff609235643994e50f1502fb9c73d37a3f97f -r 44acfec11d87f2efc38bccb2b3333e86b9d96667 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -199,6 +199,29 @@
out = "Can't create peek %s" % str( exc )
return out
+ def _archive_main_file(self, archive, outfname, data_filename):
+ """Called from _archive_composite_dataset to add central file to archive.
+
+ Unless subclassed, this will add the main dataset file (argument data_filename)
+ to the archive, as an HTML file with its filename derived from the dataset name
+ (argument outfname).
+
+ Returns a tuple of boolean, string, string: (error, msg, messagetype)
+ """
+ error, msg, messagetype = False, "", ""
+ htmlname = os.path.splitext(outfname)[0]
+ if not htmlname.endswith(ext):
+ htmlname = '%s_%s' % (htmlname, ext)
+ archname = '%s.html' % htmlname # fake the real nature of the html file
+ try:
+ archive.add(data_filename, archname)
+ except IOError:
+ error = True
+ log.exception("Unable to add composite parent %s to temporary library download archive" % data_filename)
+ msg = "Unable to create archive for download, please report this error"
+ messagetype = "error"
+ return error, msg, messagetype
+
def _archive_composite_dataset( self, trans, data=None, **kwd ):
# save a composite object into a compressed archive for downloading
params = util.Params( kwd )
@@ -233,33 +256,25 @@
messagetype = 'error'
if not error:
current_user_roles = trans.get_current_user_roles()
- ext = data.extension
path = data.file_name
fname = os.path.split(path)[-1]
efp = data.extra_files_path
- htmlname = os.path.splitext(outfname)[0]
- if not htmlname.endswith(ext):
- htmlname = '%s_%s' % (htmlname,ext)
- archname = '%s.html' % htmlname # fake the real nature of the html file
- try:
- archive.add(data.file_name,archname)
- except IOError:
- error = True
- log.exception( "Unable to add composite parent %s to temporary library download archive" % data.file_name)
- msg = "Unable to create archive for download, please report this error"
- messagetype = 'error'
- for root, dirs, files in os.walk(efp):
- for fname in files:
- fpath = os.path.join(root,fname)
- rpath = os.path.relpath(fpath,efp)
- try:
- archive.add( fpath,rpath )
- except IOError:
- error = True
- log.exception( "Unable to add %s to temporary library download archive" % rpath)
- msg = "Unable to create archive for download, please report this error"
- messagetype = 'error'
- continue
+ #Add any central file to the archive,
+ error, msg, messagetype = self._archive_main_file(archive, outfname, path)
+ if not error:
+ #Add any child files to the archive,
+ for root, dirs, files in os.walk(efp):
+ for fname in files:
+ fpath = os.path.join(root,fname)
+ rpath = os.path.relpath(fpath,efp)
+ try:
+ archive.add( fpath,rpath )
+ except IOError:
+ error = True
+ log.exception( "Unable to add %s to temporary library download archive" % rpath)
+ msg = "Unable to create archive for download, please report this error"
+ messagetype = 'error'
+ continue
if not error:
if params.do_action == 'zip':
archive.close()
https://bitbucket.org/galaxy/galaxy-central/commits/cd94f9721265/
Changeset: cd94f9721265
User: jmchilton
Date: 2013-12-02 19:40:43
Summary: Fix default behavior of composite archival broken with previous changeset.
Other small tweaks to this implementation.
Affected #: 1 file
diff -r 44acfec11d87f2efc38bccb2b3333e86b9d96667 -r cd94f9721265741d448a225df511141190049f34 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -199,7 +199,7 @@
out = "Can't create peek %s" % str( exc )
return out
- def _archive_main_file(self, archive, outfname, data_filename):
+ def _archive_main_file(self, archive, display_name, data_filename):
"""Called from _archive_composite_dataset to add central file to archive.
Unless subclassed, this will add the main dataset file (argument data_filename)
@@ -209,10 +209,7 @@
Returns a tuple of boolean, string, string: (error, msg, messagetype)
"""
error, msg, messagetype = False, "", ""
- htmlname = os.path.splitext(outfname)[0]
- if not htmlname.endswith(ext):
- htmlname = '%s_%s' % (htmlname, ext)
- archname = '%s.html' % htmlname # fake the real nature of the html file
+ archname = '%s.html' % display_name # fake the real nature of the html file
try:
archive.add(data_filename, archname)
except IOError:
@@ -256,11 +253,17 @@
messagetype = 'error'
if not error:
current_user_roles = trans.get_current_user_roles()
+ ext = data.extension
path = data.file_name
fname = os.path.split(path)[-1]
efp = data.extra_files_path
#Add any central file to the archive,
- error, msg, messagetype = self._archive_main_file(archive, outfname, path)
+
+ display_name = os.path.splitext(outfname)[0]
+ if not display_name.endswith(ext):
+ display_name = '%s_%s' % (display_name, ext)
+
+ error, msg, messagetype = self._archive_main_file(archive, display_name, path)
if not error:
#Add any child files to the archive,
for root, dirs, files in os.walk(efp):
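Taken together, these two changesets turn _archive_main_file into an override hook for composite datatypes. A minimal sketch of such an override, assuming a hypothetical subclass (the Data base class and the method signature come from the diffs above; everything else is illustrative):

import logging
from galaxy.datatypes.data import Data

log = logging.getLogger(__name__)

class MyCompositeType(Data):
    def _archive_main_file(self, archive, display_name, data_filename):
        # Add the main dataset file under its display name as-is, instead of
        # faking an .html name as the default implementation does.
        error, msg, messagetype = False, "", ""
        try:
            archive.add(data_filename, display_name)
        except IOError:
            error = True
            log.exception("Unable to add composite parent %s to temporary library download archive" % data_filename)
            msg = "Unable to create archive for download, please report this error"
            messagetype = "error"
        return error, msg, messagetype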
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: guerler: Grids: Fix unescaping strings
by commits-noreply@bitbucket.org 02 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/372ff6092356/
Changeset: 372ff6092356
User: guerler
Date: 2013-12-02 19:07:17
Summary: Grids: Fix unescaping strings
Affected #: 1 file
diff -r 752929cea6d37423c14f46f317ef3815cfc3c3d0 -r 372ff609235643994e50f1502fb9c73d37a3f97f templates/grid_base.mako
--- a/templates/grid_base.mako
+++ b/templates/grid_base.mako
@@ -403,9 +403,10 @@
value = column_settings['value']
inbound = column_settings['inbound']
- # check if formatting is defined
- value = str(value).replace('//', '/')
-
+ # unescape value
+ if isinstance(value, unicode):
+ value = value.replace('//', '/')
+
# Attach popup menu?
id = ""
cls = ""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jmchilton: Update LWR client through LWR revision acecc74f7016.
by commits-noreply@bitbucket.org 01 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/752929cea6d3/
Changeset: 752929cea6d3
User: jmchilton
Date: 2013-12-01 13:14:32
Summary: Update LWR client through LWR revision acecc74f7016.
Mostly small modifications related to Python 3 compatibility (no iteritems, different StringIO, exception syntax, urllib differences) and improved handling of unicode data.
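The compatibility idioms repeated throughout this diff follow a common try/except import pattern. A standalone sketch of those idioms (not LWR code itself), runnable on both Python 2 and Python 3:

try:
    from StringIO import StringIO as BytesIO  # Python 2
except ImportError:
    from io import BytesIO                    # Python 3
try:
    from urllib import urlencode              # Python 2
except ImportError:
    from urllib.parse import urlencode        # Python 3

# dict.iteritems() is gone on Python 3; dict.items() works on both.
params = {'private_key': '12345'}
encoded = urlencode(dict((k, str(v).encode('utf-8')) for k, v in params.items()))

# Bytes-oriented I/O behaves the same on both versions.
body = BytesIO(u'hello'.encode('utf-8'))

# 'except ValueError, e' is Python 2 only; 'as' works on both.
try:
    raise ValueError('boom')
except ValueError as e:
    handled = e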
Affected #: 6 files
diff -r 9a5a8d18cd16ed3331ba996de21ed4d4dfa93da9 -r 752929cea6d37423c14f46f317ef3815cfc3c3d0 lib/galaxy/jobs/runners/lwr_client/action_mapper.py
--- a/lib/galaxy/jobs/runners/lwr_client/action_mapper.py
+++ b/lib/galaxy/jobs/runners/lwr_client/action_mapper.py
@@ -21,7 +21,7 @@
>>> from tempfile import NamedTemporaryFile
>>> from os import unlink
>>> f = NamedTemporaryFile(delete=False)
- >>> f.write(json_string)
+ >>> write_result = f.write(json_string.encode('UTF-8'))
>>> f.close()
>>> class MockClient():
... default_file_action = 'none'
@@ -30,23 +30,23 @@
>>> mapper = FileActionMapper(MockClient())
>>> unlink(f.name)
>>> # Test first config line above, implicit path prefix mapper
- >>> mapper.action('/opt/galaxy/tools/filters/catWrapper.py', 'input')
- ('none',)
+ >>> mapper.action('/opt/galaxy/tools/filters/catWrapper.py', 'input')[0] == u'none'
+ True
>>> # Test another (2nd) mapper, this one with a different action
- >>> mapper.action('/galaxy/data/files/000/dataset_1.dat', 'input')
- ('transfer',)
+ >>> mapper.action('/galaxy/data/files/000/dataset_1.dat', 'input')[0] == u'transfer'
+ True
>>> # Always at least copy work_dir outputs.
- >>> mapper.action('/opt/galaxy/database/working_directory/45.sh', 'work_dir')
- ('copy',)
+ >>> mapper.action('/opt/galaxy/database/working_directory/45.sh', 'work_dir')[0] == u'copy'
+ True
>>> # Test glob mapper (matching test)
- >>> mapper.action('/cool/bamfiles/projectABC/study1/patient3.bam', 'input')
- ('copy',)
+ >>> mapper.action('/cool/bamfiles/projectABC/study1/patient3.bam', 'input')[0] == u'copy'
+ True
>>> # Test glob mapper (non-matching test)
- >>> mapper.action('/cool/bamfiles/projectABC/study1/patient3.bam.bai', 'input')
- ('none',)
+ >>> mapper.action('/cool/bamfiles/projectABC/study1/patient3.bam.bai', 'input')[0] == u'none'
+ True
>>> # Regex mapper test.
- >>> mapper.action('/old/galaxy/data/dataset_10245.dat', 'input')
- ('copy',)
+ >>> mapper.action('/old/galaxy/data/dataset_10245.dat', 'input')[0] == u'copy'
+ True
"""
def __init__(self, client):
diff -r 9a5a8d18cd16ed3331ba996de21ed4d4dfa93da9 -r 752929cea6d37423c14f46f317ef3815cfc3c3d0 lib/galaxy/jobs/runners/lwr_client/destination.py
--- a/lib/galaxy/jobs/runners/lwr_client/destination.py
+++ b/lib/galaxy/jobs/runners/lwr_client/destination.py
@@ -51,9 +51,10 @@
>>> destination_params = {"private_token": "12345", "submit_native_specification": "-q batch"}
>>> result = submit_params(destination_params)
- >>> result.items()
- [('native_specification', '-q batch')]
+ >>> result
+ {'native_specification': '-q batch'}
"""
- return dict([(key[len(SUBMIT_PREFIX):], value)
- for key, value in (destination_params or {}).iteritems()
+ destination_params = destination_params or {}
+ return dict([(key[len(SUBMIT_PREFIX):], destination_params[key])
+ for key in destination_params
if key.startswith(SUBMIT_PREFIX)])
diff -r 9a5a8d18cd16ed3331ba996de21ed4d4dfa93da9 -r 752929cea6d37423c14f46f317ef3815cfc3c3d0 lib/galaxy/jobs/runners/lwr_client/manager.py
--- a/lib/galaxy/jobs/runners/lwr_client/manager.py
+++ b/lib/galaxy/jobs/runners/lwr_client/manager.py
@@ -5,8 +5,18 @@
from queue import Queue
from threading import Thread
from os import getenv
-from urllib import urlencode
-from StringIO import StringIO
+try:
+ from urllib import urlencode
+except ImportError:
+ from urllib.parse import urlencode
+try:
+ from StringIO import StringIO as BytesIO
+except ImportError:
+ from io import BytesIO
+try:
+ from six import text_type
+except ImportError:
+ from galaxy.util import unicodify as text_type
from .client import Client, InputCachingClient
from .transport import get_transport
@@ -27,10 +37,10 @@
"""
def __init__(self, **kwds):
if 'job_manager' in kwds:
- self.job_manager_interface_class = LocalJobManagerInterface
+ self.job_manager_interface_class = LocalLwrInterface
self.job_manager_interface_args = dict(job_manager=kwds['job_manager'], file_cache=kwds['file_cache'])
else:
- self.job_manager_interface_class = HttpJobManagerInterface
+ self.job_manager_interface_class = HttpLwrInterface
transport_type = kwds.get('transport_type', None)
transport = get_transport(transport_type)
self.job_manager_interface_args = dict(transport=transport)
@@ -55,7 +65,11 @@
return self.client_class(destination_params, job_id, job_manager_interface, **self.extra_client_kwds)
def __parse_destination_params(self, destination_params):
- if isinstance(destination_params, str) or isinstance(destination_params, unicode):
+ try:
+ unicode_type = unicode
+ except NameError:
+ unicode_type = str
+ if isinstance(destination_params, str) or isinstance(destination_params, unicode_type):
destination_params = url_to_destination_params(destination_params)
return destination_params
@@ -76,7 +90,7 @@
"""
-class HttpJobManagerInterface(object):
+class HttpLwrInterface(object):
def __init__(self, destination_params, transport):
self.transport = transport
@@ -92,12 +106,13 @@
def __build_url(self, command, args):
if self.private_key:
args["private_key"] = self.private_key
- data = urlencode(args)
+ arg_bytes = dict([(k, text_type(args[k]).encode('utf-8')) for k in args])
+ data = urlencode(arg_bytes)
url = self.remote_host + command + "?" + data
return url
-class LocalJobManagerInterface(object):
+class LocalLwrInterface(object):
def __init__(self, destination_params, job_manager, file_cache):
self.job_manager = job_manager
@@ -113,6 +128,7 @@
}
def execute(self, command, args={}, data=None, input_path=None, output_path=None):
+ # If data set, should be unicode (on Python 2) or str (on Python 3).
from lwr import routes
from lwr.framework import build_func_args
controller = getattr(routes, command)
@@ -129,9 +145,9 @@
def __build_body(self, data, input_path):
if data is not None:
- return StringIO(data)
+ return BytesIO(data.encode('utf-8'))
elif input_path is not None:
- return open(input_path, 'r')
+ return open(input_path, 'rb')
else:
return None
@@ -188,4 +204,4 @@
int_val = int(val)
return int_val
-__all__ = [ClientManager, HttpJobManagerInterface]
+__all__ = [ClientManager, HttpLwrInterface]
diff -r 9a5a8d18cd16ed3331ba996de21ed4d4dfa93da9 -r 752929cea6d37423c14f46f317ef3815cfc3c3d0 lib/galaxy/jobs/runners/lwr_client/stager.py
--- a/lib/galaxy/jobs/runners/lwr_client/stager.py
+++ b/lib/galaxy/jobs/runners/lwr_client/stager.py
@@ -1,6 +1,7 @@
from os.path import abspath, basename, join, exists
from os import listdir, sep
from re import findall
+from io import open
from .action_mapper import FileActionMapper
@@ -24,22 +25,24 @@
>>> import tempfile
>>> tf = tempfile.NamedTemporaryFile()
>>> def setup_inputs(tf):
- ... open(tf.name, "w").write("world /path/to/input the rest")
- ... inputs = JobInputs("hello /path/to/input", [tf.name])
+ ... open(tf.name, "w").write(u"world /path/to/input the rest")
+ ... inputs = JobInputs(u"hello /path/to/input", [tf.name])
... return inputs
>>> inputs = setup_inputs(tf)
- >>> inputs.rewrite_paths("/path/to/input", 'C:\\input')
- >>> inputs.rewritten_command_line
- 'hello C:\\\\input'
- >>> inputs.rewritten_config_files[tf.name]
- 'world C:\\\\input the rest'
+ >>> inputs.rewrite_paths(u"/path/to/input", u'C:\\input')
+ >>> inputs.rewritten_command_line == u'hello C:\\\\input'
+ True
+ >>> inputs.rewritten_config_files[tf.name] == u'world C:\\\\input the rest'
+ True
>>> tf.close()
>>> tf = tempfile.NamedTemporaryFile()
>>> inputs = setup_inputs(tf)
- >>> inputs.find_referenced_subfiles('/path/to')
- ['/path/to/input']
+ >>> inputs.find_referenced_subfiles('/path/to') == [u'/path/to/input']
+ True
>>> inputs.path_referenced('/path/to')
True
+ >>> inputs.path_referenced(u'/path/to')
+ True
>>> inputs.path_referenced('/path/to/input')
True
>>> inputs.path_referenced('/path/to/notinput')
@@ -92,7 +95,7 @@
self.rewritten_command_line = self.rewritten_command_line.replace(local_path, remote_path)
def __rewrite_config_files(self, local_path, remote_path):
- for config_file, rewritten_contents in self.rewritten_config_files.iteritems():
+ for config_file, rewritten_contents in self.rewritten_config_files.items():
self.rewritten_config_files[config_file] = rewritten_contents.replace(local_path, remote_path)
def __items(self):
@@ -140,7 +143,7 @@
For each file that has been transferred and renamed, updated
command_line and configfiles to reflect that rewrite.
"""
- for local_path, remote_path in self.file_renames.iteritems():
+ for local_path, remote_path in self.file_renames.items():
self.job_inputs.rewrite_paths(local_path, remote_path)
def __action(self, path, type):
@@ -283,7 +286,7 @@
self.transfer_tracker.rewrite_input_paths()
def __upload_rewritten_config_files(self):
- for config_file, new_config_contents in self.job_inputs.rewritten_config_files.iteritems():
+ for config_file, new_config_contents in self.job_inputs.rewritten_config_files.items():
self.client.put_file(config_file, input_type='config', contents=new_config_contents)
def get_rewritten_command_line(self):
@@ -304,7 +307,7 @@
try:
action = action_mapper.action(output_file, 'output')
client.fetch_work_dir_output(source_file, working_directory, output_file, action[0])
- except Exception, e:
+ except Exception as e:
download_failure_exceptions.append(e)
# Remove from full output_files list so don't try to download directly.
output_files.remove(output_file)
@@ -312,7 +315,7 @@
try:
action = action_mapper.action(output_file, 'output')
client.fetch_output(output_file, working_directory=working_directory, action=action[0])
- except Exception, e:
+ except Exception as e:
download_failure_exceptions.append(e)
return __clean(download_failure_exceptions, cleanup_job, client)
@@ -340,9 +343,9 @@
def _read(path):
"""
Utility method to quickly read small files (config files and tool
- wrappers) into memory as strings.
+ wrappers) into memory as bytes.
"""
- input = open(path, "r")
+ input = open(path, "r", encoding="utf-8")
try:
return input.read()
finally:
diff -r 9a5a8d18cd16ed3331ba996de21ed4d4dfa93da9 -r 752929cea6d37423c14f46f317ef3815cfc3c3d0 lib/galaxy/jobs/runners/lwr_client/transport/curl.py
--- a/lib/galaxy/jobs/runners/lwr_client/transport/curl.py
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/curl.py
@@ -1,4 +1,7 @@
-from cStringIO import StringIO
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from io import StringIO
try:
from pycurl import Curl
except:
@@ -25,6 +28,8 @@
c.setopt(c.INFILESIZE, filesize)
if data:
c.setopt(c.POST, 1)
+ if type(data).__name__ == 'unicode':
+ data = data.encode('UTF-8')
c.setopt(c.POSTFIELDS, data)
c.perform()
if not output_path:
diff -r 9a5a8d18cd16ed3331ba996de21ed4d4dfa93da9 -r 752929cea6d37423c14f46f317ef3815cfc3c3d0 lib/galaxy/jobs/runners/lwr_client/transport/standard.py
--- a/lib/galaxy/jobs/runners/lwr_client/transport/standard.py
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/standard.py
@@ -3,16 +3,23 @@
"""
from __future__ import with_statement
import mmap
-import urllib2
+try:
+ from urllib2 import urlopen
+except ImportError:
+ from urllib.request import urlopen
+try:
+ from urllib2 import Request
+except ImportError:
+ from urllib.request import Request
class Urllib2Transport(object):
def _url_open(self, request, data):
- return urllib2.urlopen(request, data)
+ return urlopen(request, data)
def execute(self, url, data=None, input_path=None, output_path=None):
- request = urllib2.Request(url=url, data=data)
+ request = Request(url=url, data=data)
input = None
try:
if input_path:
@@ -26,7 +33,7 @@
with open(output_path, 'wb') as output:
while True:
buffer = response.read(1024)
- if buffer == "":
+ if not buffer:
break
output.write(buffer)
return response
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.