commit/galaxy-central: jmchilton: Temporary config option to isolate tool commands in their own shell.
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/2911cf250e75/

Changeset: 2911cf250e75
User: jmchilton
Date: 2015-02-02 14:27:19+00:00
Summary: Temporary config option to isolate tool commands in their own shell.

Like what is already done for Docker, this isolates metadata commands from the environment modifications required to resolve tool dependencies. It should allow for Python 3 dependencies (originally it also covered samtools, but Nate's other commit resolved that problem as well). It is just a config option for now; it will become the default once tested more thoroughly. For now, enable it by setting enable_beta_tool_command_isolation to True in galaxy.ini.
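Concretely, enabling it amounts to a single line in galaxy.ini (shown here uncommented and set to True; the sample config below ships it commented out and defaulting to False):

    enable_beta_tool_command_isolation = True

With this set, the tool command line is written to a tool_script.sh in the job working directory and run through /bin/sh, as the command_factory.py changes below show.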
Affected #: 6 files

diff -r 0d92ab68b8b3764e98d338c7d594b8e77aaf5b99 -r 2911cf250e75c953700e19f2e02b4f7f6f6c7f02 config/galaxy.ini.sample
--- a/config/galaxy.ini.sample
+++ b/config/galaxy.ini.sample
@@ -790,6 +790,11 @@
 # Enable Galaxy to communicate directly with a sequencer
 #enable_sequencer_communication = False
 
+# Separate tool command from rest of job script so tool dependencies
+# don't interfer with metadata generation (this will be the default
+# in a future release).
+#enable_beta_tool_command_isolation = False
+
 # Enable beta workflow modules that should not yet be considered part of Galaxy's
 # stable API.

diff -r 0d92ab68b8b3764e98d338c7d594b8e77aaf5b99 -r 2911cf250e75c953700e19f2e02b4f7f6f6c7f02 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -193,6 +193,7 @@
         # Tasked job runner.
         self.use_tasked_jobs = string_as_bool( kwargs.get( 'use_tasked_jobs', False ) )
         self.local_task_queue_workers = int(kwargs.get("local_task_queue_workers", 2))
+        self.commands_in_new_shell = string_as_bool( kwargs.get( 'enable_beta_tool_command_isolation', "False" ) )
         # The transfer manager and deferred job queue
         self.enable_beta_job_managers = string_as_bool( kwargs.get( 'enable_beta_job_managers', 'False' ) )
         # These workflow modules should not be considered part of Galaxy's

diff -r 0d92ab68b8b3764e98d338c7d594b8e77aaf5b99 -r 2911cf250e75c953700e19f2e02b4f7f6f6c7f02 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -765,6 +765,10 @@
     def get_parallelism(self):
         return self.tool.parallelism
 
+    @property
+    def commands_in_new_shell(self):
+        return self.app.config.commands_in_new_shell
+
     # legacy naming
     get_job_runner = get_job_runner_url

diff -r 0d92ab68b8b3764e98d338c7d594b8e77aaf5b99 -r 2911cf250e75c953700e19f2e02b4f7f6f6c7f02 lib/galaxy/jobs/command_factory.py
--- a/lib/galaxy/jobs/command_factory.py
+++ b/lib/galaxy/jobs/command_factory.py
@@ -5,6 +5,7 @@
 
 CAPTURE_RETURN_CODE = "return_code=$?"
 YIELD_CAPTURED_CODE = 'sh -c "exit $return_code"'
+DEFAULT_SHELL = "/bin/sh"
 
 from logging import getLogger
 log = getLogger( __name__ )
@@ -45,28 +46,21 @@
     if not container:
         __handle_dependency_resolution(commands_builder, job_wrapper, remote_command_params)
 
-    if container:
-        # Stop now and build command before handling metadata and copying
-        # working directory files back. These should always happen outside
-        # of docker container - no security implications when generating
-        # metadata and means no need for Galaxy to be available to container
-        # and not copying workdir outputs back means on can be more restrictive
-        # of where container can write to in some circumstances.
-
-        local_container_script = join( job_wrapper.working_directory, "container.sh" )
-        fh = file( local_container_script, "w" )
-        fh.write( "#!/bin/sh\n%s" % commands_builder.build() )
-        fh.close()
-        chmod( local_container_script, 0755 )
-
-        compute_container_script = local_container_script
-        if 'working_directory' in remote_command_params:
-            compute_container_script = "/bin/sh %s" % join(remote_command_params['working_directory'], "container.sh")
-
-        run_in_container_command = container.containerize_command(
-            compute_container_script
-        )
-        commands_builder = CommandsBuilder( run_in_container_command )
+    if container or job_wrapper.commands_in_new_shell:
+        externalized_commands = __externalize_commands(job_wrapper, commands_builder, remote_command_params)
+        if container:
+            # Stop now and build command before handling metadata and copying
+            # working directory files back. These should always happen outside
+            # of docker container - no security implications when generating
+            # metadata and means no need for Galaxy to be available to container
+            # and not copying workdir outputs back means on can be more restrictive
+            # of where container can write to in some circumstances.
+            run_in_container_command = container.containerize_command(
+                externalized_commands
+            )
+            commands_builder = CommandsBuilder( run_in_container_command )
+        else:
+            commands_builder = CommandsBuilder( externalized_commands )
 
     if include_work_dir_outputs:
         __handle_work_dir_outputs(commands_builder, job_wrapper, runner, remote_command_params)
@@ -77,6 +71,20 @@
     return commands_builder.build()
 
 
+def __externalize_commands(job_wrapper, commands_builder, remote_command_params, script_name="tool_script.sh"):
+    local_container_script = join( job_wrapper.working_directory, script_name )
+    fh = file( local_container_script, "w" )
+    fh.write( "#!%s\n%s" % (DEFAULT_SHELL, commands_builder.build()))
+    fh.close()
+    chmod( local_container_script, 0755 )
+
+    commands = local_container_script
+    if 'working_directory' in remote_command_params:
+        commands = "%s %s" % (DEFAULT_SHELL, join(remote_command_params['working_directory'], script_name))
+    log.info("Built script [%s] for tool command[%s]" % (local_container_script, commands))
+    return commands
+
+
 def __handle_version_command(commands_builder, job_wrapper):
     # Prepend version string
     write_version_cmd = job_wrapper.write_version_cmd

diff -r 0d92ab68b8b3764e98d338c7d594b8e77aaf5b99 -r 2911cf250e75c953700e19f2e02b4f7f6f6c7f02 test/unit/jobs/test_command_factory.py
--- a/test/unit/jobs/test_command_factory.py
+++ b/test/unit/jobs/test_command_factory.py
@@ -140,6 +140,7 @@
         self.configured_external_metadata_kwds = None
         self.working_directory = "job1"
         self.prepare_input_files_cmds = None
+        self.commands_in_new_shell = False
 
     def get_command_line(self):
         return self.command_line

diff -r 0d92ab68b8b3764e98d338c7d594b8e77aaf5b99 -r 2911cf250e75c953700e19f2e02b4f7f6f6c7f02 test/unit/jobs/test_runner_local.py
--- a/test/unit/jobs/test_runner_local.py
+++ b/test/unit/jobs/test_runner_local.py
@@ -106,6 +106,7 @@
         self.tool = tool
         self.state = model.Job.states.QUEUED
         self.command_line = "echo HelloWorld"
+        self.commands_in_new_shell = False
         self.prepare_called = False
        self.write_version_cmd = None
         self.dependency_shell_commands = None
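For reference, the pattern the new __externalize_commands helper implements can be reduced to the following standalone sketch (illustrative only; the function name externalize_command and the example paths are hypothetical, not Galaxy's API):

    import os
    from os.path import join

    DEFAULT_SHELL = "/bin/sh"

    def externalize_command(working_directory, command_line, script_name="tool_script.sh"):
        # Write the tool command line to its own script in the job working directory.
        script_path = join(working_directory, script_name)
        with open(script_path, "w") as fh:
            fh.write("#!%s\n%s\n" % (DEFAULT_SHELL, command_line))
        os.chmod(script_path, 0o755)
        # Return the command the job script uses to run the tool in a fresh shell,
        # so environment changes made for tool dependencies do not leak into
        # metadata generation.
        return "%s %s" % (DEFAULT_SHELL, script_path)

    # Example: externalize_command("/tmp/job1", "echo HelloWorld")
    # -> "/bin/sh /tmp/job1/tool_script.sh"

Running the tool through a fresh shell this way is the same trick previously applied only to Docker's container.sh, which is why the container-specific logic above could be generalized.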
Repository URL: https://bitbucket.org/galaxy/galaxy-central/