2 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/48ee04030f89/
Changeset:   48ee04030f89
User:        jmchilton
Date:        2014-05-13 07:13:02
Summary:     Implement serializable class that can describe full dependency context for remote dependency resolution.

Previously the LWR just serialized tool requirements; by transitioning to this
class, the LWR should be able to resolve tool shed installed packages as well,
which requires the additional repository and tool dependency context.

Affected #:  2 files

diff -r 3b29b6f83d17ed07d2c3ea49d839b29d66cad580 -r 48ee04030f893a9a9657b639bb38819fb4404388 lib/galaxy/tools/deps/dependencies.py
--- /dev/null
+++ b/lib/galaxy/tools/deps/dependencies.py
@@ -0,0 +1,68 @@
+from galaxy.tools.deps.requirements import ToolRequirement
+from galaxy.util import bunch
+
+
+class DependenciesDescription(object):
+    """ Capture (in a readily serializable way) the context related to a tool's
+    dependencies - both the tool's listed requirements and the tool shed
+    related context required to resolve dependencies via the
+    ToolShedPackageDependencyResolver.
+
+    This is meant to enable remote resolution of dependencies, by the LWR or
+    other potential remote execution mechanisms.
+    """
+
+    def __init__(self, requirements=[], installed_tool_dependencies=[]):
+        self.requirements = requirements
+        # tool shed installed tool dependencies...
+        self.installed_tool_dependencies = installed_tool_dependencies
+
+    def to_dict(self):
+        return dict(
+            requirements=[r.to_dict() for r in self.requirements],
+            installed_tool_dependencies=[DependenciesDescription._toolshed_install_dependency_to_dict(d) for d in self.installed_tool_dependencies]
+        )
+
+    @staticmethod
+    def from_dict(as_dict):
+        if as_dict is None:
+            return None
+
+        requirements_dicts = as_dict.get('requirements', [])
+        requirements = [ToolRequirement.from_dict(r) for r in requirements_dicts]
+        installed_tool_dependencies_dicts = as_dict.get('installed_tool_dependencies', [])
+        installed_tool_dependencies = map(DependenciesDescription._toolshed_install_dependency_from_dict, installed_tool_dependencies_dicts)
+        return DependenciesDescription(
+            requirements=requirements,
+            installed_tool_dependencies=installed_tool_dependencies
+        )
+
+    @staticmethod
+    def _toolshed_install_dependency_from_dict(as_dict):
+        # Rather than requiring full models in LWR, just use simple objects
+        # containing only properties and associations used to resolve
+        # dependencies for tool execution.
+        repository_object = bunch.Bunch(
+            name=as_dict['repository_name'],
+            owner=as_dict['repository_owner'],
+            installed_changeset_revision=as_dict['repository_installed_changeset'],
+        )
+        dependency_object = bunch.Bunch(
+            name=as_dict['dependency_name'],
+            version=as_dict['dependency_version'],
+            type=as_dict['dependency_type'],
+            tool_shed_repository=repository_object,
+        )
+        return dependency_object
+
+    @staticmethod
+    def _toolshed_install_dependency_to_dict(tool_dependency):
+        tool_shed_repository = tool_dependency.tool_shed_repository
+        return dict(
+            dependency_name=tool_dependency.name,
+            dependency_version=tool_dependency.version,
+            dependency_type=tool_dependency.type,
+            repository_name=tool_shed_repository.name,
+            repository_owner=tool_shed_repository.owner,
+            repository_installed_changeset=tool_shed_repository.installed_changeset_revision,
+        )

diff -r 3b29b6f83d17ed07d2c3ea49d839b29d66cad580 -r 48ee04030f893a9a9657b639bb38819fb4404388 test/unit/tools/test_tool_dependency_description.py
--- /dev/null
+++ b/test/unit/tools/test_tool_dependency_description.py
@@ -0,0 +1,44 @@
+from galaxy.model import tool_shed_install
+from galaxy.tools.deps import requirements
+from galaxy.tools.deps import dependencies
+
+
+def test_serialization():
+    repository = tool_shed_install.ToolShedRepository(
+        owner="devteam",
+        name="tophat",
+        installed_changeset_revision="abcdefghijk",
+    )
+    dependency = tool_shed_install.ToolDependency(
+        name="tophat",
+        version="2.0",
+        type="package",
+        status=tool_shed_install.ToolDependency.installation_status.INSTALLED,
+    )
+    dependency.tool_shed_repository = repository
+    tool_requirement = requirements.ToolRequirement(
+        name="tophat",
+        version="2.0",
+        type="package",
+    )
+    descript = dependencies.DependenciesDescription(
+        requirements=[tool_requirement],
+        installed_tool_dependencies=[dependency],
+    )
+    result_descript = dependencies.DependenciesDescription.from_dict(
+        descript.to_dict()
+    )
+    result_requirement = result_descript.requirements[0]
+    assert result_requirement.name == "tophat"
+    assert result_requirement.version == "2.0"
+    assert result_requirement.type == "package"
+
+    result_tool_shed_dependency = result_descript.installed_tool_dependencies[0]
+    assert result_tool_shed_dependency.name == "tophat"
+    assert result_tool_shed_dependency.version == "2.0"
+    assert result_tool_shed_dependency.type == "package"
+    result_tool_shed_repository = result_tool_shed_dependency.tool_shed_repository
+    assert result_tool_shed_repository.name == "tophat"
+    assert result_tool_shed_repository.owner == "devteam"
+    assert result_tool_shed_repository.installed_changeset_revision == "abcdefghijk"
+

https://bitbucket.org/galaxy/galaxy-central/commits/24b5759f33f8/
Changeset:   24b5759f33f8
User:        jmchilton
Date:        2014-05-13 07:13:02
Summary:     Update LWR client through LWR changeset 9fb7fcb.

Among other smaller fixes, this update causes the LWR client/runner to pass
more tool dependency context through to the remote LWR server when
`dependency_resolution` is set to `remote`. This additional context allows
tool shed packages to be resolved remotely (i.e. it allows proper use of the
ToolShedPackageDependencyResolver on the remote LWR server). It remains the
responsibility of the deployer to ensure identical versions of tool shed
packages are installed on both the remote LWR server and the local Galaxy
instance.
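Taken together, the two changesets allow a dependencies description to be
built on the Galaxy side, serialized into the LWR launch request, and rebuilt
on the remote server. The following is a minimal sketch of that round trip; it
is not part of either changeset, the module paths come from the diffs, and the
tophat/devteam values are purely illustrative::

    from galaxy.tools.deps.dependencies import DependenciesDescription
    from galaxy.tools.deps.requirements import ToolRequirement
    from galaxy.util.bunch import Bunch

    # Stand-ins for tool_shed_install model objects; only the attributes read
    # by _toolshed_install_dependency_to_dict() are needed here.
    repository = Bunch(name="tophat", owner="devteam", installed_changeset_revision="abcdefghijk")
    dependency = Bunch(name="tophat", version="2.0", type="package", tool_shed_repository=repository)

    description = DependenciesDescription(
        requirements=[ToolRequirement(name="tophat", version="2.0", type="package")],
        installed_tool_dependencies=[dependency],
    )

    # JSON-serializable payload sent to the LWR, then rebuilt remotely.
    as_dict = description.to_dict()
    rebuilt = DependenciesDescription.from_dict(as_dict)
    assert rebuilt.requirements[0].name == "tophat"
    assert rebuilt.installed_tool_dependencies[0].tool_shed_repository.owner == "devteam"

Because from_dict() rebuilds the installed dependencies as plain Bunch
objects, the remote LWR server does not need Galaxy's tool_shed_install models
in order to resolve tool shed packages.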
Affected #:  11 files

diff -r 48ee04030f893a9a9657b639bb38819fb4404388 -r 24b5759f33f87879e945a2f14ef5e8abfce488c1 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -5,6 +5,7 @@
 from galaxy.jobs import ComputeEnvironment
 from galaxy.jobs import JobDestination
 from galaxy.jobs.command_factory import build_command
+from galaxy.tools.deps import dependencies
 from galaxy.util import string_as_bool_or_none
 from galaxy.util.bunch import Bunch
 
@@ -118,9 +119,7 @@
             return
 
         try:
-            dependency_resolution = LwrJobRunner.__dependency_resolution( client )
-            remote_dependency_resolution = dependency_resolution == "remote"
-            requirements = job_wrapper.tool.requirements if remote_dependency_resolution else []
+            dependencies_description = LwrJobRunner.__dependencies_description( client, job_wrapper )
             rewrite_paths = not LwrJobRunner.__rewrite_parameters( client )
             unstructured_path_rewrites = {}
             if compute_environment:
@@ -133,7 +132,7 @@
                 working_directory=job_wrapper.working_directory,
                 tool=job_wrapper.tool,
                 config_files=job_wrapper.extra_filenames,
-                requirements=requirements,
+                dependencies_description=dependencies_description,
                 env=client.env,
                 rewrite_paths=rewrite_paths,
                 arbitrary_files=unstructured_path_rewrites,
@@ -376,6 +375,19 @@
         return client_outputs
 
     @staticmethod
+    def __dependencies_description( lwr_client, job_wrapper ):
+        dependency_resolution = LwrJobRunner.__dependency_resolution( lwr_client )
+        remote_dependency_resolution = dependency_resolution == "remote"
+        if not remote_dependency_resolution:
+            return None
+        requirements = job_wrapper.tool.requirements or []
+        installed_tool_dependencies = job_wrapper.tool.installed_tool_dependencies or []
+        return dependencies.DependenciesDescription(
+            requirements=requirements,
+            installed_tool_dependencies=installed_tool_dependencies,
+        )
+
+    @staticmethod
     def __dependency_resolution( lwr_client ):
         dependency_resolution = lwr_client.destination_params.get( "dependency_resolution", "local" )
         if dependency_resolution not in ["none", "local", "remote"]:

diff -r 48ee04030f893a9a9657b639bb38819fb4404388 -r 24b5759f33f87879e945a2f14ef5e8abfce488c1 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -4,6 +4,39 @@
 
 This module contains logic for interfacing with an external LWR server.
 
+------------------
+Configuring Galaxy
+------------------
+
+Galaxy job runners are configured in Galaxy's ``job_conf.xml`` file. See ``job_conf.xml.sample_advanced``
+in your Galaxy code base or on
+`Bitbucket <https://bitbucket.org/galaxy/galaxy-dist/src/tip/job_conf.xml.sample_advanced?at=default>`_
+for information on how to configure Galaxy to interact with the LWR.
+
+Galaxy also supports an older, less rich configuration of job runners directly
+in its main ``universe_wsgi.ini`` file. The following section describes how to
+configure Galaxy to communicate with the LWR in this legacy mode.
+
+Legacy
+------
+
+A Galaxy tool can be configured to be executed remotely via LWR by
+adding a line to the ``universe_wsgi.ini`` file under the
+``galaxy:tool_runners`` section with the format::
+
+    <tool_id> = lwr://http://<lwr_host>:<lwr_port>
+
+As an example, if a host named remotehost is running the LWR server
+application on port ``8913``, then the tool with id ``test_tool`` can
+be configured to run remotely on remotehost by adding the following
+line to ``universe.ini``::
+
+    test_tool = lwr://http://remotehost:8913
+
+Remember this must be added after the ``[galaxy:tool_runners]`` header
+in the ``universe.ini`` file.
+
+
 """
 
 from .staging.down import finish_job

diff -r 48ee04030f893a9a9657b639bb38819fb4404388 -r 24b5759f33f87879e945a2f14ef5e8abfce488c1 lib/galaxy/jobs/runners/lwr_client/action_mapper.py
--- a/lib/galaxy/jobs/runners/lwr_client/action_mapper.py
+++ b/lib/galaxy/jobs/runners/lwr_client/action_mapper.py
@@ -166,8 +166,8 @@
         action_type = mapper.action_type
         file_lister = mapper.file_lister
         if type in ["workdir", "output_workdir"] and action_type == "none":
-            ## We are changing the working_directory relative to what
-            ## Galaxy would use, these need to be copied over.
+            # We are changing the working_directory relative to what
+            # Galaxy would use, these need to be copied over.
             action_type = "copy"
         action_class = actions.get(action_type, None)
         if action_class is None:

diff -r 48ee04030f893a9a9657b639bb38819fb4404388 -r 24b5759f33f87879e945a2f14ef5e8abfce488c1 lib/galaxy/jobs/runners/lwr_client/amqp_exchange.py
--- a/lib/galaxy/jobs/runners/lwr_client/amqp_exchange.py
+++ b/lib/galaxy/jobs/runners/lwr_client/amqp_exchange.py
@@ -13,8 +13,8 @@
 
 DEFAULT_EXCHANGE_NAME = "lwr"
 DEFAULT_EXCHANGE_TYPE = "direct"
-DEFAULT_TIMEOUT = 0.2  # Set timeout to periodically give up looking and check
-                       # if polling should end.
+# Set timeout to periodically give up looking and check if polling should end.
+DEFAULT_TIMEOUT = 0.2
 
 
 class LwrExchange(object):
@@ -48,7 +48,6 @@
         queue = self.__queue(queue_name)
         with self.connection(self.__url, **connection_kwargs) as connection:
             with kombu.Consumer(connection, queues=[queue], callbacks=[callback], accept=['json']):
-                log.debug("Consuming queue %s" % queue)
                 while check:
                     try:
                         connection.drain_events(timeout=self.__timeout)
@@ -59,7 +58,6 @@
         with self.connection(self.__url) as connection:
             with pools.producers[connection].acquire() as producer:
                 key = self.__queue_name(name)
-                log.debug("Publishing with key %s and payload %s" % (key, payload))
                 producer.publish(
                     payload,
                     serializer='json',

diff -r 48ee04030f893a9a9657b639bb38819fb4404388 -r 24b5759f33f87879e945a2f14ef5e8abfce488c1 lib/galaxy/jobs/runners/lwr_client/client.py
--- a/lib/galaxy/jobs/runners/lwr_client/client.py
+++ b/lib/galaxy/jobs/runners/lwr_client/client.py
@@ -39,9 +39,8 @@
             )
         else:
             job_directory = None
-        self.env = destination_params.get( "env", [] )
+        self.env = destination_params.get("env", [])
         self.files_endpoint = destination_params.get("files_endpoint", None)
-        self.env = destination_params.get("env", [])
         self.job_directory = job_directory
         self.default_file_action = self.destination_params.get("default_file_action", "transfer")
 
@@ -83,7 +82,7 @@
         super(JobClient, self).__init__(destination_params, job_id)
         self.job_manager_interface = job_manager_interface
 
-    def launch(self, command_line, requirements=[], env=[], remote_staging=[], job_config=None):
+    def launch(self, command_line, dependencies_description=None, env=[], remote_staging=[], job_config=None):
         """
         Queue up the execution of the supplied `command_line` on the remote
         server. Called launch for historical reasons, should be renamed to
@@ -98,8 +97,8 @@
         submit_params_dict = submit_params(self.destination_params)
         if submit_params_dict:
             launch_params['params'] = dumps(submit_params_dict)
-        if requirements:
-            launch_params['requirements'] = dumps([requirement.to_dict() for requirement in requirements])
+        if dependencies_description:
+            launch_params['dependencies_description'] = dumps(dependencies_description.to_dict())
         if env:
             launch_params['env'] = dumps(env)
         if remote_staging:
@@ -248,8 +247,8 @@
         return self._raw_execute(self._upload_file_action(args), args, contents, input_path)
 
     def _upload_file_action(self, args):
-        ## Hack for backward compatibility, instead of using new upload_file
-        ## path. Use old paths.
+        # Hack for backward compatibility, instead of using new upload_file
+        # path. Use old paths.
         input_type = args['input_type']
         action = {
             # For backward compatibility just target upload_input_extra for all
@@ -295,15 +294,15 @@
             raise Exception(error_message)
         self.client_manager = client_manager
 
-    def launch(self, command_line, requirements=[], env=[], remote_staging=[], job_config=None):
+    def launch(self, command_line, dependencies_description=None, env=[], remote_staging=[], job_config=None):
         """
         """
         launch_params = dict(command_line=command_line, job_id=self.job_id)
         submit_params_dict = submit_params(self.destination_params)
         if submit_params_dict:
             launch_params['params'] = submit_params_dict
-        if requirements:
-            launch_params['requirements'] = [requirement.to_dict() for requirement in requirements]
+        if dependencies_description:
+            launch_params['dependencies_description'] = dependencies_description.to_dict()
         if env:
             launch_params['env'] = env
         if remote_staging:

diff -r 48ee04030f893a9a9657b639bb38819fb4404388 -r 24b5759f33f87879e945a2f14ef5e8abfce488c1 lib/galaxy/jobs/runners/lwr_client/destination.py
--- a/lib/galaxy/jobs/runners/lwr_client/destination.py
+++ b/lib/galaxy/jobs/runners/lwr_client/destination.py
@@ -31,9 +31,9 @@
     if not url.endswith("/"):
         url += "/"
 
-    ## Check for private token embedded in the URL. A URL of the form
-    ## https://moo@cow:8913 will try to contact https://cow:8913
-    ## with a private key of moo
+    # Check for private token embedded in the URL. A URL of the form
+    # https://moo@cow:8913 will try to contact https://cow:8913
+    # with a private key of moo
     private_token_format = "https?://(.*)@.*/?"
     private_token_match = match(private_token_format, url)
    private_token = None

diff -r 48ee04030f893a9a9657b639bb38819fb4404388 -r 24b5759f33f87879e945a2f14ef5e8abfce488c1 lib/galaxy/jobs/runners/lwr_client/interface.py
--- a/lib/galaxy/jobs/runners/lwr_client/interface.py
+++ b/lib/galaxy/jobs/runners/lwr_client/interface.py
@@ -67,8 +67,8 @@
         self.object_store = object_store
 
     def __app_args(self):
-        ## Arguments that would be specified from LwrApp if running
-        ## in web server.
+        # Arguments that would be specified from LwrApp if running
+        # in web server.
         return {
             'manager': self.job_manager,
             'file_cache': self.file_cache,

diff -r 48ee04030f893a9a9657b639bb38819fb4404388 -r 24b5759f33f87879e945a2f14ef5e8abfce488c1 lib/galaxy/jobs/runners/lwr_client/staging/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/staging/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/staging/__init__.py
@@ -30,8 +30,9 @@
         be transferred to remote server).
     working_directory : str
         Local path created by Galaxy for running this job.
-    requirements : list
-        List of requirements for tool execution.
+    dependencies_description : list
+        galaxy.tools.deps.dependencies.DependenciesDescription object describing
+        the tool dependency context for remote dependency resolution.
     env: list
         List of dict object describing environment variables to populate.
     version_file : str
@@ -55,7 +56,7 @@
                  input_files,
                  client_outputs,
                  working_directory,
-                 requirements,
+                 dependencies_description=None,
                  env=[],
                  arbitrary_files=None,
                  rewrite_paths=True,
@@ -66,7 +67,7 @@
         self.input_files = input_files
         self.client_outputs = client_outputs
         self.working_directory = working_directory
-        self.requirements = requirements
+        self.dependencies_description = dependencies_description
        self.env = env
        self.rewrite_paths = rewrite_paths
        self.arbitrary_files = arbitrary_files or {}
@@ -79,6 +80,15 @@
     def version_file(self):
         return self.client_outputs.version_file
 
+    @property
+    def tool_dependencies(self):
+        if not self.remote_dependency_resolution:
+            return None
+        return dict(
+            requirements=(self.tool.requirements or []),
+            installed_tool_dependencies=(self.tool.installed_tool_dependencies or [])
+        )
+
 
 class ClientOutputs(object):
     """ Abstraction describing the output datasets EXPECTED by the Galaxy job

diff -r 48ee04030f893a9a9657b639bb38819fb4404388 -r 24b5759f33f87879e945a2f14ef5e8abfce488c1 lib/galaxy/jobs/runners/lwr_client/staging/up.py
--- a/lib/galaxy/jobs/runners/lwr_client/staging/up.py
+++ b/lib/galaxy/jobs/runners/lwr_client/staging/up.py
@@ -24,7 +24,7 @@
     job_id = file_stager.job_id
     launch_kwds = dict(
         command_line=rebuilt_command_line,
-        requirements=client_job_description.requirements,
+        dependencies_description=client_job_description.dependencies_description,
         env=client_job_description.env,
     )
     if file_stager.job_config:

diff -r 48ee04030f893a9a9657b639bb38819fb4404388 -r 24b5759f33f87879e945a2f14ef5e8abfce488c1 lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
@@ -15,8 +15,8 @@
 def __get_transport_type(transport_type, os_module):
     if not transport_type:
         use_curl = os_module.getenv('LWR_CURL_TRANSPORT', "0")
-        ## If LWR_CURL_TRANSPORT is unset or set to 0, use default,
-        ## else use curl.
+        # If LWR_CURL_TRANSPORT is unset or set to 0, use default,
+        # else use curl.
         if use_curl.isdigit() and not int(use_curl):
             transport_type = 'urllib'
         else:

diff -r 48ee04030f893a9a9657b639bb38819fb4404388 -r 24b5759f33f87879e945a2f14ef5e8abfce488c1 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2673,12 +2673,18 @@
     def build_dependency_shell_commands( self ):
         """Return a list of commands to be run to populate the current
         environment to include this tools requirements."""
+        return self.app.toolbox.dependency_manager.dependency_shell_commands(
+            self.requirements,
+            installed_tool_dependencies=self.installed_tool_dependencies
+        )
+
+    @property
+    def installed_tool_dependencies(self):
         if self.tool_shed_repository:
             installed_tool_dependencies = self.tool_shed_repository.tool_dependencies_installed_or_in_error
         else:
             installed_tool_dependencies = None
-        return self.app.toolbox.dependency_manager.dependency_shell_commands( self.requirements,
-                                                                              installed_tool_dependencies=installed_tool_dependencies )
+        return installed_tool_dependencies
 
     def build_redirect_url_params( self, param_dict ):
         """

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
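For reference, a rough sketch of how the serialized description travels with
the launch request and is rebuilt on the remote side, mirroring
JobClient.launch() in the client.py diff above. This is not part of either
changeset: the helper names pack_launch_params and unpack_dependencies are
hypothetical, and the standard json module stands in for whatever dumps/loads
helpers the client and server actually use::

    from json import dumps, loads

    from galaxy.tools.deps.dependencies import DependenciesDescription

    def pack_launch_params(command_line, dependencies_description=None):
        # Galaxy/client side: the description rides along as a JSON blob in
        # the launch parameters, keyed by 'dependencies_description'.
        launch_params = dict(command_line=command_line)
        if dependencies_description is not None:
            launch_params["dependencies_description"] = dumps(dependencies_description.to_dict())
        return launch_params

    def unpack_dependencies(launch_params):
        # Remote LWR side: rebuild the same object so that both the tool's
        # requirements and the tool shed repository context are available to
        # the ToolShedPackageDependencyResolver.
        raw = launch_params.get("dependencies_description")
        return DependenciesDescription.from_dict(loads(raw)) if raw else None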