galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
May 2014
- 1 participants
- 242 discussions
14 May '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9a1415f8108f/
Changeset: 9a1415f8108f
User: greg
Date: 2014-05-15 00:13:33
Summary: Enhance the galaxy API for the Tool Shed to retrieve the latest installable revision from the Tool Shed from an installed repository, specifying a name and owner. Enhance the ~/scripts/api/install_tool_shed_repositories.py script to allow the changeset_revision to be optional, and if not specified, the latest installable revision will be installed for the repository specified by the name and owner.
Affected #: 3 files
diff -r 22ec0ac1ebcd1cc5ae5fda4ec70723043e8d5c5f -r 9a1415f8108f6283181c7a5b564118920359decc lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -66,6 +66,50 @@
exported_workflows.append( display_dict )
return exported_workflows
+ @web.expose_api
+ def get_latest_installable_revision( self, trans, payload, **kwd ):
+ """
+ POST /api/tool_shed_repositories/get_latest_installable_revision
+ Get the latest installable revision of a specified repository from a specified Tool Shed.
+
+ :param key: the current Galaxy admin user's API key
+
+ The following parameters are included in the payload.
+ :param tool_shed_url (required): the base URL of the Tool Shed from which to retrieve the Repository revision.
+ :param name (required): the name of the Repository
+ :param owner (required): the owner of the Repository
+ """
+ # Get the information about the repository to be installed from the payload.
+ tool_shed_url = payload.get( 'tool_shed_url', '' )
+ if not tool_shed_url:
+ raise HTTPBadRequest( detail="Missing required parameter 'tool_shed_url'." )
+ name = payload.get( 'name', '' )
+ if not name:
+ raise HTTPBadRequest( detail="Missing required parameter 'name'." )
+ owner = payload.get( 'owner', '' )
+ if not owner:
+ raise HTTPBadRequest( detail="Missing required parameter 'owner'." )
+ # Make sure the current user's API key proves he is an admin user in this Galaxy instance.
+ if not trans.user_is_admin():
+ raise HTTPForbidden( detail='You are not authorized to request the latest installable revision for a repository in this Galaxy instance.' )
+ params = '?name=%s&owner=%s' % ( name, owner )
+ url = common_util.url_join( tool_shed_url,
+ 'api/repositories/get_ordered_installable_revisions%s' % params )
+ try:
+ raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
+ except Exception, e:
+ message = "Error attempting to retrieve the latest installable revision from tool shed %s for repository %s owned by %s: %s" % \
+ ( str( tool_shed_url ), str( name ), str( owner ), str( e ) )
+ log.debug( message )
+ return dict( status='error', error=message )
+ if raw_text:
+ # If successful, the response from get_ordered_installable_revisions will be a list of
+ # changeset_revision hash strings.
+ changeset_revisions = json.from_json_string( raw_text )
+ if len( changeset_revisions ) >= 1:
+ return changeset_revisions[ -1 ]
+ return suc.INITIAL_CHANGELOG_HASH
+
def __get_value_mapper( self, trans, tool_shed_repository ):
value_mapper={ 'id' : trans.security.encode_id( tool_shed_repository.id ),
'error_message' : tool_shed_repository.error_message or '' }
diff -r 22ec0ac1ebcd1cc5ae5fda4ec70723043e8d5c5f -r 9a1415f8108f6283181c7a5b564118920359decc lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -321,7 +321,8 @@
'exported_workflows' : 'GET',
'import_workflow' : 'POST',
'import_workflows' : 'POST' },
- collection={ 'reset_metadata_on_installed_repositories' : 'POST' },
+ collection={ 'get_latest_installable_revision' : 'POST',
+ 'reset_metadata_on_installed_repositories' : 'POST' },
controller='tool_shed_repositories',
name_prefix='tool_shed_repository_',
path_prefix='/api',
diff -r 22ec0ac1ebcd1cc5ae5fda4ec70723043e8d5c5f -r 9a1415f8108f6283181c7a5b564118920359decc scripts/api/install_tool_shed_repositories.py
--- a/scripts/api/install_tool_shed_repositories.py
+++ b/scripts/api/install_tool_shed_repositories.py
@@ -1,8 +1,12 @@
#!/usr/bin/env python
"""
-Install a specified repository revision from a specified tool shed into Galaxy. This example demonstrates installation of a repository that contains
-valid tools, loading them into a section of the Galaxy tool panel or creating a new tool panel section.
-You can choose if tool dependencies or repository dependencies should be installed, use --repository-deps or --tool-deps.
+If a repository name, owner and revision are specified, install the revision from a specified tool shed into Galaxy.
+Specifying a revision is optional; if it is not specified, the latest installable revision will automatically be installed.
+However, the name and owner are required.
+
+This example demonstrates installation of a repository that contains valid tools, loading them into a section of the
+Galaxy tool panel or creating a new tool panel section. You can choose if tool dependencies or repository dependencies
+should be installed, use --repository-deps or --tool-deps.
This example requires a tool panel config file (e.g., tool_conf.xml, shed_tool_conf.xml, etc) to contain a tool panel section like the following:
@@ -10,7 +14,7 @@
</section>
Here is a working example of how to use this script to install a repository from the test tool shed.
-./install_tool_shed_repositories.py --api <api key> --local <galaxy base url> --url http://testtoolshed.g2.bx.psu.edu --name gregs_filter --owner greg --revision f28d5018f9cb --tool-deps
+./install_tool_shed_repositories.py --api <api key> --local <galaxy base url> --url http://testtoolshed.g2.bx.psu.edu --name gregs_filter --owner greg --tool-deps
"""
import os
@@ -25,7 +29,20 @@
data[ 'tool_shed_url' ] = options.tool_shed_url
data[ 'name' ] = options.name
data[ 'owner' ] = options.owner
- data[ 'changeset_revision' ] = options.changeset_revision
+ if options.changeset_revision:
+ data[ 'changeset_revision' ] = options.changeset_revision
+ else:
+ # If the changeset_revision is not specified, default to the latest installable revision.
+ revision_data = {}
+ revision_data[ 'tool_shed_url' ] = options.tool_shed_url.rstrip( '/' )
+ revision_data[ 'name' ] = options.name
+ revision_data[ 'owner' ] = options.owner
+ revision_url = '%s%s' % ( options.local_url.rstrip( '/' ), '/api/tool_shed_repositories/get_latest_installable_revision' )
+ latest_installable_revision = submit( options.api,
+ revision_url,
+ revision_data,
+ return_formatted=False )
+ data[ 'changeset_revision' ] = latest_installable_revision
if options.tool_panel_section_id:
data[ 'tool_panel_section_id' ] = options.tool_panel_section_id
elif options.new_tool_panel_section_label:
@@ -34,7 +51,7 @@
data[ 'install_repository_dependencies' ] = options.install_repository_dependencies
if options.install_tool_dependencies:
data[ 'install_tool_dependencies' ] = options.install_tool_dependencies
- submit( options.api, '%s%s' % ( options.local_url.strip( '/' ), '/api/tool_shed_repositories/new/install_repository_revision' ), data )
+ submit( options.api, '%s%s' % ( options.local_url.rstrip( '/' ), '/api/tool_shed_repositories/new/install_repository_revision' ), data )
if __name__ == '__main__':
parser = argparse.ArgumentParser( description='Installation of tool shed repositories via the Galaxy API.' )
@@ -43,7 +60,7 @@
parser.add_argument( "-l", "--local", dest="local_url", required=True, help="URL of the galaxy instance." )
parser.add_argument( "-n", "--name", required=True, help="Repository name." )
parser.add_argument( "-o", "--owner", required=True, help="Repository owner." )
- parser.add_argument( "-r", "--revision", dest="changeset_revision", required=True, help="Repository owner." )
+ parser.add_argument( "-r", "--revision", dest="changeset_revision", help="Repository revision." )
parser.add_argument( "--panel-section-id", dest="tool_panel_section_id", help="Tool panel section id if you want to add your repository to an existing tool section." )
parser.add_argument( "--panel-section-name", dest="new_tool_panel_section_label", help="New tool panel section label. If specified a new tool section will be created." )
parser.add_argument( "--repository-deps", dest="install_repository_dependencies", action="store_true", default=False, help="Install repository dependencies. [False]")
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: natefoo: Add a config option for controlling the AMQP consumer timeout, catch socket.error and attempt to recover from it.
by commits-noreply@bitbucket.org 14 May '14
by commits-noreply@bitbucket.org 14 May '14
14 May '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/22ec0ac1ebcd/
Changeset: 22ec0ac1ebcd
User: natefoo
Date: 2014-05-14 23:34:15
Summary: Add a config option for controlling the AMQP consumer timeout, catch socket.error and attempt to recover from it.
Affected #: 4 files
diff -r a24a928c3b8c4e49bad89fb95496c6b1f78c23bc -r 22ec0ac1ebcd1cc5ae5fda4ec70723043e8d5c5f job_conf.xml.sample_advanced
--- a/job_conf.xml.sample_advanced
+++ b/job_conf.xml.sample_advanced
@@ -51,6 +51,12 @@
<!-- <param id="amqp_connect_ssl_keyfile">/path/to/key.pem</param> --><!-- <param id="amqp_connect_ssl_certfile">/path/to/cert.pem</param> --><!-- <param id="amqp_connect_ssl_cert_reqs">cert_required</param> -->
+ <!-- By default, the AMQP consumer uses a nonblocking connection with
+ a 0.2 second timeout. In testing, this works fine for
+ unencrypted AMQP connections, but with SSL it will cause the
+ client to reconnect to the server after each timeout. Set to a
+ higher value (in seconds) (or `None` to use blocking connections). -->
+ <!-- <param id="amqp_consumer_timeout">None</param> --></plugin><plugin id="cli" type="runner" load="galaxy.jobs.runners.cli:ShellJobRunner" /><plugin id="condor" type="runner" load="galaxy.jobs.runners.condor:CondorJobRunner" />
diff -r a24a928c3b8c4e49bad89fb95496c6b1f78c23bc -r 22ec0ac1ebcd1cc5ae5fda4ec70723043e8d5c5f lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -47,10 +47,16 @@
self._init_monitor_thread()
self._init_worker_threads()
amqp_connect_ssl_args = {}
+ amqp_consumer_timeout = False
for kwd in kwds.keys():
if kwd.startswith('amqp_connect_ssl_'):
amqp_connect_ssl_args[kwd] = kwds[kwd]
client_manager_kwargs = {'transport_type': transport, 'cache': string_as_bool_or_none(cache), "url": url, 'amqp_connect_ssl_args': amqp_connect_ssl_args or None}
+ if 'amqp_consumer_timeout' in kwds:
+ if kwds['amqp_consumer_timeout'] == 'None':
+ client_manager_kwargs['amqp_consumer_timeout'] = None
+ else:
+ client_manager_kwargs['amqp_consumer_timeout'] = float(kwds['amqp_consumer_timeout'])
self.galaxy_url = galaxy_url
self.client_manager = build_client_manager(**client_manager_kwargs)
diff -r a24a928c3b8c4e49bad89fb95496c6b1f78c23bc -r 22ec0ac1ebcd1cc5ae5fda4ec70723043e8d5c5f lib/galaxy/jobs/runners/lwr_client/amqp_exchange.py
--- a/lib/galaxy/jobs/runners/lwr_client/amqp_exchange.py
+++ b/lib/galaxy/jobs/runners/lwr_client/amqp_exchange.py
@@ -6,6 +6,7 @@
import socket
import logging
+from time import sleep
log = logging.getLogger(__name__)
@@ -47,13 +48,18 @@
def consume(self, queue_name, callback, check=True, connection_kwargs={}):
queue = self.__queue(queue_name)
- with self.connection(self.__url, ssl=self.__connect_ssl, **connection_kwargs) as connection:
- with kombu.Consumer(connection, queues=[queue], callbacks=[callback], accept=['json']):
- while check:
- try:
- connection.drain_events(timeout=self.__timeout)
- except socket.timeout:
- pass
+ while check:
+ try:
+ with self.connection(self.__url, ssl=self.__connect_ssl, **connection_kwargs) as connection:
+ with kombu.Consumer(connection, queues=[queue], callbacks=[callback], accept=['json']):
+ while check and connection.connected:
+ try:
+ connection.drain_events(timeout=self.__timeout)
+ except socket.timeout:
+ pass
+ except socket.error, exc:
+ log.warning('Got socket.error, will retry: %s', exc)
+ sleep(1)
def publish(self, name, payload):
with self.connection(self.__url, ssl=self.__connect_ssl) as connection:
diff -r a24a928c3b8c4e49bad89fb95496c6b1f78c23bc -r 22ec0ac1ebcd1cc5ae5fda4ec70723043e8d5c5f lib/galaxy/jobs/runners/lwr_client/manager.py
--- a/lib/galaxy/jobs/runners/lwr_client/manager.py
+++ b/lib/galaxy/jobs/runners/lwr_client/manager.py
@@ -78,7 +78,11 @@
self.url = kwds.get('url')
self.manager_name = kwds.get("manager", "_default_")
self.connect_ssl = parse_amqp_connect_ssl_params(kwds.get('amqp_connect_ssl_args', None))
- self.exchange = LwrExchange(self.url, self.manager_name, self.connect_ssl)
+ timeout = kwds.get('amqp_consumer_timeout', False)
+ if timeout is False:
+ self.exchange = LwrExchange(self.url, self.manager_name, self.connect_ssl)
+ else:
+ self.exchange = LwrExchange(self.url, self.manager_name, self.connect_ssl, timeout=timeout)
self.status_cache = {}
self.callback_lock = threading.Lock()
self.callback_thread = None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Fix for setting metadata when uploading directly to a library.
by commits-noreply@bitbucket.org 14 May '14
by commits-noreply@bitbucket.org 14 May '14
14 May '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a24a928c3b8c/
Changeset: a24a928c3b8c
User: dan
Date: 2014-05-14 23:26:41
Summary: Fix for setting metadata when uploading directly to a library.
Affected #: 1 file
diff -r c9ad7f354f48f546e11879ba864b60c48d6d52f8 -r a24a928c3b8c4e49bad89fb95496c6b1f78c23bc lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -1309,7 +1309,7 @@
config_file = self.app.config.config_file
if datatypes_config is None:
datatypes_config = self.app.datatypes_registry.integrated_datatypes_configs
- return self.external_output_metadata.setup_external_metadata( [ output_dataset_assoc.dataset for output_dataset_assoc in job.output_datasets ],
+ return self.external_output_metadata.setup_external_metadata( [ output_dataset_assoc.dataset for output_dataset_assoc in job.output_datasets + job.output_library_datasets ],
self.sa_session,
exec_dir=exec_dir,
tmp_dir=tmp_dir,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jmchilton: Remove stray log statements in previous commit.
by commits-noreply@bitbucket.org 14 May '14
by commits-noreply@bitbucket.org 14 May '14
14 May '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c9ad7f354f48/
Changeset: c9ad7f354f48
User: jmchilton
Date: 2014-05-14 23:13:32
Summary: Remove stray log statements in previous commit.
Affected #: 1 file
diff -r 170e2294712a2db7a703df8df62ace9a27fa1c9c -r c9ad7f354f48f546e11879ba864b60c48d6d52f8 lib/galaxy/workflow/extract.py
--- a/lib/galaxy/workflow/extract.py
+++ b/lib/galaxy/workflow/extract.py
@@ -99,8 +99,6 @@
raise AssertionError( "Attempt to create workflow with job not connected to current history" )
job = jobs_by_id[ job_id ]
tool_inputs, associations = step_inputs( trans, job )
- log.info("job %s has tool_inputs %s" % (job.id, tool_inputs) )
- log.info("associations are %s" % associations)
step = model.WorkflowStep()
step.type = 'tool'
step.tool_id = job.tool_id
@@ -114,7 +112,6 @@
input_collection = an_implicit_output_collection.find_implicit_input_collection( input_name )
if input_collection:
other_hid = input_collection.hid
- log.info("For input_name %s, have hid %s" % ( input_name, other_hid ) )
if other_hid in hid_to_output_pair:
other_step, other_name = hid_to_output_pair[ other_hid ]
conn = model.WorkflowStepConnection()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jmchilton: Bugfixes: More bug fixes for extracting collection-y workflows from a history.
by commits-noreply@bitbucket.org 14 May '14
by commits-noreply@bitbucket.org 14 May '14
14 May '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/170e2294712a/
Changeset: 170e2294712a
User: jmchilton
Date: 2014-05-14 23:06:33
Summary: Bugfixes: More bug fixes for extracting collection-y workflows from a history.
The embarrassment continues...
Affected #: 1 file
diff -r c0eb8b472f3ccabdfd1b0396dbf92c34f3140200 -r 170e2294712a2db7a703df8df62ace9a27fa1c9c lib/galaxy/workflow/extract.py
--- a/lib/galaxy/workflow/extract.py
+++ b/lib/galaxy/workflow/extract.py
@@ -5,6 +5,7 @@
from galaxy import model
from galaxy.tools.parameters.basic import (
DataToolParameter,
+ DataCollectionToolParameter,
DrillDownSelectToolParameter,
SelectToolParameter,
UnvalidatedValue
@@ -98,6 +99,8 @@
raise AssertionError( "Attempt to create workflow with job not connected to current history" )
job = jobs_by_id[ job_id ]
tool_inputs, associations = step_inputs( trans, job )
+ log.info("job %s has tool_inputs %s" % (job.id, tool_inputs) )
+ log.info("associations are %s" % associations)
step = model.WorkflowStep()
step.type = 'tool'
step.tool_id = job.tool_id
@@ -111,6 +114,7 @@
input_collection = an_implicit_output_collection.find_implicit_input_collection( input_name )
if input_collection:
other_hid = input_collection.hid
+ log.info("For input_name %s, have hid %s" % ( input_name, other_hid ) )
if other_hid in hid_to_output_pair:
other_step, other_name = hid_to_output_pair[ other_hid ]
conn = model.WorkflowStepConnection()
@@ -190,7 +194,6 @@
# just grab the implicitly mapped jobs and handle in second pass. Second pass is
# needed because cannot allow selection of individual datasets from an implicit
# mapping during extraction - you get the collection or nothing.
- implicit_outputs = []
for content in self.history.active_contents:
if content.history_content_type == "dataset_collection":
hid = content.hid
@@ -200,34 +203,32 @@
job = DatasetCollectionCreationJob( content )
self.jobs[ job ] = [ ( None, content ) ]
else:
- implicit_outputs.append( content )
+ dataset_collection = content
+ # TODO: Optimize db call
+ # TODO: Ensure this is deterministic, must get same job
+ # for each dataset collection.
+ dataset_instance = dataset_collection.collection.dataset_instances[ 0 ]
+ if not self.__check_state( dataset_instance ):
+ # Just checking the state of one instance, don't need more but
+ # makes me wonder if even need this check at all?
+ continue
+
+ job_hda = self.__original_hda( dataset_instance )
+ if not job_hda.creating_job_associations:
+ log.warn( "An implicitly create output dataset collection doesn't have a creating_job_association, should not happen!" )
+ job = DatasetCollectionCreationJob( dataset_collection )
+ self.jobs[ job ] = [ ( None, dataset_collection ) ]
+
+ for assoc in job_hda.creating_job_associations:
+ job = assoc.job
+ if job not in self.jobs or self.jobs[ job ][ 0 ][ 1 ].history_content_type == "dataset":
+ self.jobs[ job ] = [ ( assoc.name, dataset_collection ) ]
+ self.implicit_map_jobs.append( job )
+ else:
+ self.jobs[ job ].append( ( assoc.name, dataset_collection ) )
else:
self.__append_dataset( content )
- for dataset_collection in implicit_outputs:
- # TODO: Optimize db call
- # TODO: Ensure this is deterministic, must get same job
- # for each dataset collection.
- dataset_instance = dataset_collection.collection.dataset_instances[ 0 ]
- if not self.__check_state( dataset_instance ):
- # Just checking the state of one instance, don't need more but
- # makes me wonder if even need this check at all?
- continue
-
- job_hda = self.__original_hda( dataset_instance )
- if not job_hda.creating_job_associations:
- log.warn( "An implicitly create output dataset collection doesn't have a creating_job_association, should not happen!" )
- job = DatasetCollectionCreationJob( dataset_collection )
- self.jobs[ job ] = [ ( None, dataset_collection ) ]
-
- for assoc in job_hda.creating_job_associations:
- job = assoc.job
- if job not in self.jobs or self.jobs[ job ][ 0 ][ 1 ].history_content_type == "dataset":
- self.jobs[ job ] = [ ( assoc.name, dataset_collection ) ]
- self.implicit_map_jobs.append( job )
- else:
- self.jobs[ job ].append( ( assoc.name, dataset_collection ) )
-
def __append_dataset( self, dataset ):
if not self.__check_state( dataset ):
return
@@ -289,7 +290,7 @@
if isinstance( input, ( SelectToolParameter, DrillDownSelectToolParameter ) ):
if input.is_dynamic and not isinstance( values[key], UnvalidatedValue ):
values[key] = UnvalidatedValue( values[key] )
- if isinstance( input, DataToolParameter ):
+ if isinstance( input, DataToolParameter ) or isinstance( input, DataCollectionToolParameter ):
tmp = values[key]
values[key] = None
# HACK: Nested associations are not yet working, but we
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: natefoo: Correctly handle the case where the AMQP SSL connection param parser gets params=None.
by commits-noreply@bitbucket.org 14 May '14
by commits-noreply@bitbucket.org 14 May '14
14 May '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c0eb8b472f3c/
Changeset: c0eb8b472f3c
User: natefoo
Date: 2014-05-14 22:27:28
Summary: Correctly handle the case where the AMQP SSL connection param parser gets params=None.
Affected #: 1 file
diff -r b483d9df129c00480fc72253a277de18ab67b27d -r c0eb8b472f3ccabdfd1b0396dbf92c34f3140200 lib/galaxy/jobs/runners/lwr_client/util.py
--- a/lib/galaxy/jobs/runners/lwr_client/util.py
+++ b/lib/galaxy/jobs/runners/lwr_client/util.py
@@ -65,7 +65,9 @@
def parse_amqp_connect_ssl_params(params):
ssl = None
rval = None
- ssl_options = filter(lambda x: x.startswith('amqp_connect_ssl_'), params.keys())
+ ssl_options = []
+ if params:
+ ssl_options = filter(lambda x: x.startswith('amqp_connect_ssl_'), params.keys())
if ssl_options:
ssl = __import__('ssl')
rval = {}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: davebgx: Fix missing imports, add self.app in a few more places.
by commits-noreply@bitbucket.org 14 May '14
by commits-noreply@bitbucket.org 14 May '14
14 May '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b483d9df129c/
Changeset: b483d9df129c
User: davebgx
Date: 2014-05-14 21:19:48
Summary: Fix missing imports, add self.app in a few more places.
Affected #: 1 file
diff -r c0832d30e315c24dcc2cf23918e4b0bd7b30ba56 -r b483d9df129c00480fc72253a277de18ab67b27d lib/tool_shed/galaxy_install/tool_migration_manager.py
--- a/lib/tool_shed/galaxy_install/tool_migration_manager.py
+++ b/lib/tool_shed/galaxy_install/tool_migration_manager.py
@@ -19,6 +19,8 @@
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
from tool_shed.util import xml_util
+from tool_shed.galaxy_install.install_manager import InstallManager
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import TagManager
from galaxy.util.odict import odict
log = logging.getLogger( __name__ )
@@ -497,12 +499,12 @@
tool_dependency,
error_message,
remove_installation_path=False )
- if tool_dependency and tool_dependency.status in [ app.install_model.ToolDependency.installation_status.INSTALLED,
- app.install_model.ToolDependency.installation_status.ERROR ]:
+ if tool_dependency and tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.INSTALLED,
+ self.app.install_model.ToolDependency.installation_status.ERROR ]:
installed_tool_dependencies.append( tool_dependency )
- if app.config.manage_dependency_relationships:
+ if self.app.config.manage_dependency_relationships:
# Add the tool_dependency to the in-memory dictionaries in the installed_repository_manager.
- app.installed_repository_manager.handle_tool_dependency_install( tool_shed_repository, tool_dependency )
+ self.app.installed_repository_manager.handle_tool_dependency_install( tool_shed_repository, tool_dependency )
for installed_tool_dependency in installed_tool_dependencies:
if installed_tool_dependency.status == self.app.install_model.ToolDependency.installation_status.ERROR:
print '\nThe ToolMigrationManager returned the following error while installing tool dependency ', installed_tool_dependency.name, ':'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f95cf49907cc/
Changeset: f95cf49907cc
User: jmchilton
Date: 2014-05-14 21:08:03
Summary: Bugfix: Annotation, tags on collections update looking in old location.
Moved to collection instances instead of collections a little before PR opened.
Affected #: 1 file
diff -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 -r f95cf49907cca1c57a2fad9738e66c9aa5078fb9 lib/galaxy/dataset_collections/__init__.py
--- a/lib/galaxy/dataset_collections/__init__.py
+++ b/lib/galaxy/dataset_collections/__init__.py
@@ -138,10 +138,10 @@
changed = dataset_collection_instance.set_from_dict( new_data )
# the rest (often involving the trans) - do here
if 'annotation' in new_data.keys() and trans.get_user():
- dataset_collection_instance.add_item_annotation( trans.sa_session, trans.get_user(), dataset_collection_instance.collection, new_data[ 'annotation' ] )
+ dataset_collection_instance.add_item_annotation( trans.sa_session, trans.get_user(), dataset_collection_instance, new_data[ 'annotation' ] )
changed[ 'annotation' ] = new_data[ 'annotation' ]
if 'tags' in new_data.keys() and trans.get_user():
- self.set_tags_from_list( trans, dataset_collection_instance.collection, new_data[ 'tags' ], user=trans.user )
+ self.set_tags_from_list( trans, dataset_collection_instance, new_data[ 'tags' ], user=trans.user )
if changed.keys():
trans.sa_session.flush()
https://bitbucket.org/galaxy/galaxy-central/commits/c0832d30e315/
Changeset: c0832d30e315
User: jmchilton
Date: 2014-05-14 21:08:56
Summary: Merge.
Affected #: 1 file
diff -r f95cf49907cca1c57a2fad9738e66c9aa5078fb9 -r c0832d30e315c24dcc2cf23918e4b0bd7b30ba56 lib/tool_shed/galaxy_install/tool_migration_manager.py
--- a/lib/tool_shed/galaxy_install/tool_migration_manager.py
+++ b/lib/tool_shed/galaxy_install/tool_migration_manager.py
@@ -470,7 +470,7 @@
if index is not None:
tool_dependency = tool_dependencies[ index ]
tool_dependency, proceed_with_install, action_elem_tuples = \
- tag_manager.process_tag_set( trans.app,
+ tag_manager.process_tag_set( self.app,
tool_shed_repository,
tool_dependency,
elem,
@@ -480,7 +480,7 @@
tool_dependency_db_records=tool_dependencies )
if proceed_with_install:
try:
- tool_dependency = install_manager.install_package( trans.app,
+ tool_dependency = install_manager.install_package( self.app,
elem,
tool_shed_repository,
tool_dependencies=tool_dependencies,
@@ -493,7 +493,7 @@
# Since there was an installation error, update the tool dependency status to Error. The
# remove_installation_path option must be left False here.
tool_dependency = \
- tool_dependency_util.handle_tool_dependency_installation_error( trans.app,
+ tool_dependency_util.handle_tool_dependency_installation_error( self.app,
tool_dependency,
error_message,
remove_installation_path=False )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fix for tool migration manager to use self.app instead of trans.app.
by commits-noreply@bitbucket.org 14 May '14
by commits-noreply@bitbucket.org 14 May '14
14 May '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a4699a3d7c2d/
Changeset: a4699a3d7c2d
User: greg
Date: 2014-05-14 21:07:30
Summary: Fix for tool migration manager to use self.app instead of trans.app.
Affected #: 1 file
diff -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 -r a4699a3d7c2de78ad5aa5ea9e79409e85050fa9f lib/tool_shed/galaxy_install/tool_migration_manager.py
--- a/lib/tool_shed/galaxy_install/tool_migration_manager.py
+++ b/lib/tool_shed/galaxy_install/tool_migration_manager.py
@@ -470,7 +470,7 @@
if index is not None:
tool_dependency = tool_dependencies[ index ]
tool_dependency, proceed_with_install, action_elem_tuples = \
- tag_manager.process_tag_set( trans.app,
+ tag_manager.process_tag_set( self.app,
tool_shed_repository,
tool_dependency,
elem,
@@ -480,7 +480,7 @@
tool_dependency_db_records=tool_dependencies )
if proceed_with_install:
try:
- tool_dependency = install_manager.install_package( trans.app,
+ tool_dependency = install_manager.install_package( self.app,
elem,
tool_shed_repository,
tool_dependencies=tool_dependencies,
@@ -493,7 +493,7 @@
# Since there was an installation error, update the tool dependency status to Error. The
# remove_installation_path option must be left False here.
tool_dependency = \
- tool_dependency_util.handle_tool_dependency_installation_error( trans.app,
+ tool_dependency_util.handle_tool_dependency_installation_error( self.app,
tool_dependency,
error_message,
remove_installation_path=False )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.
1
0
14 May '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6fecbad49afd/
Changeset: 6fecbad49afd
User: greg
Date: 2014-05-14 20:55:31
Summary: Phase 2 of the tool dependency package installation framework rewrite: 1) rename the RecipeManager class to be the StepManager class and add a new TagManager class, both of which are contained in the recipe_manager.py module. 2) Add appropriate new classes for handling recipe tag sets to a new tag_handler.py module. Add a new InstallManager class with functions for installing tool dependencies. Eliminate the use of fabric_util.py and install_util.py.
Affected #: 18 files
diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -23,8 +23,11 @@
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
from tool_shed.util import workflow_util
+from tool_shed.util import xml_util
from tool_shed.galaxy_install import repository_util
import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids
+from tool_shed.galaxy_install.install_manager import InstallManager
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import TagManager
import pkg_resources
eggs.require( 'mercurial' )
@@ -444,20 +447,74 @@
@web.expose
@web.require_admin
def initiate_tool_dependency_installation( self, trans, tool_dependencies, **kwd ):
- """Install specified dependencies for repository tools."""
+ """
+ Install specified dependencies for repository tools. The received list of tool_dependencies
+ are the database records for those dependencies defined in the tool_dependencies.xml file
+ (contained in the repository) that should be installed. This allows for filtering out dependencies
+ that have not been checked for installation on the 'Manage tool dependencies' page for an installed
+ tool shed repository.
+ """
# Get the tool_shed_repository from one of the tool_dependencies.
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
err_msg = ''
+ attr_tups_of_dependencies_for_install = [ ( td.name, td.version, td.type ) for td in tool_dependencies ]
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( suc.TOOL_DEPENDENCY_DEFINITION_FILENAME,
tool_shed_repository.repo_path( trans.app ) )
- installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_dependencies,
- from_tool_migration_manager=False )
+ # Parse the tool_dependencies.xml config.
+ tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+ installed_tool_dependencies = []
+ install_manager = InstallManager()
+ tag_manager = TagManager()
+ root = tree.getroot()
+ for elem in root:
+ package_name = elem.get( 'name', None )
+ package_version = elem.get( 'version', None )
+ if package_name and package_version:
+ # elem is a package tag set.
+ attr_tup = ( package_name, package_version, 'package' )
+ try:
+ index = attr_tups_of_dependencies_for_install.index( attr_tup )
+ except Exception, e:
+ index = None
+ if index is not None:
+ tool_dependency = tool_dependencies[ index ]
+ tool_dependency, proceed_with_install, action_elem_tuples = \
+ tag_manager.process_tag_set( trans.app,
+ tool_shed_repository,
+ tool_dependency,
+ elem,
+ package_name,
+ package_version,
+ from_tool_migration_manager=False,
+ tool_dependency_db_records=tool_dependencies )
+ if proceed_with_install:
+ try:
+ tool_dependency = install_manager.install_package( trans.app,
+ elem,
+ tool_shed_repository,
+ tool_dependencies=tool_dependencies,
+ from_tool_migration_manager=False )
+ except Exception, e:
+ error_message = "Error installing tool dependency package %s version %s: %s" % \
+ ( str( package_name ), str( package_version ), str( e ) )
+ log.exception( error_message )
+ if tool_dependency:
+ # Since there was an installation error, update the tool dependency status to Error. The
+ # remove_installation_path option must be left False here.
+ tool_dependency = \
+ tool_dependency_util.handle_tool_dependency_installation_error( trans.app,
+ tool_dependency,
+ error_message,
+ remove_installation_path=False )
+ if tool_dependency and tool_dependency.status in [ trans.app.install_model.ToolDependency.installation_status.INSTALLED,
+ trans.app.install_model.ToolDependency.installation_status.ERROR ]:
+ installed_tool_dependencies.append( tool_dependency )
+ if trans.app.config.manage_dependency_relationships:
+ # Add the tool_dependency to the in-memory dictionaries in the installed_repository_manager.
+ trans.app.installed_repository_manager.handle_tool_dependency_install( tool_shed_repository, tool_dependency )
for installed_tool_dependency in installed_tool_dependencies:
if installed_tool_dependency.status == trans.app.install_model.ToolDependency.installation_status.ERROR:
text = util.unicodify( installed_tool_dependency.error_message )
diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
--- a/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
+++ b/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
@@ -185,8 +185,8 @@
operation='install latest revision' ) ),
grids.GridOperation( label="Install",
condition=( lambda item: \
- not item.deleted and \
- item.status == tool_shed_install.ToolShedRepository.installation_status.NEW ),
+ not item.deleted and \
+ item.status == tool_shed_install.ToolShedRepository.installation_status.NEW ),
allow_multiple=False,
url_args=dict( controller='admin_toolshed',
action='manage_repository',
@@ -196,7 +196,7 @@
not item.deleted and \
item.status not in \
[ tool_shed_install.ToolShedRepository.installation_status.ERROR,
- tool_shed_install.ToolShedRepository.installation_status.NEW ] ),
+ tool_shed_install.ToolShedRepository.installation_status.NEW ] ),
allow_multiple=False,
url_args=dict( controller='admin_toolshed',
action='browse_repositories',
diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -1,8 +1,318 @@
import logging
import os
+from galaxy import eggs
+
+eggs.require( 'paramiko' )
+eggs.require( 'ssh' )
+eggs.require( 'Fabric' )
+
+from fabric.api import lcd
+
+from tool_shed.util import tool_dependency_util
+
+from tool_shed.galaxy_install.tool_dependencies import td_common_util
+from tool_shed.galaxy_install.tool_dependencies.recipe.env_file_builder import EnvFileBuilder
+from tool_shed.galaxy_install.tool_dependencies.recipe.install_environment import InstallEnvironment
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import StepManager
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import TagManager
+
log = logging.getLogger( __name__ )
+INSTALL_ACTIONS = [ 'download_binary', 'download_by_url', 'download_file', 'setup_perl_environmnet',
+ 'setup_r_environmnet', 'setup_ruby_environmnet', 'shell_command' ]
+
class InstallManager( object ):
- pass
\ No newline at end of file
+
+ def get_tool_shed_repository_install_dir( self, app, tool_shed_repository ):
+ return os.path.abspath( tool_shed_repository.repo_files_directory( app ) )
+
+ def install_and_build_package( self, app, tool_shed_repository, tool_dependency, actions_dict ):
+ """Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
+ tool_shed_repository_install_dir = self.get_tool_shed_repository_install_dir( app, tool_shed_repository )
+ install_dir = actions_dict[ 'install_dir' ]
+ package_name = actions_dict[ 'package_name' ]
+ actions = actions_dict.get( 'actions', None )
+ filtered_actions = []
+ env_file_builder = EnvFileBuilder( install_dir )
+ install_environment = InstallEnvironment( tool_shed_repository_install_dir=tool_shed_repository_install_dir,
+ install_dir=install_dir )
+ step_manager = StepManager()
+ if actions:
+ with install_environment.make_tmp_dir() as work_dir:
+ with lcd( work_dir ):
+ # The first action in the list of actions will be the one that defines the initial download process.
+ # There are currently three supported actions; download_binary, download_by_url and clone via a
+ # shell_command action type. The recipe steps will be filtered at this stage in the process, with
+ # the filtered actions being used in the next stage below. The installation directory (i.e., dir)
+ # is also defined in this stage and is used in the next stage below when defining current_dir.
+ action_type, action_dict = actions[ 0 ]
+ if action_type in INSTALL_ACTIONS:
+ # Some of the parameters passed here are needed only by a subset of the step handler classes,
+ # but to allow for a standard method signature we'll pass them along. We don't check the
+ # tool_dependency status in this stage because it should not have been changed based on a
+ # download.
+ tool_dependency, filtered_actions, dir = \
+ step_manager.execute_step( app=app,
+ tool_dependency=tool_dependency,
+ package_name=package_name,
+ actions=actions,
+ action_type=action_type,
+ action_dict=action_dict,
+ filtered_actions=filtered_actions,
+ env_file_builder=env_file_builder,
+ install_environment=install_environment,
+ work_dir=work_dir,
+ current_dir=None,
+ initial_download=True )
+ else:
+ # We're handling a complex repository dependency where we only have a set_environment tag set.
+ # <action type="set_environment">
+ # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+ # </action>
+ filtered_actions = [ a for a in actions ]
+ dir = install_dir
+ # We're in stage 2 of the installation process. The package has been down-loaded, so we can
+ # now perform all of the actions defined for building it.
+ for action_tup in filtered_actions:
+ current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
+ with lcd( current_dir ):
+ action_type, action_dict = action_tup
+ tool_dependency, tmp_filtered_actions, tmp_dir = \
+ step_manager.execute_step( app=app,
+ tool_dependency=tool_dependency,
+ package_name=package_name,
+ actions=actions,
+ action_type=action_type,
+ action_dict=action_dict,
+ filtered_actions=filtered_actions,
+ env_file_builder=env_file_builder,
+ install_environment=install_environment,
+ work_dir=work_dir,
+ current_dir=current_dir,
+ initial_download=False )
+ if tool_dependency.status in [ app.install_model.ToolDependency.installation_status.ERROR ]:
+ # If the tool_dependency status is in an error state, return it with no additional
+ # processing.
+ return tool_dependency
+ # Make sure to handle the special case where the value of dir is reset (this happens when
+ # the action_type is change_directiory). In all other action types, dir will be returned as
+ # None.
+ if tmp_dir is not None:
+ dir = tmp_dir
+ return tool_dependency
+
+ def install_and_build_package_via_fabric( self, app, tool_shed_repository, tool_dependency, actions_dict ):
+ sa_session = app.install_model.context
+ try:
+ # There is currently only one fabric method.
+ tool_dependency = self.install_and_build_package( app, tool_shed_repository, tool_dependency, actions_dict )
+ except Exception, e:
+ log.exception( 'Error installing tool dependency %s version %s.', str( tool_dependency.name ), str( tool_dependency.version ) )
+ # Since there was an installation error, update the tool dependency status to Error. The remove_installation_path option must
+ # be left False here.
+ error_message = '%s\n%s' % ( td_common_util.format_traceback(), str( e ) )
+ tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app,
+ tool_dependency,
+ error_message,
+ remove_installation_path=False )
+ tool_dependency = tool_dependency_util.mark_tool_dependency_installed( app, tool_dependency )
+ return tool_dependency
+
+ def install_via_fabric( self, app, tool_shed_repository, tool_dependency, install_dir, package_name=None, custom_fabfile_path=None,
+ actions_elem=None, action_elem=None, **kwd ):
+ """
+ Parse a tool_dependency.xml file's <actions> tag set to gather information for installation using
+ self.install_and_build_package(). The use of fabric is being eliminated, so some of these functions
+ may need to be renamed at some point.
+ """
+ sa_session = app.install_model.context
+ if not os.path.exists( install_dir ):
+ os.makedirs( install_dir )
+ actions_dict = dict( install_dir=install_dir )
+ if package_name:
+ actions_dict[ 'package_name' ] = package_name
+ actions = []
+ is_binary_download = False
+ if actions_elem is not None:
+ elems = actions_elem
+ if elems.get( 'os' ) is not None and elems.get( 'architecture' ) is not None:
+ is_binary_download = True
+ elif action_elem is not None:
+ # We were provided with a single <action> element to perform certain actions after a platform-specific tarball was downloaded.
+ elems = [ action_elem ]
+ else:
+ elems = []
+ step_manager = StepManager()
+ tool_shed_repository_install_dir = self.get_tool_shed_repository_install_dir( app, tool_shed_repository )
+ install_environment = InstallEnvironment( tool_shed_repository_install_dir, install_dir )
+ for action_elem in elems:
+ # Make sure to skip all comments, since they are now included in the XML tree.
+ if action_elem.tag != 'action':
+ continue
+ action_dict = {}
+ action_type = action_elem.get( 'type', None )
+ if action_type is not None:
+ action_dict = step_manager.prepare_step( app=app,
+ tool_dependency=tool_dependency,
+ action_type=action_type,
+ action_elem=action_elem,
+ action_dict=action_dict,
+ install_environment=install_environment,
+ is_binary_download=is_binary_download )
+ action_tuple = ( action_type, action_dict )
+ if action_type == 'set_environment':
+ if action_tuple not in actions:
+ actions.append( action_tuple )
+ else:
+ actions.append( action_tuple )
+ if actions:
+ actions_dict[ 'actions' ] = actions
+ if custom_fabfile_path is not None:
+ # TODO: this is not yet supported or functional, but when it is handle it using the fabric api.
+ raise Exception( 'Tool dependency installation using proprietary fabric scripts is not yet supported.' )
+ else:
+ tool_dependency = self.install_and_build_package_via_fabric( app, tool_shed_repository, tool_dependency, actions_dict )
+ return tool_dependency
+
+ def install_package( self, app, elem, tool_shed_repository, tool_dependencies=None, from_tool_migration_manager=False ):
+ """
+ Install a tool dependency package defined by the XML element elem. The value of tool_dependencies is
+ a partial or full list of ToolDependency records associated with the tool_shed_repository.
+ """
+ tag_manager = TagManager()
+ sa_session = app.install_model.context
+ # The value of package_name should match the value of the "package" type in the tool config's
+ # <requirements> tag set, but it's not required.
+ package_name = elem.get( 'name', None )
+ package_version = elem.get( 'version', None )
+ if tool_dependencies and package_name and package_version:
+ tool_dependency = None
+ for tool_dependency in tool_dependencies:
+ if package_name == str( tool_dependency.name ) and package_version == str( tool_dependency.version ):
+ break
+ if tool_dependency is not None:
+ for package_elem in elem:
+ tool_dependency, proceed_with_install, actions_elem_tuples = \
+ tag_manager.process_tag_set( app,
+ tool_shed_repository,
+ tool_dependency,
+ package_elem,
+ package_name,
+ package_version,
+ from_tool_migration_manager=from_tool_migration_manager,
+ tool_dependency_db_records=None )
+ if proceed_with_install and actions_elem_tuples:
+ # Get the installation directory for tool dependencies that will be installed for the received
+ # tool_shed_repository.
+ install_dir = \
+ tool_dependency_util.get_tool_dependency_install_dir( app=app,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dependency_type='package',
+ tool_dependency_name=package_name,
+ tool_dependency_version=package_version )
+ # At this point we have a list of <actions> elems that are either defined within an <actions_group>
+ # tag set with <actions> sub-elements that contains os and architecture attributes filtered by the
+ # platform into which the appropriate compiled binary will be installed, or not defined within an
+ # <actions_group> tag set and not filtered. Here is an example actions_elem_tuple.
+ # [(True, [<Element 'actions' at 0x109293d10>)]
+ binary_installed = False
+ for actions_elem_tuple in actions_elem_tuples:
+ in_actions_group, actions_elems = actions_elem_tuple
+ if in_actions_group:
+ # Platform matching is only performed inside <actions_group> tag sets, os and architecture
+ # attributes are otherwise ignored.
+ can_install_from_source = False
+ for actions_elem in actions_elems:
+ system = actions_elem.get( 'os' )
+ architecture = actions_elem.get( 'architecture' )
+ # If this <actions> element has the os and architecture attributes defined, then we only
+ # want to process until a successful installation is achieved.
+ if system and architecture:
+ # If an <actions> tag has been defined that matches our current platform, and the
+ # recipe specified within that <actions> tag has been successfully processed, skip
+ # any remaining platform-specific <actions> tags. We cannot break out of the loop
+ # here because there may be <action> tags at the end of the <actions_group> tag set
+ # that must be processed.
+ if binary_installed:
+ continue
+ # No platform-specific <actions> recipe has yet resulted in a successful installation.
+ tool_dependency = self.install_via_fabric( app,
+ tool_shed_repository,
+ tool_dependency,
+ install_dir,
+ package_name=package_name,
+ actions_elem=actions_elem,
+ action_elem=None )
+ if tool_dependency.status == app.install_model.ToolDependency.installation_status.INSTALLED:
+ # If an <actions> tag was found that matches the current platform, and
+ # self.install_via_fabric() did not result in an error state, set binary_installed
+ # to True in order to skip any remaining platform-specific <actions> tags.
+ binary_installed = True
+ else:
+ # Process the next matching <actions> tag, or any defined <actions> tags that do not
+ # contain platform dependent recipes.
+ log.debug( 'Error downloading binary for tool dependency %s version %s: %s' % \
+ ( str( package_name ), str( package_version ), str( tool_dependency.error_message ) ) )
+ else:
+ if actions_elem.tag == 'actions':
+ # We've reached an <actions> tag that defines the recipe for installing and compiling from
+ # source. If binary installation failed, we proceed with the recipe.
+ if not binary_installed:
+ installation_directory = tool_dependency.installation_directory( app )
+ if os.path.exists( installation_directory ):
+ # Delete contents of installation directory if attempt at binary installation failed.
+ installation_directory_contents = os.listdir( installation_directory )
+ if installation_directory_contents:
+ removed, error_message = tool_dependency_util.remove_tool_dependency( app, tool_dependency )
+ if removed:
+ can_install_from_source = True
+ else:
+ log.debug( 'Error removing old files from installation directory %s: %s' % \
+ ( str( tool_dependency.installation_directory( app ), str( error_message ) ) ) )
+ else:
+ can_install_from_source = True
+ else:
+ can_install_from_source = True
+ if can_install_from_source:
+ # We now know that binary installation was not successful, so proceed with the <actions>
+ # tag set that defines the recipe to install and compile from source.
+ log.debug( 'Proceeding with install and compile recipe for tool dependency %s.' % \
+ str( tool_dependency.name ) )
+ tool_dependency = self.install_via_fabric( app,
+ tool_shed_repository,
+ tool_dependency,
+ install_dir,
+ package_name=package_name,
+ actions_elem=actions_elem,
+ action_elem=None )
+ if actions_elem.tag == 'action' and \
+ tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR:
+ # If the tool dependency is not in an error state, perform any final actions that have been
+ # defined within the actions_group tag set, but outside of an <actions> tag, which defines
+ # the recipe for installing and compiling from source.
+ tool_dependency = self.install_via_fabric( app,
+ tool_shed_repository,
+ tool_dependency,
+ install_dir,
+ package_name=package_name,
+ actions_elem=None,
+ action_elem=actions_elem )
+ else:
+ # Checks for "os" and "architecture" attributes are not made for any <actions> tag sets outside of
+ # an <actions_group> tag set. If the attributes are defined, they will be ignored. All <actions> tags
+ # outside of an <actions_group> tag set will always be processed.
+ tool_dependency = self.install_via_fabric( app,
+ tool_shed_repository,
+ tool_dependency,
+ install_dir,
+ package_name=package_name,
+ actions_elem=actions_elems,
+ action_elem=None )
+ if tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR:
+ log.debug( 'Tool dependency %s version %s has been installed in %s.' % \
+ ( str( package_name ), str( package_version ), str( install_dir ) ) )
+ return tool_dependency
diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -20,7 +20,10 @@
from tool_shed.util import metadata_util
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
-from xml.etree import ElementTree as XmlET
+from tool_shed.util import xml_util
+
+from tool_shed.galaxy_install.install_manager import InstallManager
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import TagManager
from galaxy import eggs
eggs.require( 'mercurial' )
@@ -621,11 +624,58 @@
trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )
- installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_shed_repository.tool_dependencies,
- from_tool_migration_manager=False )
+ # Parse the tool_dependencies.xml config.
+ tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+ install_manager = InstallManager()
+ tag_manager = TagManager()
+ root = tree.getroot()
+ for elem in root:
+ package_name = elem.get( 'name', None )
+ package_version = elem.get( 'version', None )
+ if package_name and package_version:
+ repository_tool_dependencies = util.listify( tool_shed_repository.tool_dependencies )
+ # elem is a package tag set.
+ attr_tups_of_dependencies_for_install = [ ( td.name, td.version, td.type ) for td in repository_tool_dependencies ]
+ attr_tup = ( package_name, package_version, 'package' )
+ try:
+ index = attr_tups_of_dependencies_for_install.index( attr_tup )
+ except Exception, e:
+ index = None
+ if index is not None:
+ tool_dependency = repository_tool_dependencies[ index ]
+ tool_dependency, proceed_with_install, action_elem_tuples = \
+ tag_manager.process_tag_set( trans.app,
+ tool_shed_repository,
+ tool_dependency,
+ elem,
+ package_name,
+ package_version,
+ from_tool_migration_manager=False,
+ tool_dependency_db_records=repository_tool_dependencies )
+ if proceed_with_install:
+ try:
+ tool_dependency = install_manager.install_package( trans.app,
+ elem,
+ tool_shed_repository,
+ tool_dependencies=repository_tool_dependencies,
+ from_tool_migration_manager=False )
+ except Exception, e:
+ error_message = "Error installing tool dependency package %s version %s: %s" % \
+ ( str( package_name ), str( package_version ), str( e ) )
+ log.exception( error_message )
+ if tool_dependency:
+ # Since there was an installation error, update the tool dependency status to Error. The
+ # remove_installation_path option must be left False here.
+ tool_dependency = \
+ tool_dependency_util.handle_tool_dependency_installation_error( trans.app,
+ tool_dependency,
+ error_message,
+ remove_installation_path=False )
+ if tool_dependency and tool_dependency.status in [ trans.app.install_model.ToolDependency.installation_status.INSTALLED,
+ trans.app.install_model.ToolDependency.installation_status.ERROR ]:
+ if trans.app.config.manage_dependency_relationships:
+ # Add the tool_dependency to the in-memory dictionaries in the installed_repository_manager.
+ trans.app.installed_repository_manager.handle_tool_dependency_install( tool_shed_repository, tool_dependency )
suc.remove_dir( work_dir )
suc.update_tool_shed_repository_status( trans.app,
tool_shed_repository,
@@ -877,11 +927,61 @@
trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repository.repo_path( trans.app ) )
- installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
- tool_shed_repository=repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=repository.tool_dependencies,
- from_tool_migration_manager=False )
+ installed_tool_dependencies = []
+ # Parse the tool_dependencies.xml config.
+ tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+ if tree is None:
+ return installed_tool_dependencies
+ install_manager = InstallManager()
+ tag_manager = TagManager()
+ root = tree.getroot()
+ for elem in root:
+ package_name = elem.get( 'name', None )
+ package_version = elem.get( 'version', None )
+ if package_name and package_version:
+ # elem is a package tag set.
+ attr_tups_of_dependencies_for_install = [ ( td.name, td.version, td.type ) for td in tool_dependencies ]
+ attr_tup = ( package_name, package_version, 'package' )
+ try:
+ index = attr_tups_of_dependencies_for_install.index( attr_tup )
+ except Exception, e:
+ index = None
+ if index is not None:
+ tool_dependency = tool_dependency_db_records[ index ]
+ tool_dependency, proceed_with_install, action_elem_tuples = \
+ tag_manager.process_tag_set( trans.app,
+ tool_shed_repository,
+ tool_dependency,
+ elem,
+ package_name,
+ package_version,
+ from_tool_migration_manager=False,
+ tool_dependency_db_records=tool_dependencies )
+ if proceed_with_install:
+ try:
+ tool_dependency = install_manager.install_package( trans.app,
+ elem,
+ tool_shed_repository,
+ tool_dependencies=tool_dependencies,
+ from_tool_migration_manager=False )
+ except Exception, e:
+ error_message = "Error installing tool dependency package %s version %s: %s" % \
+ ( str( package_name ), str( package_version ), str( e ) )
+ log.exception( error_message )
+ if tool_dependency:
+ # Since there was an installation error, update the tool dependency status to Error. The
+ # remove_installation_path option must be left False here.
+ tool_dependency = \
+ tool_dependency_util.handle_tool_dependency_installation_error( trans.app,
+ tool_dependency,
+ error_message,
+ remove_installation_path=False )
+ if tool_dependency and tool_dependency.status in [ app.install_model.ToolDependency.installation_status.INSTALLED,
+ app.install_model.ToolDependency.installation_status.ERROR ]:
+ installed_tool_dependencies.append( tool_dependency )
+ if app.config.manage_dependency_relationships:
+ # Add the tool_dependency to the in-memory dictionaries in the installed_repository_manager.
+ app.installed_repository_manager.handle_tool_dependency_install( tool_shed_repository, tool_dependency )
for installed_tool_dependency in installed_tool_dependencies:
if installed_tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.ERROR ]:
repair_dict = add_repair_dict_entry( repository.name, installed_tool_dependency.error_message )
diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ /dev/null
@@ -1,103 +0,0 @@
-import logging
-import os
-
-from galaxy import eggs
-
-eggs.require( 'paramiko' )
-eggs.require( 'ssh' )
-eggs.require( 'Fabric' )
-
-from fabric.api import env
-from fabric.api import lcd
-
-from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import EnvFileBuilder
-from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import InstallEnvironment
-from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import RecipeManager
-
-log = logging.getLogger( __name__ )
-
-INSTALL_ACTIONS = [ 'download_binary', 'download_by_url', 'download_file', 'setup_perl_environmnet',
- 'setup_r_environmnet', 'setup_ruby_environmnet', 'shell_command' ]
-
-def check_fabric_version():
- version = env.version
- if int( version.split( "." )[ 0 ] ) < 1:
- raise NotImplementedError( "Install Fabric version 1.0 or later." )
-
-def get_tool_shed_repository_install_dir( app, tool_shed_repository ):
- return os.path.abspath( tool_shed_repository.repo_files_directory( app ) )
-
-def install_and_build_package( app, tool_shed_repository, tool_dependency, actions_dict ):
- """Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
- tool_shed_repository_install_dir = get_tool_shed_repository_install_dir( app, tool_shed_repository )
- install_dir = actions_dict[ 'install_dir' ]
- package_name = actions_dict[ 'package_name' ]
- actions = actions_dict.get( 'actions', None )
- filtered_actions = []
- env_file_builder = EnvFileBuilder( install_dir )
- install_environment = InstallEnvironment( tool_shed_repository_install_dir=tool_shed_repository_install_dir,
- install_dir=install_dir )
- recipe_manager = RecipeManager()
- if actions:
- with install_environment.make_tmp_dir() as work_dir:
- with lcd( work_dir ):
- # The first action in the list of actions will be the one that defines the initial download process.
- # There are currently three supported actions; download_binary, download_by_url and clone via a
- # shell_command action type. The recipe steps will be filtered at this stage in the process, with
- # the filtered actions being used in the next stage below. The installation directory (i.e., dir)
- # is also defined in this stage and is used in the next stage below when defining current_dir.
- action_type, action_dict = actions[ 0 ]
- if action_type in INSTALL_ACTIONS:
- # Some of the parameters passed here are needed only by a subset of the step handler classes,
- # but to allow for a standard method signature we'll pass them along. We don't check the
- # tool_dependency status in this stage because it should not have been changed based on a
- # download.
- tool_dependency, filtered_actions, dir = \
- recipe_manager.execute_step( app=app,
- tool_dependency=tool_dependency,
- package_name=package_name,
- actions=actions,
- action_type=action_type,
- action_dict=action_dict,
- filtered_actions=filtered_actions,
- env_file_builder=env_file_builder,
- install_environment=install_environment,
- work_dir=work_dir,
- current_dir=None,
- initial_download=True )
- else:
- # We're handling a complex repository dependency where we only have a set_environment tag set.
- # <action type="set_environment">
- # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
- # </action>
- filtered_actions = [ a for a in actions ]
- dir = install_dir
- # We're in stage 2 of the installation process. The package has been down-loaded, so we can
- # now perform all of the actions defined for building it.
- for action_tup in filtered_actions:
- current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
- with lcd( current_dir ):
- action_type, action_dict = action_tup
- tool_dependency, tmp_filtered_actions, tmp_dir = \
- recipe_manager.execute_step( app=app,
- tool_dependency=tool_dependency,
- package_name=package_name,
- actions=actions,
- action_type=action_type,
- action_dict=action_dict,
- filtered_actions=filtered_actions,
- env_file_builder=env_file_builder,
- install_environment=install_environment,
- work_dir=work_dir,
- current_dir=current_dir,
- initial_download=False )
- if tool_dependency.status in [ app.install_model.ToolDependency.installation_status.ERROR ]:
- # If the tool_dependency status is in an error state, return it with no additional
- # processing.
- return tool_dependency
- # Make sure to handle the special case where the value of dir is reset (this happens when
- # the action_type is change_directiory). In all other action types, dir will be returned as
- # None.
- if tmp_dir is not None:
- dir = tmp_dir
- return tool_dependency
diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ /dev/null
@@ -1,734 +0,0 @@
-import logging
-import os
-import shutil
-import stat
-import subprocess
-import sys
-import tempfile
-import fabric_util
-import td_common_util
-from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import EnvFileBuilder
-from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import InstallEnvironment
-from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import RecipeManager
-import tool_shed.util.shed_util_common as suc
-from tool_shed.util import common_util
-from tool_shed.util import encoding_util
-from tool_shed.util import tool_dependency_util
-from tool_shed.util import xml_util
-from tool_shed.galaxy_install.tool_dependencies import td_common_util
-from galaxy.model.orm import and_
-from galaxy.util import asbool
-from galaxy.util import listify
-
-log = logging.getLogger( __name__ )
-
-def create_temporary_tool_dependencies_config( app, tool_shed_url, name, owner, changeset_revision ):
- """Make a call to the tool shed to get the required repository's tool_dependencies.xml file."""
- tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
- params = '?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision )
- url = common_util.url_join( tool_shed_url,
- 'repository/get_tool_dependencies_config_contents%s' % params )
- text = common_util.tool_shed_get( app, tool_shed_url, url )
- if text:
- # Write the contents to a temporary file on disk so it can be reloaded and parsed.
- fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-cttdc" )
- tmp_filename = fh.name
- fh.close()
- fh = open( tmp_filename, 'wb' )
- fh.write( text )
- fh.close()
- return tmp_filename
- else:
- message = "Unable to retrieve required tool_dependencies.xml file from the tool shed for revision "
- message += "%s of installed repository %s owned by %s." % ( str( changeset_revision ), str( name ), str( owner ) )
- raise Exception( message )
- return None
-
-def create_tool_dependency_with_initialized_env_sh_file( app, dependent_install_dir, tool_shed_repository, required_repository, package_name,
- package_version, tool_dependencies_config ):
- """
- Create or get a tool_dependency record that is defined by the received package_name and package_version. An env.sh file will be
- created for the tool_dependency in the received dependent_install_dir.
- """
-    # The received required_repository refers to a tool_shed_repository record that is defined as a complex repository dependency for this
- # tool_dependency. The required_repository may or may not be currently installed (it doesn't matter). If it is installed, it is
- # associated with a tool_dependency that has an env.sh file that this new tool_dependency must be able to locate and "source". If it
- # is not installed, we can still determine where that env.sh file will be, so we'll initialize this new tool_dependency's env.sh file
-    # in either case. If the required repository ends up with an installation error, this new tool dependency will still be fine because its
- # containing repository will be defined as missing dependencies.
- tool_dependencies = []
- if not os.path.exists( dependent_install_dir ):
- os.makedirs( dependent_install_dir )
- required_tool_dependency_env_file_path = None
- if tool_dependencies_config:
- required_td_tree, error_message = xml_util.parse_xml( tool_dependencies_config )
- if required_td_tree:
- required_td_root = required_td_tree.getroot()
- for required_td_elem in required_td_root:
- # Find the appropriate package name and version.
- if required_td_elem.tag == 'package':
- # <package name="bwa" version="0.5.9">
- required_td_package_name = required_td_elem.get( 'name', None )
- required_td_package_version = required_td_elem.get( 'version', None )
- # Check the database to see if we have a record for the required tool dependency (we may not which is ok). If we
- # find a record, we need to see if it is in an error state and if so handle it appropriately.
- required_tool_dependency = \
- tool_dependency_util.get_tool_dependency_by_name_version_type_repository( app,
- required_repository,
- required_td_package_name,
- required_td_package_version,
- 'package' )
- if required_td_package_name == package_name and required_td_package_version == package_version:
- # Get or create a database tool_dependency record with which the installed package on disk will be associated.
- tool_dependency = \
- tool_dependency_util.create_or_update_tool_dependency( app=app,
- tool_shed_repository=tool_shed_repository,
- name=package_name,
- version=package_version,
- type='package',
- status=app.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
- set_status=True )
- # Create an env.sh file for the tool_dependency whose first line will source the env.sh file located in
- # the path defined by required_tool_dependency_env_file_path. It doesn't matter if the required env.sh
-                        # file currently exists.
- required_tool_dependency_env_file_path = \
- tool_dependency_util.get_required_repository_package_env_sh_path( app,
- package_name,
- package_version,
- required_repository )
- env_file_builder = EnvFileBuilder( tool_dependency.installation_directory( app ) )
- env_file_builder.append_line( action="source", value=required_tool_dependency_env_file_path )
- return_code = env_file_builder.return_code
- if return_code:
- error_message = 'Error defining env.sh file for package %s, return_code: %s' % \
- ( str( package_name ), str( return_code ) )
- tool_dependency = \
- tool_dependency_util.handle_tool_dependency_installation_error( app,
- tool_dependency,
- error_message,
- remove_installation_path=False )
- elif required_tool_dependency is not None and required_tool_dependency.in_error_state:
- error_message = "This tool dependency's required tool dependency %s version %s has status %s." % \
- ( str( required_tool_dependency.name ), str( required_tool_dependency.version ), str( required_tool_dependency.status ) )
- tool_dependency = \
- tool_dependency_util.handle_tool_dependency_installation_error( app,
- tool_dependency,
- error_message,
- remove_installation_path=False )
- else:
- tool_dependency = \
- tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=app.install_model.ToolDependency.installation_status.INSTALLED )
- tool_dependencies.append( tool_dependency )
- return tool_dependencies
-
-def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ):
- """Return the absolute path to a specified disk file contained in a repository."""
- stripped_file_name = strip_path( file_name )
- file_path = None
- for root, dirs, files in os.walk( repo_files_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == stripped_file_name:
- return os.path.abspath( os.path.join( root, name ) )
- return file_path
-
-def get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app, tool_shed_url, name, owner, changeset_revision ):
- sa_session = app.install_model.context
- # The protocol is not stored, but the port is if it exists.
- tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed_url )
- tool_shed_repository = sa_session.query( app.install_model.ToolShedRepository ) \
- .filter( and_( app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed,
- app.install_model.ToolShedRepository.table.c.name == name,
- app.install_model.ToolShedRepository.table.c.owner == owner,
- app.install_model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
- .first()
- if tool_shed_repository:
- return tool_shed_repository
- # The tool_shed_repository must have been updated to a newer changeset revision than the one defined in the repository_dependencies.xml file,
- # so call the tool shed to get all appropriate newer changeset revisions.
- text = get_updated_changeset_revisions_from_tool_shed( app, tool_shed_url, name, owner, changeset_revision )
- if text:
- changeset_revisions = listify( text )
- for changeset_revision in changeset_revisions:
- tool_shed_repository = sa_session.query( app.install_model.ToolShedRepository ) \
- .filter( and_( app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed,
- app.install_model.ToolShedRepository.table.c.name == name,
- app.install_model.ToolShedRepository.table.c.owner == owner,
- app.install_model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
- .first()
- if tool_shed_repository:
- return tool_shed_repository
- return None
-
-def get_updated_changeset_revisions_from_tool_shed( app, tool_shed_url, name, owner, changeset_revision ):
- """
- Get all appropriate newer changeset revisions for the repository defined by
- the received tool_shed_url / name / owner combination.
- """
- tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
- params = '?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision )
- url = common_util.url_join( tool_shed_url,
- 'repository/updated_changeset_revisions%s' % params )
- text = common_util.tool_shed_get( app, tool_shed_url, url )
- return text
-
-
-def handle_complex_repository_dependency_for_package( app, elem, package_name, package_version, tool_shed_repository, from_tool_migration_manager=False ):
- """
- Inspect the repository defined by a complex repository dependency definition and take certain steps to enable installation
- of the received package name and version to proceed. The received elem is the <repository> tag set which defines the complex
- repository dependency. The received tool_shed_repository is the installed tool shed repository for which the tool dependency
- defined by the received package_name and package_version is being installed.
- """
- handled_tool_dependencies = []
- tool_shed = elem.attrib[ 'toolshed' ]
- # The protocol is not stored, but the port is if it exists.
- tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
- required_repository_name = elem.attrib[ 'name' ]
- required_repository_owner = elem.attrib[ 'owner' ]
- default_required_repository_changeset_revision = elem.attrib[ 'changeset_revision' ]
- required_repository = get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app,
- tool_shed,
- required_repository_name,
- required_repository_owner,
- default_required_repository_changeset_revision )
- tmp_filename = None
- if required_repository:
- required_repository_changeset_revision = required_repository.installed_changeset_revision
- # Define the installation directory for the required tool dependency package in the required repository.
- required_repository_package_install_dir = \
- tool_dependency_util.get_tool_dependency_install_dir( app=app,
- repository_name=required_repository_name,
- repository_owner=required_repository_owner,
- repository_changeset_revision=required_repository_changeset_revision,
- tool_dependency_type='package',
- tool_dependency_name=package_name,
- tool_dependency_version=package_version )
- # Define this dependent repository's tool dependency installation directory that will contain the env.sh file with a path to the
- # required repository's installed tool dependency package.
- dependent_install_dir = \
- tool_dependency_util.get_tool_dependency_install_dir( app=app,
- repository_name=tool_shed_repository.name,
- repository_owner=tool_shed_repository.owner,
- repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dependency_type='package',
- tool_dependency_name=package_name,
- tool_dependency_version=package_version )
- if os.path.exists( dependent_install_dir ):
- # The install manager handles tool migration stages and the sync_database_with_file_system()
- # method handles two scenarios: (1) where a Galaxy file system environment related to installed
- # Tool Shed repositories and tool dependencies has somehow gotten out of sync with the Galaxy
- # database tables associated with these installed items, and (2) the Tool Shed's install and test
- # framework which installs repositories in 2 stages, those of type tool_dependency_definition
- # followed by those containing valid tools and tool functional test components. Neither of these
- # scenarios apply when the install manager is running.
- if from_tool_migration_manager:
- can_install_tool_dependency = True
- else:
- # Notice that we'll throw away the following tool_dependency if it can be installed.
- tool_dependency, can_install_tool_dependency = \
- tool_dependency_util.sync_database_with_file_system( app,
- tool_shed_repository,
- package_name,
- package_version,
- dependent_install_dir,
- tool_dependency_type='package' )
- if not can_install_tool_dependency:
- log.debug( "Tool dependency %s version %s cannot be installed (it was probably previously installed), " % \
- ( str( tool_dependency.name, str( tool_dependency.version ) ) ) )
- log.debug( "so appending it to the list of handled tool dependencies." )
- handled_tool_dependencies.append( tool_dependency )
- else:
- can_install_tool_dependency = True
- if can_install_tool_dependency:
- # Set this dependent repository's tool dependency env.sh file with a path to the required repository's installed tool dependency package.
- # We can get everything we need from the discovered installed required_repository.
- if required_repository.is_deactivated_or_installed:
- if not os.path.exists( required_repository_package_install_dir ):
- print 'Missing required tool dependency directory %s' % str( required_repository_package_install_dir )
- repo_files_dir = required_repository.repo_files_directory( app )
- tool_dependencies_config = get_absolute_path_to_file_in_repository( repo_files_dir, 'tool_dependencies.xml' )
- if tool_dependencies_config:
- config_to_use = tool_dependencies_config
- else:
- message = "Unable to locate required tool_dependencies.xml file for revision %s of installed repository %s owned by %s." % \
- ( str( required_repository.changeset_revision ), str( required_repository.name ), str( required_repository.owner ) )
- raise Exception( message )
- else:
- # Make a call to the tool shed to get the changeset revision to which the current value of required_repository_changeset_revision
- # should be updated if it's not current.
- text = get_updated_changeset_revisions_from_tool_shed( app=app,
- tool_shed_url=tool_shed,
- name=required_repository_name,
- owner=required_repository_owner,
- changeset_revision=required_repository_changeset_revision )
- if text:
- updated_changeset_revisions = listify( text )
- # The list of changeset revisions is in reverse order, so the newest will be first.
- required_repository_changeset_revision = updated_changeset_revisions[ 0 ]
- # Make a call to the tool shed to get the required repository's tool_dependencies.xml file.
- tmp_filename = create_temporary_tool_dependencies_config( app,
- tool_shed,
- required_repository_name,
- required_repository_owner,
- required_repository_changeset_revision )
- config_to_use = tmp_filename
- handled_tool_dependencies = create_tool_dependency_with_initialized_env_sh_file( app=app,
- dependent_install_dir=dependent_install_dir,
- tool_shed_repository=tool_shed_repository,
- required_repository=required_repository,
- package_name=package_name,
- package_version=package_version,
- tool_dependencies_config=config_to_use )
- suc.remove_file( tmp_filename )
- else:
- message = "Unable to locate required tool shed repository named %s owned by %s with revision %s." % \
- ( str( required_repository_name ), str( required_repository_owner ), str( default_required_repository_changeset_revision ) )
- raise Exception( message )
- return handled_tool_dependencies
-
-def install_and_build_package_via_fabric( app, tool_shed_repository, tool_dependency, actions_dict ):
- sa_session = app.install_model.context
- try:
- # There is currently only one fabric method.
- tool_dependency = fabric_util.install_and_build_package( app, tool_shed_repository, tool_dependency, actions_dict )
- except Exception, e:
- log.exception( 'Error installing tool dependency %s version %s.', str( tool_dependency.name ), str( tool_dependency.version ) )
- # Since there was an installation error, update the tool dependency status to Error. The remove_installation_path option must
- # be left False here.
- error_message = '%s\n%s' % ( td_common_util.format_traceback(), str( e ) )
- tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app,
- tool_dependency,
- error_message,
- remove_installation_path=False )
- tool_dependency = tool_dependency_util.mark_tool_dependency_installed( app, tool_dependency )
- return tool_dependency
-
-def install_package( app, elem, tool_shed_repository, tool_dependencies=None, from_tool_migration_manager=False ):
- """
- Install a tool dependency package defined by the XML element elem. The value of tool_dependencies is
- a partial or full list of ToolDependency records associated with the tool_shed_repository.
- """
- sa_session = app.install_model.context
- tool_dependency = None
- # The value of package_name should match the value of the "package" type in the tool config's <requirements> tag set, but it's not required.
- package_name = elem.get( 'name', None )
- package_version = elem.get( 'version', None )
- if tool_dependencies and package_name and package_version:
- for package_elem in elem:
- if package_elem.tag == 'repository':
- # We have a complex repository dependency definition.
- rd_tool_dependencies = handle_complex_repository_dependency_for_package( app,
- package_elem,
- package_name,
- package_version,
- tool_shed_repository,
- from_tool_migration_manager=from_tool_migration_manager )
- for rd_tool_dependency in rd_tool_dependencies:
- if rd_tool_dependency.status == app.install_model.ToolDependency.installation_status.ERROR:
- # We'll log the error here, but continue installing packages since some may not require this dependency.
- print "Error installing tool dependency for required repository: %s" % str( rd_tool_dependency.error_message )
- elif package_elem.tag == 'install':
- # <install version="1.0">
- # Get the installation directory for tool dependencies that will be installed for the received tool_shed_repository.
- install_dir = tool_dependency_util.get_tool_dependency_install_dir( app=app,
- repository_name=tool_shed_repository.name,
- repository_owner=tool_shed_repository.owner,
- repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dependency_type='package',
- tool_dependency_name=package_name,
- tool_dependency_version=package_version )
- if os.path.exists( install_dir ):
- # The install manager handles tool migration stages and the sync_database_with_file_system()
- # method handles two scenarios: (1) where a Galaxy file system environment related to installed
- # Tool Shed repositories and tool dependencies has somehow gotten out of sync with the Galaxy
- # database tables associated with these installed items, and (2) the Tool Shed's install and test
- # framework which installs repositories in 2 stages, those of type tool_dependency_definition
- # followed by those containing valid tools and tool functional test components. Neither of these
- # scenarios apply when the install manager is running.
- if from_tool_migration_manager:
- can_install_tool_dependency = True
- else:
- # Notice that we'll throw away the following tool_dependency if it can be installed.
- tool_dependency, can_install_tool_dependency = \
- tool_dependency_util.sync_database_with_file_system( app,
- tool_shed_repository,
- package_name,
- package_version,
- install_dir,
- tool_dependency_type='package' )
- if not can_install_tool_dependency:
- log.debug( "Tool dependency %s version %s cannot be installed (it was probably previously installed), so returning it." % \
- ( str( tool_dependency.name ), str( tool_dependency.version ) ) )
- return tool_dependency
- else:
- can_install_tool_dependency = True
- if can_install_tool_dependency:
- package_install_version = package_elem.get( 'version', '1.0' )
- status = app.install_model.ToolDependency.installation_status.INSTALLING
- tool_dependency = \
- tool_dependency_util.create_or_update_tool_dependency( app=app,
- tool_shed_repository=tool_shed_repository,
- name=package_name,
- version=package_version,
- type='package',
- status=status,
- set_status=True )
- # Get the information about the current platform in case the tool dependency definition includes tag sets
- # for installing compiled binaries.
- platform_info_dict = tool_dependency_util.get_platform_info_dict()
- if package_install_version == '1.0':
- # Handle tool dependency installation using a fabric method included in the Galaxy framework.
- actions_elem_tuples = td_common_util.parse_package_elem( package_elem,
- platform_info_dict=platform_info_dict,
- include_after_install_actions=True )
- if actions_elem_tuples:
- # At this point we have a list of <actions> elems that are either defined within an <actions_group>
- # tag set with <actions> sub-elements that contains os and architecture attributes filtered by the
- # platform into which the appropriate compiled binary will be installed, or not defined within an
- # <actions_group> tag set and not filtered. Here is an example actions_elem_tuple.
- # [(True, [<Element 'actions' at 0x109293d10>)]
- binary_installed = False
- for actions_elem_tuple in actions_elem_tuples:
- in_actions_group, actions_elems = actions_elem_tuple
- if in_actions_group:
- # Platform matching is only performed inside <actions_group> tag sets, os and architecture
- # attributes are otherwise ignored.
- can_install_from_source = False
- for actions_elem in actions_elems:
- system = actions_elem.get( 'os' )
- architecture = actions_elem.get( 'architecture' )
- # If this <actions> element has the os and architecture attributes defined, then we only
- # want to process until a successful installation is achieved.
- if system and architecture:
- # If an <actions> tag has been defined that matches our current platform, and the
- # recipe specified within that <actions> tag has been successfully processed, skip
- # any remaining platform-specific <actions> tags. We cannot break out of the loop
- # here because there may be <action> tags at the end of the <actions_group> tag set
- # that must be processed.
- if binary_installed:
- continue
- # No platform-specific <actions> recipe has yet resulted in a successful installation.
- tool_dependency = install_via_fabric( app,
- tool_shed_repository,
- tool_dependency,
- install_dir,
- package_name=package_name,
- actions_elem=actions_elem,
- action_elem=None )
- if tool_dependency.status == app.install_model.ToolDependency.installation_status.INSTALLED:
- # If an <actions> tag was found that matches the current platform, and the
- # install_via_fabric method did not result in an error state, set binary_installed
- # to True in order to skip any remaining platform-specific <actions> tags.
- binary_installed = True
- else:
- # Process the next matching <actions> tag, or any defined <actions> tags that do not
- # contain platform dependent recipes.
- log.debug( 'Error downloading binary for tool dependency %s version %s: %s' % \
- ( str( package_name ), str( package_version ), str( tool_dependency.error_message ) ) )
- else:
- if actions_elem.tag == 'actions':
- # We've reached an <actions> tag that defines the recipe for installing and compiling from
- # source. If binary installation failed, we proceed with the recipe.
- if not binary_installed:
- installation_directory = tool_dependency.installation_directory( app )
- if os.path.exists( installation_directory ):
- # Delete contents of installation directory if attempt at binary installation failed.
- installation_directory_contents = os.listdir( installation_directory )
- if installation_directory_contents:
- removed, error_message = tool_dependency_util.remove_tool_dependency( app, tool_dependency )
- if removed:
- can_install_from_source = True
- else:
- log.debug( 'Error removing old files from installation directory %s: %s' % \
- ( str( tool_dependency.installation_directory( app ), str( error_message ) ) ) )
- else:
- can_install_from_source = True
- else:
- can_install_from_source = True
- if can_install_from_source:
- # We now know that binary installation was not successful, so proceed with the <actions>
- # tag set that defines the recipe to install and compile from source.
- log.debug( 'Proceeding with install and compile recipe for tool dependency %s.' % \
- str( tool_dependency.name ) )
- tool_dependency = install_via_fabric( app,
- tool_shed_repository,
- tool_dependency,
- install_dir,
- package_name=package_name,
- actions_elem=actions_elem,
- action_elem=None )
- if actions_elem.tag == 'action' and tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR:
- # If the tool dependency is not in an error state, perform any final actions that have been
- # defined within the actions_group tag set, but outside of an <actions> tag, which defines
- # the recipe for installing and compiling from source.
- tool_dependency = install_via_fabric( app,
- tool_shed_repository,
- tool_dependency,
- install_dir,
- package_name=package_name,
- actions_elem=None,
- action_elem=actions_elem )
- else:
- # Checks for "os" and "architecture" attributes are not made for any <actions> tag sets outside of
- # an <actions_group> tag set. If the attributes are defined, they will be ignored. All <actions> tags
- # outside of an <actions_group> tag set will always be processed.
- tool_dependency = install_via_fabric( app,
- tool_shed_repository,
- tool_dependency,
- install_dir,
- package_name=package_name,
- actions_elem=actions_elems,
- action_elem=None )
- if tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR:
- log.debug( 'Tool dependency %s version %s has been installed in %s.' % \
- ( str( package_name ), str( package_version ), str( install_dir ) ) )
- else:
- error_message = 'Version %s of the %s package cannot be installed because ' % ( str( package_version ), str( package_name ) )
- error_message += 'the recipe for installing the package is missing either an <actions> tag set or an <actions_group> '
- error_message += 'tag set.'
- # Since there was an installation error, update the tool dependency status to Error. The remove_installation_path option must
- # be left False here.
- tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app,
- tool_dependency,
- error_message,
- remove_installation_path=False )
- return tool_dependency
- else:
- raise NotImplementedError( 'Only install version 1.0 is currently supported (i.e., change your tag to be <install version="1.0">).' )
- elif package_elem.tag == 'readme':
- # Nothing to be done.
- continue
- #elif package_elem.tag == 'custom_fabfile':
- # # TODO: This is not yet supported or functionally correct...
- # # Handle tool dependency installation where the repository includes one or more custom fabric scripts.
- # if not fabric_version_checked:
- # check_fabric_version()
- # fabric_version_checked = True
- # fabfile_name = package_elem.get( 'name', None )
- # custom_fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) )
- # print 'Installing tool dependencies via fabric script ', custom_fabfile_path
- return tool_dependency
-
-def install_via_fabric( app, tool_shed_repository, tool_dependency, install_dir, package_name=None, custom_fabfile_path=None,
- actions_elem=None, action_elem=None, **kwd ):
- """
- Parse a tool_dependency.xml file's <actions> tag set to gather information for installation using the
- fabric_util.install_and_build_package() method. The use of fabric is being eliminated, so some of these
- functions may need to be renamed at some point.
- """
- sa_session = app.install_model.context
- if not os.path.exists( install_dir ):
- os.makedirs( install_dir )
- actions_dict = dict( install_dir=install_dir )
- if package_name:
- actions_dict[ 'package_name' ] = package_name
- actions = []
- is_binary_download = False
- if actions_elem is not None:
- elems = actions_elem
- if elems.get( 'os' ) is not None and elems.get( 'architecture' ) is not None:
- is_binary_download = True
- elif action_elem is not None:
- # We were provided with a single <action> element to perform certain actions after a platform-specific tarball was downloaded.
- elems = [ action_elem ]
- else:
- elems = []
- recipe_manager = RecipeManager()
- tool_shed_repository_install_dir = fabric_util.get_tool_shed_repository_install_dir( app, tool_shed_repository )
- install_environment = InstallEnvironment( tool_shed_repository_install_dir, install_dir )
- for action_elem in elems:
- # Make sure to skip all comments, since they are now included in the XML tree.
- if action_elem.tag != 'action':
- continue
- action_dict = {}
- action_type = action_elem.get( 'type', None )
- if action_type is not None:
- action_dict = recipe_manager.prepare_step( app=app,
- tool_dependency=tool_dependency,
- action_type=action_type,
- action_elem=action_elem,
- action_dict=action_dict,
- install_environment=install_environment,
- is_binary_download=is_binary_download )
- action_tuple = ( action_type, action_dict )
- if action_type == 'set_environment':
- if action_tuple not in actions:
- actions.append( action_tuple )
- else:
- actions.append( action_tuple )
- if actions:
- actions_dict[ 'actions' ] = actions
- if custom_fabfile_path is not None:
- # TODO: this is not yet supported or functional, but when it is handle it using the fabric api.
- # execute_custom_fabric_script( app, elem, custom_fabfile_path, install_dir, package_name=package_name )
- raise Exception( 'Tool dependency installation using proprietary fabric scripts is not yet supported.' )
- else:
- tool_dependency = install_and_build_package_via_fabric( app, tool_shed_repository, tool_dependency, actions_dict )
- return tool_dependency
-
-def execute_custom_fabric_script( app, elem, custom_fabfile_path, install_dir, package_name=None, **kwd ):
- """
- TODO: Handle this using the fabric api.
- Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method.
- """
- if not os.path.exists( install_dir ):
- os.makedirs( install_dir )
- # Default value for env_dependency_path.
- env_dependency_path = install_dir
- method_name = elem.get( 'name', None )
- params_str = ''
- actions = []
- for param_elem in elem:
- param_name = param_elem.get( 'name' )
- if param_name:
- if param_name == 'actions':
- for action_elem in param_elem:
- actions.append( action_elem.text.replace( '$INSTALL_DIR', install_dir ) )
- if actions:
- params_str += 'actions=%s,' % encoding_util.tool_shed_encode( encoding_util.encoding_sep.join( actions ) )
- else:
- if param_elem.text:
- param_value = encoding_util.tool_shed_encode( param_elem.text )
- params_str += '%s=%s,' % ( param_name, param_value )
- if package_name:
- params_str += 'package_name=%s' % package_name
- else:
- params_str = params_str.rstrip( ',' )
- try:
- cmd = 'fab -f %s %s:%s' % ( custom_fabfile_path, method_name, params_str )
- returncode, message = run_subprocess( app, cmd )
- except Exception, e:
- return "Exception executing fabric script %s: %s. " % ( str( custom_fabfile_path ), str( e ) )
- if returncode:
- return message
- handle_environment_settings( app, tool_dependency, install_dir, cmd )
-
-def run_subprocess( app, cmd ):
- env = os.environ
- PYTHONPATH = env.get( 'PYTHONPATH', '' )
- if PYTHONPATH:
- env[ 'PYTHONPATH' ] = '%s:%s' % ( os.path.abspath( os.path.join( app.config.root, 'lib' ) ), PYTHONPATH )
- else:
- env[ 'PYTHONPATH' ] = os.path.abspath( os.path.join( app.config.root, 'lib' ) )
- message = ''
- tmp_name = tempfile.NamedTemporaryFile( prefix="tmp-toolshed-rs" ).name
- tmp_stderr = open( tmp_name, 'wb' )
- proc = subprocess.Popen( cmd, shell=True, env=env, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- tmp_stderr.close()
- if returncode:
- tmp_stderr = open( tmp_name, 'rb' )
- message = '%s\n' % str( tmp_stderr.read() )
- tmp_stderr.close()
- suc.remove_file( tmp_name )
- return returncode, message
-
-def set_environment( app, elem, tool_shed_repository, attr_tups_of_dependencies_for_install ):
- """
- Create a ToolDependency to set an environment variable. This is different from the process used to
- set an environment variable that is associated with a package. An example entry in a tool_dependencies.xml
- file is::
-
- <set_environment version="1.0">
- <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
- </set_environment>
- """
- # TODO: Add support for a repository dependency definition within this tool dependency type's tag set. This should look something like
- # the following. See the implementation of support for this in the tool dependency package type's method above.
- # This function is only called for set environment actions as defined below, not within an <install version="1.0"> tool
- # dependency type. Here is an example of the tag set this function does handle:
- # <action type="set_environment">
- # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR</environment_variable>
- # </action>
- # Here is an example of the tag set this function does not handle:
- # <set_environment version="1.0">
- # <repository toolshed="<tool shed>" name="<repository name>" owner="<repository owner>" changeset_revision="<changeset revision>" />
- # </set_environment>
- sa_session = app.install_model.context
- tool_dependencies = []
- env_var_version = elem.get( 'version', '1.0' )
- tool_shed_repository_install_dir = fabric_util.get_tool_shed_repository_install_dir( app, tool_shed_repository )
- for env_var_elem in elem:
- # Although we're in a loop here, this method will always return only a single ToolDependency or None.
- env_var_name = env_var_elem.get( 'name', None )
- # The value of env_var_name must match the text value of at least 1 <requirement> tag in the tool config's <requirements> tag set whose
- # "type" attribute is "set_environment" (e.g., <requirement type="set_environment">R_SCRIPT_PATH</requirement>).
- env_var_action = env_var_elem.get( 'action', None )
- if env_var_name and env_var_action:
- # Tool dependencies of type "set_environmnet" always have the version attribute set to None.
- attr_tup = ( env_var_name, None, 'set_environment' )
- if attr_tup in attr_tups_of_dependencies_for_install:
- install_dir = \
- tool_dependency_util.get_tool_dependency_install_dir( app=app,
- repository_name=tool_shed_repository.name,
- repository_owner=tool_shed_repository.owner,
- repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dependency_type='set_environment',
- tool_dependency_name=env_var_name,
- tool_dependency_version=None )
- install_environment = InstallEnvironment( tool_shed_repository_install_dir=tool_shed_repository_install_dir,
- install_dir=install_dir )
- env_var_dict = td_common_util.create_env_var_dict( elem=env_var_elem,
- install_environment=install_environment )
- if env_var_dict:
- if not os.path.exists( install_dir ):
- os.makedirs( install_dir )
- status = app.install_model.ToolDependency.installation_status.INSTALLING
- tool_dependency = \
- tool_dependency_util.create_or_update_tool_dependency( app=app,
- tool_shed_repository=tool_shed_repository,
- name=env_var_name,
- version=None,
- type='set_environment',
- status=status,
- set_status=True )
- if env_var_version == '1.0':
- # Create this tool dependency's env.sh file.
- env_file_builder = EnvFileBuilder( install_dir )
- return_code = env_file_builder.append_line( make_executable=True, **env_var_dict )
- if return_code:
- error_message = 'Error creating env.sh file for tool dependency %s, return_code: %s' % \
- ( str( tool_dependency.name ), str( return_code ) )
- log.debug( error_message )
- status = app.install_model.ToolDependency.installation_status.ERROR
- tool_dependency = \
- tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=status,
- error_message=error_message,
- remove_from_disk=False )
- else:
- if tool_dependency.status not in [ app.install_model.ToolDependency.installation_status.ERROR,
- app.install_model.ToolDependency.installation_status.INSTALLED ]:
- status = app.install_model.ToolDependency.installation_status.INSTALLED
- tool_dependency = \
- tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=status,
- error_message=None,
- remove_from_disk=False )
- log.debug( 'Environment variable %s set in %s for tool dependency %s.' % \
- ( str( env_var_name ), str( install_dir ), str( tool_dependency.name ) ) )
- else:
- error_message = 'Only set_environment version 1.0 is currently supported (i.e., change your tag to be <set_environment version="1.0">).'
- status = app.install_model.ToolDependency.installation_status.ERROR
- tool_dependency = \
- tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=status,
- error_message=error_message,
- remove_from_disk=False )
- tool_dependencies.append( tool_dependency )
- return tool_dependencies
-
-def strip_path( fpath ):
- if not fpath:
- return fpath
- try:
- file_path, file_name = os.path.split( fpath )
- except:
- file_name = fpath
- return file_name
diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/tool_dependencies/recipe/asynchronous_reader.py
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/asynchronous_reader.py
@@ -0,0 +1,32 @@
+import logging
+import os
+import threading
+
+log = logging.getLogger( __name__ )
+
+
+class AsynchronousReader( threading.Thread ):
+ """
+ A helper class to implement asynchronous reading of a stream in a separate thread. Read lines are pushed
+ onto a queue to be consumed in another thread.
+ """
+
+ def __init__( self, fd, queue ):
+ threading.Thread.__init__( self )
+ self._fd = fd
+ self._queue = queue
+ self.lines = []
+
+ def run( self ):
+ """Read lines and put them on the queue."""
+ thread_lock = threading.Lock()
+ thread_lock.acquire()
+ for line in iter( self._fd.readline, '' ):
+ stripped_line = line.rstrip()
+ self.lines.append( stripped_line )
+ self._queue.put( stripped_line )
+ thread_lock.release()
+
+ def installation_complete( self ):
+ """Make sure there is more installation and compilation logging content expected."""
+ return not self.is_alive() and self._queue.empty()
diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/tool_dependencies/recipe/env_file_builder.py
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/env_file_builder.py
@@ -0,0 +1,95 @@
+import logging
+import os
+import stat
+
+log = logging.getLogger( __name__ )
+
+
+class EnvFileBuilder( object ):
+
+ def __init__( self, install_dir ):
+ self.install_dir = install_dir
+ self.return_code = 0
+
+ def append_line( self, make_executable=True, **kwd ):
+ env_var_dict = dict( **kwd )
+ env_entry, env_file = self.create_or_update_env_shell_file( self.install_dir, env_var_dict )
+ return_code = self.file_append( env_entry, env_file, make_executable=make_executable )
+ self.return_code = self.return_code or return_code
+ return self.return_code
+
+ @staticmethod
+ def create_or_update_env_shell_file( install_dir, env_var_dict ):
+ env_var_action = env_var_dict[ 'action' ]
+ env_var_value = env_var_dict[ 'value' ]
+ if env_var_action in [ 'prepend_to', 'set_to', 'append_to' ]:
+ env_var_name = env_var_dict[ 'name' ]
+ if env_var_action == 'prepend_to':
+ changed_value = '%s:$%s' % ( env_var_value, env_var_name )
+ elif env_var_action == 'set_to':
+ changed_value = '%s' % env_var_value
+ elif env_var_action == 'append_to':
+ changed_value = '$%s:%s' % ( env_var_name, env_var_value )
+ line = "%s=%s; export %s" % ( env_var_name, changed_value, env_var_name )
+ elif env_var_action == "source":
+ line = "if [ -f %s ] ; then . %s ; fi" % ( env_var_value, env_var_value )
+ else:
+ raise Exception( "Unknown shell file action %s" % env_var_action )
+ env_shell_file_path = os.path.join( install_dir, 'env.sh' )
+ return line, env_shell_file_path
+
+ def file_append( self, text, file_path, make_executable=True ):
+ """
+ Append a line to a file unless the line already exists in the file. This method creates the file if
+ it doesn't exist. If make_executable is True, the permissions on the file are set to executable by
+ the owner.
+ """
+ file_dir = os.path.dirname( file_path )
+ if not os.path.exists( file_dir ):
+ try:
+ os.makedirs( file_dir )
+ except Exception, e:
+ log.exception( str( e ) )
+ return 1
+ if os.path.exists( file_path ):
+ try:
+ new_env_file_contents = []
+ env_file_contents = file( file_path, 'r' ).readlines()
+ # Clean out blank lines from the env.sh file.
+ for line in env_file_contents:
+ line = line.rstrip()
+ if line:
+ new_env_file_contents.append( line )
+ env_file_contents = new_env_file_contents
+ except Exception, e:
+ log.exception( str( e ) )
+ return 1
+ else:
+ env_file_handle = open( file_path, 'w' )
+ env_file_handle.close()
+ env_file_contents = []
+ if make_executable:
+ # Explicitly set the file's executable bits.
+ try:
+ os.chmod( file_path, int( '111', base=8 ) | os.stat( file_path )[ stat.ST_MODE ] )
+ except Exception, e:
+ log.exception( str( e ) )
+ return 1
+ # Convert the received text to a list, in order to support adding one or more lines to the file.
+ if isinstance( text, basestring ):
+ text = [ text ]
+ for line in text:
+ line = line.rstrip()
+ if line and line not in env_file_contents:
+ env_file_contents.append( line )
+ try:
+ file( file_path, 'w' ).write( '\n'.join( env_file_contents ) )
+ except Exception, e:
+ log.exception( str( e ) )
+ return 1
+ return 0
+
+ def handle_action_shell_file_paths( self, action_dict ):
+ shell_file_paths = action_dict.get( 'action_shell_file_paths', [] )
+ for shell_file_path in shell_file_paths:
+ self.append_line( action="source", value=shell_file_path )
diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/tool_dependencies/recipe/install_environment.py
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/install_environment.py
@@ -0,0 +1,273 @@
+import logging
+import os
+import Queue
+import shutil
+import subprocess
+import tempfile
+import threading
+import time
+from contextlib import contextmanager
+
+# TODO: eliminate the use of fabric here.
+from galaxy import eggs
+
+eggs.require( 'paramiko' )
+eggs.require( 'ssh' )
+eggs.require( 'Fabric' )
+
+from fabric.operations import _AttributeString
+from fabric import state
+from fabric.api import prefix
+
+from galaxy.util import DATABASE_MAX_STRING_SIZE
+from galaxy.util import DATABASE_MAX_STRING_SIZE_PRETTY
+from galaxy.util import shrink_string_by_size
+from galaxy.util import unicodify
+
+from tool_shed.galaxy_install.tool_dependencies import td_common_util
+from tool_shed.galaxy_install.tool_dependencies.recipe import asynchronous_reader
+
+log = logging.getLogger( __name__ )
+
+class InstallEnvironment( object ):
+ """Object describing the environment built up as part of the process of building and installing a package."""
+
+
+ def __init__( self, tool_shed_repository_install_dir, install_dir ):
+ """
+ The value of the received tool_shed_repository_install_dir is the root installation directory
+ of the repository containing the tool dependency, and the value of the received install_dir is
+ the root installation directory of the tool dependency.
+ """
+ self.env_shell_file_paths = []
+ self.install_dir = install_dir
+ self.tool_shed_repository_install_dir = tool_shed_repository_install_dir
+
+ def __call__( self ):
+ with settings( warn_only=True, **td_common_util.get_env_var_values( self ) ):
+ with prefix( self.__setup_environment() ):
+ yield
+
+ def add_env_shell_file_paths( self, paths ):
+ for path in paths:
+ self.env_shell_file_paths.append( str( path ) )
+
+ def build_command( self, command, action_type='shell_command' ):
+ """
+ Build command line for execution from simple command, but
+ configuring environment described by this object.
+ """
+ env_cmds = self.environment_commands( action_type )
+ return '\n'.join( env_cmds + [ command ] )
+
+ def close_file_descriptor( self, fd ):
+ """Attempt to close a file descriptor."""
+ start_timer = time.time()
+ error = ''
+ while True:
+ try:
+ fd.close()
+ break
+ except IOError, e:
+ # Undoubtedly close() was called during a concurrent operation on the same file object.
+ log.debug( 'Error closing file descriptor: %s' % str( e ) )
+ time.sleep( .5 )
+ current_wait_time = time.time() - start_timer
+ if current_wait_time >= 600:
+ error = 'Error closing file descriptor: %s' % str( e )
+ break
+ return error
+
+ def enqueue_output( self, stdout, stdout_queue, stderr, stderr_queue ):
+ """
+ This method places streamed stdout and stderr into a threaded IPC queue target. Received data
+ is printed and saved to that thread's queue. The calling thread can then retrieve the data using
+ thread.stdout and thread.stderr.
+ """
+ stdout_logger = logging.getLogger( 'install_environment.STDOUT' )
+ stderr_logger = logging.getLogger( 'install_environment.STDERR' )
+ for line in iter( stdout.readline, '' ):
+ output = line.rstrip()
+ stdout_logger.debug( output )
+ stdout_queue.put( output )
+ stdout_queue.put( None )
+ for line in iter( stderr.readline, '' ):
+ output = line.rstrip()
+ stderr_logger.debug( output )
+ stderr_queue.put( output )
+ stderr_queue.put( None )
+
+ def environment_commands( self, action_type ):
+ """Build a list of commands used to construct the environment described by this object."""
+ cmds = []
+ for env_shell_file_path in self.env_shell_file_paths:
+ if os.path.exists( env_shell_file_path ):
+ for env_setting in open( env_shell_file_path ):
+ cmds.append( env_setting.strip( '\n' ) )
+ else:
+ log.debug( 'Invalid file %s specified, ignoring %s action.' % ( str( env_shell_file_path ), str( action_type ) ) )
+ return cmds
+
+ def environment_dict( self, action_type='template_command' ):
+ env_vars = dict()
+ for env_shell_file_path in self.env_shell_file_paths:
+ if os.path.exists( env_shell_file_path ):
+ for env_setting in open( env_shell_file_path ):
+ env_string = env_setting.split( ';' )[ 0 ]
+ env_name, env_path = env_string.split( '=' )
+ env_vars[ env_name ] = env_path
+ else:
+ log.debug( 'Invalid file %s specified, ignoring template_command action.' % str( env_shell_file_path ) )
+ return env_vars
+
+ def handle_command( self, app, tool_dependency, cmd, return_output=False ):
+ """Handle a command and log the results."""
+ context = app.install_model.context
+ command = str( cmd )
+ output = self.handle_complex_command( command )
+ self.log_results( cmd, output, os.path.join( self.install_dir, td_common_util.INSTALLATION_LOG ) )
+ stdout = output.stdout
+ stderr = output.stderr
+ if len( stdout ) > DATABASE_MAX_STRING_SIZE:
+ print "Length of stdout > %s, so only a portion will be saved in the database." % str( DATABASE_MAX_STRING_SIZE_PRETTY )
+ stdout = shrink_string_by_size( stdout, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
+ if len( stderr ) > DATABASE_MAX_STRING_SIZE:
+ print "Length of stderr > %s, so only a portion will be saved in the database." % str( DATABASE_MAX_STRING_SIZE_PRETTY )
+ stderr = shrink_string_by_size( stderr, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
+ if output.return_code not in [ 0 ]:
+ tool_dependency.status = app.install_model.ToolDependency.installation_status.ERROR
+ if stderr:
+ tool_dependency.error_message = unicodify( stderr )
+ elif stdout:
+ tool_dependency.error_message = unicodify( stdout )
+ else:
+ # We have a problem if there was no stdout and no stderr.
+ tool_dependency.error_message = "Unknown error occurred executing shell command %s, return_code: %s" % \
+ ( str( cmd ), str( output.return_code ) )
+ context.add( tool_dependency )
+ context.flush()
+ if return_output:
+ return output
+ return output.return_code
+
+ def handle_complex_command( self, command ):
+ """
+ Wrap subprocess.Popen in such a way that the stderr and stdout from running a shell command will
+ be captured and logged in nearly real time. This is similar to fabric.local, but allows us to
+ retain control over the process. This method is named "complex" because it uses queues and
+ threads to execute a command while capturing and displaying the output.
+ """
+ # Launch the command as subprocess. A bufsize of 1 means line buffered.
+ process_handle = subprocess.Popen( str( command ),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ bufsize=1,
+ close_fds=False,
+ shell=True,
+ cwd=state.env[ 'lcwd' ] )
+ pid = process_handle.pid
+ # Launch the asynchronous readers of the process' stdout and stderr.
+ stdout_queue = Queue.Queue()
+ stdout_reader = asynchronous_reader.AsynchronousReader( process_handle.stdout, stdout_queue )
+ stdout_reader.start()
+ stderr_queue = Queue.Queue()
+ stderr_reader = asynchronous_reader.AsynchronousReader( process_handle.stderr, stderr_queue )
+ stderr_reader.start()
+ # Place streamed stdout and stderr into a threaded IPC queue target so it can
+ # be printed and stored for later retrieval when generating the INSTALLATION.log.
+ stdio_thread = threading.Thread( target=self.enqueue_output,
+ args=( process_handle.stdout,
+ stdout_queue,
+ process_handle.stderr,
+ stderr_queue ) )
+ thread_lock = threading.Lock()
+ thread_lock.acquire()
+ stdio_thread.start()
+ # Check the queues for output until there is nothing more to get.
+ start_timer = time.time()
+ while not stdout_reader.installation_complete() or not stderr_reader.installation_complete():
+ # Show what we received from standard output.
+ while not stdout_queue.empty():
+ try:
+ line = stdout_queue.get()
+ except Queue.Empty:
+ line = None
+ break
+ if line:
+ print line
+ start_timer = time.time()
+ else:
+ break
+ # Show what we received from standard error.
+ while not stderr_queue.empty():
+ try:
+ line = stderr_queue.get()
+ except Queue.Empty:
+ line = None
+ break
+ if line:
+ print line
+ start_timer = time.time()
+ else:
+ stderr_queue.task_done()
+ break
+ # Sleep a bit before asking the readers again.
+ time.sleep( .1 )
+ current_wait_time = time.time() - start_timer
+ if stdout_queue.empty() and stderr_queue.empty() and current_wait_time > td_common_util.NO_OUTPUT_TIMEOUT:
+ err_msg = "\nShutting down process id %s because it generated no output for the defined timeout period of %.1f seconds.\n" % \
+ ( pid, td_common_util.NO_OUTPUT_TIMEOUT )
+ stderr_reader.lines.append( err_msg )
+ process_handle.kill()
+ break
+ thread_lock.release()
+ # Wait until each of the threads we've started terminate. The following calls will block each thread
+ # until it terminates either normally, through an unhandled exception, or until the timeout occurs.
+ stdio_thread.join( td_common_util.NO_OUTPUT_TIMEOUT )
+ stdout_reader.join( td_common_util.NO_OUTPUT_TIMEOUT )
+ stderr_reader.join( td_common_util.NO_OUTPUT_TIMEOUT )
+ # Close subprocess' file descriptors.
+ error = self.close_file_descriptor( process_handle.stdout )
+ error = self.close_file_descriptor( process_handle.stderr )
+ stdout = '\n'.join( stdout_reader.lines )
+ stderr = '\n'.join( stderr_reader.lines )
+ # Handle error condition (deal with stdout being None, too)
+ output = _AttributeString( stdout.strip() if stdout else "" )
+ errors = _AttributeString( stderr.strip() if stderr else "" )
+ # Make sure the process has finished.
+ process_handle.poll()
+ output.return_code = process_handle.returncode
+ output.stderr = errors
+ return output
+
+ def log_results( self, command, fabric_AttributeString, file_path ):
+ """Write attributes of fabric.operations._AttributeString to a specified log file."""
+ if os.path.exists( file_path ):
+ logfile = open( file_path, 'ab' )
+ else:
+ logfile = open( file_path, 'wb' )
+ logfile.write( "\n#############################################\n" )
+ logfile.write( '%s\nSTDOUT\n' % command )
+ logfile.write( str( fabric_AttributeString.stdout ) )
+ logfile.write( "\n#############################################\n" )
+ logfile.write( "\n#############################################\n" )
+ logfile.write( '%s\nSTDERR\n' % command )
+ logfile.write( str( fabric_AttributeString.stderr ) )
+ logfile.write( "\n#############################################\n" )
+ logfile.close()
+
+ @contextmanager
+ def make_tmp_dir( self ):
+ work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-mtd" )
+ yield work_dir
+ if os.path.exists( work_dir ):
+ try:
+ shutil.rmtree( work_dir )
+ except Exception, e:
+ log.exception( str( e ) )
+
+ def __setup_environment( self ):
+ return "&&".join( [ ". %s" % file for file in self.__valid_env_shell_file_paths() ] )
+
+ def __valid_env_shell_file_paths( self ):
+ return [ file for file in self.env_shell_file_paths if os.path.exists( file ) ]
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0