galaxy-commits
Threads by month
- ----- 2025 -----
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
April 2014
- 1 participant
- 261 discussions
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/09c44c3db98c/
Changeset: 09c44c3db98c
User: davebgx
Date: 2014-04-25 16:41:34
Summary: Fix tool shed functional tests for roles and email notifications.
Affected #: 1 file
diff -r 880974da3ca4d3d76bcf8b11184cb42c4aee95b7 -r 09c44c3db98cef9fa33b8e511e52928dcec5d2f5 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -525,7 +525,7 @@
def enable_email_alerts( self, repository, strings_displayed=[], strings_not_displayed=[] ):
repository_id = self.security.encode_id( repository.id )
params = dict( operation='Receive email alerts', id=repository_id )
- self.visit_url( '/repository/browse_repositories' )
+ self.visit_url( '/repository/browse_repositories', params )
self.check_for_strings( strings_displayed )
def escape_html( self, string, unescape=False ):
@@ -796,7 +796,7 @@
self.check_for_strings( strings_displayed, strings_not_displayed )
params = dict( operation='manage users and groups', id=self.security.encode_id( role.id ) )
url = '/admin/roles'
- self.visit_url( url )
+ self.visit_url( url, params )
strings_displayed = [ common.test_user_1_email, common.test_user_2_email ]
self.check_for_strings( strings_displayed, strings_not_displayed )
# As elsewhere, twill limits the possibility of submitting the form, this time due to not executing the javascript
https://bitbucket.org/galaxy/galaxy-central/commits/611117ed0a88/
Changeset: 611117ed0a88
User: davebgx
Date: 2014-04-25 16:41:53
Summary: Fix functional tests for library templates.
Affected #: 2 files
diff -r 09c44c3db98cef9fa33b8e511e52928dcec5d2f5 -r 611117ed0a88654c736bc5722016b81874475c95 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -2040,16 +2040,13 @@
dis-inherit your template, call the manage_library_template_inheritance() below immediately after you call this
method in your test code. Templates added to Requesttype objects are always inherited to samples.
"""
- if item_type == 'library':
- url = "%s/library_common/add_template?cntrller=%s&item_type=%s&form_type=%s&library_id=%s" % \
- ( self.url, cntrller, item_type, form_type, library_id )
- elif item_type == 'folder':
- url = "%s/library_common/add_template?cntrller=%s&item_type=%s&form_type=%s&library_id=%s&folder_id=%s" % \
- ( self.url, cntrller, item_type, form_type, library_id, folder_id )
+ params = dict( cntrller=cntrller, item_type=item_type, form_type=form_type, library_id=library_id )
+ url = "/library_common/add_template"
+ if item_type == 'folder':
+ params[ 'folder_id' ] = folder_id
elif item_type == 'ldda':
- url = "%s/library_common/add_template?cntrller=%s&item_type=%s&form_type=%s&library_id=%s&folder_id=%s&ldda_id=%s" % \
- ( self.url, cntrller, item_type, form_type, library_id, folder_id, ldda_id )
- self.visit_url( url )
+ params[ 'ldda_id' ] = ldda_id
+ self.visit_url( url, params )
self.check_page_for_string ( "Select a template for the" )
self.refresh_form( "form_id", form_id )
# For some unknown reason, twill barfs if the form number ( 1 ) is used in the following
diff -r 09c44c3db98cef9fa33b8e511e52928dcec5d2f5 -r 611117ed0a88654c736bc5722016b81874475c95 test/functional/test_library_templates.py
--- a/test/functional/test_library_templates.py
+++ b/test/functional/test_library_templates.py
@@ -72,12 +72,14 @@
# Pass number of options we want in our SelectField
num_options = 2
# Create form for library template
+ strings_displayed_after_submit = [ "The form '%s' has been updated with the changes." % type ]
self.create_form( name=type,
description=form_desc,
form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
field_type=type,
num_options=num_options,
- field_name=field_name )
+ field_name=field_name,
+ strings_displayed_after_submit=strings_displayed_after_submit )
# Get all of the new form definitions for later use
global AddressField_form
AddressField_form = get_form( 'AddressField' )
@@ -145,7 +147,7 @@
# Add a template containing an AddressField to library1
self.add_template( cntrller='library_admin',
item_type='library',
- form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE.replace( ' ', '+' ),
+ form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
form_id=self.security.encode_id( AddressField_form.id ),
form_name=AddressField_form.name,
library_id=self.security.encode_id( library1.id ) )
@@ -287,7 +289,7 @@
# Add a template containing an CheckboxField to library1
self.add_template( cntrller='library_admin',
item_type='library',
- form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE.replace( ' ', '+' ),
+ form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
form_id=self.security.encode_id( CheckboxField_form.id ),
form_name=CheckboxField_form.name,
library_id=self.security.encode_id( library2.id ) )
@@ -363,7 +365,7 @@
# Logged in as admin_user
self.add_template( cntrller='library_admin',
item_type='library',
- form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE.replace( ' ', '+' ),
+ form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
form_id=self.security.encode_id( SelectField_form.id ),
form_name=SelectField_form.name,
library_id=self.security.encode_id( library3.id ) )
@@ -464,7 +466,7 @@
# Add an inheritable template to library4
self.add_template( cntrller='library_admin',
item_type='library',
- form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE.replace( ' ', '+' ),
+ form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
form_id=self.security.encode_id( TextArea_form.id ),
form_name=TextArea_form.name,
library_id=self.security.encode_id( library4.id ) )
@@ -533,7 +535,7 @@
# Add an inheritable template to library5
self.add_template( cntrller='library_admin',
item_type='library',
- form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE.replace( ' ', '+' ),
+ form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
form_id=self.security.encode_id( TextField_form.id ),
form_name=TextField_form.name,
library_id=self.security.encode_id( library5.id ) )
@@ -602,7 +604,7 @@
# Currently there is only a TextField, and we'll add a TextArea.
self.edit_template( cntrller='library_admin',
item_type='library',
- form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE.replace( ' ', '+' ),
+ form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
library_id=self.security.encode_id( library5.id ),
field_type='TextArea',
field_label_1=TextArea_form.name,
@@ -647,7 +649,7 @@
# We won't select an option since we have no workflow to select
self.add_template( cntrller='library_admin',
item_type='library',
- form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE.replace( ' ', '+' ),
+ form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
form_id=self.security.encode_id( WorkflowField_form.id ),
form_name=WorkflowField_form.name,
library_id=self.security.encode_id( library6.id ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
[View Less]
1
0
commit/galaxy-central: Jeremy Goecks: Simplification for downsampler in bam data provider
by commits-noreply@bitbucket.org 24 Apr '14
by commits-noreply@bitbucket.org 24 Apr '14
24 Apr '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/880974da3ca4/
Changeset: 880974da3ca4
User: Jeremy Goecks
Date: 2014-04-25 03:24:33
Summary: Simplification for downsampler in bam data provider
Affected #: 1 file
diff -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 -r 880974da3ca4d3d76bcf8b11184cb42c4aee95b7 lib/galaxy/visualization/data_providers/genome.py
--- a/lib/galaxy/visualization/data_providers/genome.py
+++ b/lib/galaxy/visualization/data_providers/genome.py
@@ -952,8 +952,7 @@
# Convert threshold to N for stepping through iterator.
n = int( 1/threshold )
- for e in itertools.islice( read_iterator, None, None, n ):
- yield e
+ return itertools.islice( read_iterator, None, None, n )
# Alternatate and much slower implementation that looks for pending pairs.
'''
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
[View Less]
1
0
commit/galaxy-central: greg: Move some hg utility functions out of shed_util_common and into hg_util.
by commits-noreply@bitbucket.org 24 Apr '14
by commits-noreply@bitbucket.org 24 Apr '14
24 Apr '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d1dd573cdce2/
Changeset: d1dd573cdce2
User: greg
Date: 2014-04-24 21:45:55
Summary: Move some hg utility functions out of shed_util_common and into hg_util.
Affected #: 12 files
diff -r 203623b931507dcd182e2521911ab036021faf61 -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1855,7 +1855,7 @@
repo = hg.repository( hg_util.get_configured_ui(), path=repo_files_dir )
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
repository_util.pull_repository( repo, repository_clone_url, latest_ctx_rev )
- suc.update_repository( repo, latest_ctx_rev )
+ hg_util.update_repository( repo, latest_ctx_rev )
# Remove old Data Manager entries
if repository.includes_data_managers:
data_manager_util.remove_from_data_manager( trans.app, repository )
diff -r 203623b931507dcd182e2521911ab036021faf61 -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 lib/galaxy/webapps/tool_shed/controllers/hg.py
--- a/lib/galaxy/webapps/tool_shed/controllers/hg.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/hg.py
@@ -2,7 +2,7 @@
from galaxy import web
from galaxy.web.base.controller import BaseUIController
from tool_shed.util.shed_util_common import get_repository_by_name_and_owner
-from tool_shed.util.shed_util_common import update_repository
+from tool_shed.util.hg_util import update_repository
from tool_shed.util.metadata_util import set_repository_metadata
from galaxy import eggs
diff -r 203623b931507dcd182e2521911ab036021faf61 -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -722,7 +722,7 @@
repository = suc.get_repository_in_tool_shed( trans, id )
repo = hg.repository( hg_util.get_configured_ui(), repository.repo_path( trans.app ) )
# Update repository files for browsing.
- suc.update_repository( repo )
+ hg_util.update_repository( repo )
changeset_revision = repository.tip( trans.app )
metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, id, changeset_revision, metadata_only=True )
repository_type_select_field = rt_util.build_repository_type_select_field( trans, repository=repository )
@@ -2743,7 +2743,7 @@
commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
suc.handle_email_alerts( trans, repository )
# Update the repository files for browsing.
- suc.update_repository( repo )
+ hg_util.update_repository( repo )
# Get the new repository tip.
repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
if tip == repository.tip( trans.app ):
@@ -3117,7 +3117,7 @@
else:
ctx_child = None
diffs = []
- options_dict = suc.get_mercurial_default_options_dict( 'diff' )
+ options_dict = hg_util.get_mercurial_default_options_dict( 'diff' )
# Not quite sure if the following settings make any difference, but with a combination of them and the size check on each
# diff, we don't run out of memory when viewing the changelog of the cisortho2 repository on the test tool shed.
options_dict[ 'maxfile' ] = suc.MAXDIFFSIZE
@@ -3252,7 +3252,11 @@
else:
message += malicious_error
status = 'error'
- containers_dict = container_util.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = container_util.build_repository_containers_for_tool_shed( trans,
+ repository,
+ changeset_revision,
+ repository_dependencies,
+ repository_metadata )
repository_type_select_field = rt_util.build_repository_type_select_field( trans, repository=repository )
heads = suc.get_repository_heads( repo )
return trans.fill_template( '/webapps/tool_shed/repository/view_repository.mako',
diff -r 203623b931507dcd182e2521911ab036021faf61 -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -226,7 +226,7 @@
admin_only=admin_only )
if ok:
# Update the repository files for browsing.
- suc.update_repository( repo )
+ hg_util.update_repository( repo )
# Get the new repository tip.
if tip == repository.tip( trans.app ):
message = 'No changes to repository. '
diff -r 203623b931507dcd182e2521911ab036021faf61 -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -2,6 +2,7 @@
Manage automatic installation of tools configured in the xxx.xml files in ~/scripts/migrate_tools (e.g., 0002_tools.xml).
All of the tools were at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool shed.
"""
+import json
import os
import shutil
import tempfile
@@ -9,12 +10,11 @@
import logging
from galaxy import util
from galaxy.tools import ToolSection
-from galaxy.util.json import from_json_string
-from galaxy.util.json import to_json_string
import tool_shed.util.shed_util_common as suc
from tool_shed.util import common_install_util
from tool_shed.util import common_util
from tool_shed.util import datatype_util
+from tool_shed.util import hg_util
from tool_shed.util import metadata_util
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
@@ -505,8 +505,10 @@
tool_shed_repository.owner,
tool_shed_repository.installed_changeset_revision )
if not cloned_ok:
- suc.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.install_model.ToolShedRepository.installation_status.CLONING )
- cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
+ suc.update_tool_shed_repository_status( self.app,
+ tool_shed_repository,
+ self.app.install_model.ToolShedRepository.installation_status.CLONING )
+ cloned_ok, error_message = hg_util.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok and not is_installed:
self.handle_repository_contents( tool_shed_repository=tool_shed_repository,
repository_clone_url=repository_clone_url,
@@ -525,7 +527,7 @@
( self.tool_shed_url, tool_shed_repository.name, self.repository_owner, tool_shed_repository.installed_changeset_revision )
text = common_util.tool_shed_get( self.app, self.tool_shed_url, url )
if text:
- tool_version_dicts = from_json_string( text )
+ tool_version_dicts = json.loads( text )
tool_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
else:
# Set the tool versions since they seem to be missing for this repository in the tool shed.
diff -r 203623b931507dcd182e2521911ab036021faf61 -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -560,14 +560,17 @@
if isinstance( repo_info_dict, basestring ):
repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
# Clone each repository to the configured location.
- suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.install_model.ToolShedRepository.installation_status.CLONING )
+ suc.update_tool_shed_repository_status( trans.app,
+ tool_shed_repository,
+ trans.install_model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
- relative_clone_dir = suc.generate_tool_shed_repository_install_dir( repository_clone_url, tool_shed_repository.installed_changeset_revision )
+ relative_clone_dir = suc.generate_tool_shed_repository_install_dir( repository_clone_url,
+ tool_shed_repository.installed_changeset_revision )
clone_dir = os.path.join( tool_path, relative_clone_dir )
relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
install_dir = os.path.join( tool_path, relative_install_dir )
- cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
+ cloned_ok, error_message = hg_util.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
if reinstalling:
# Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated.
@@ -577,7 +580,7 @@
if current_ctx_rev != ctx_rev:
repo = hg.repository( hg_util.get_configured_ui(), path=os.path.abspath( install_dir ) )
pull_repository( repo, repository_clone_url, current_changeset_revision )
- suc.update_repository( repo, ctx_rev=current_ctx_rev )
+ hg_util.update_repository( repo, ctx_rev=current_ctx_rev )
handle_repository_contents( trans,
tool_shed_repository=tool_shed_repository,
tool_path=tool_path,
diff -r 203623b931507dcd182e2521911ab036021faf61 -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 lib/tool_shed/scripts/check_repositories_for_functional_tests.py
--- a/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
+++ b/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
@@ -112,7 +112,7 @@
repo_dir = repository.repo_path( app )
repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-cafr" )
- cloned_ok, error_message = suc.clone_repository( repo_dir, work_dir, changeset_revision )
+ cloned_ok, error_message = hg_util.clone_repository( repo_dir, work_dir, changeset_revision )
if cloned_ok:
# Iterate through all the directories in the cloned changeset revision and determine whether there's a
# directory named test-data. If this directory is not present update the metadata record for the changeset
diff -r 203623b931507dcd182e2521911ab036021faf61 -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 lib/tool_shed/util/export_util.py
--- a/lib/tool_shed/util/export_util.py
+++ b/lib/tool_shed/util/export_util.py
@@ -39,7 +39,7 @@
def archive_repository_revision( trans, ui, repository, archive_dir, changeset_revision ):
'''Create an un-versioned archive of a repository.'''
repo = hg.repository( hg_util.get_configured_ui(), repository.repo_path( trans.app ) )
- options_dict = suc.get_mercurial_default_options_dict( 'archive' )
+ options_dict = hg_util.get_mercurial_default_options_dict( 'archive' )
options_dict[ 'rev' ] = changeset_revision
error_message = ''
return_code = None
diff -r 203623b931507dcd182e2521911ab036021faf61 -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 lib/tool_shed/util/hg_util.py
--- a/lib/tool_shed/util/hg_util.py
+++ b/lib/tool_shed/util/hg_util.py
@@ -3,6 +3,7 @@
from time import gmtime
from time import strftime
+from galaxy.util import listify
from galaxy import eggs
eggs.require( 'mercurial' )
@@ -11,6 +12,26 @@
from mercurial import hg
from mercurial import ui
+log = logging.getLogger( __name__ )
+
+def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
+ """
+ Clone the repository up to the specified changeset_revision. No subsequent revisions will be
+ present in the cloned repository.
+ """
+ try:
+ commands.clone( get_configured_ui(),
+ str( repository_clone_url ),
+ dest=str( repository_file_dir ),
+ pull=True,
+ noupdate=False,
+ rev=listify( str( ctx_rev ) ) )
+ return True, None
+ except Exception, e:
+ error_message = 'Error cloning repository: %s' % str( e )
+ log.debug( error_message )
+ return False, error_message
+
def get_changectx_for_changeset( repo, changeset_revision, **kwd ):
"""Retrieve a specified changectx from a repository."""
for changeset in repo.changelog:
@@ -28,7 +49,19 @@
# quiet = True
_ui.setconfig( 'ui', 'quiet', True )
return _ui
-
+
+def get_mercurial_default_options_dict( command, command_table=None, **kwd ):
+ '''Borrowed from repoman - get default parameters for a mercurial command.'''
+ if command_table is None:
+ command_table = commands.table
+ possible = cmdutil.findpossible( command, command_table )
+ if len( possible ) != 1:
+ raise Exception, 'unable to find mercurial command "%s"' % command
+ default_options_dict = dict( ( r[ 1 ].replace( '-', '_' ), r[ 2 ] ) for r in possible[ possible.keys()[ 0 ] ][ 1 ][ 1 ] )
+ for option in kwd:
+ default_options_dict[ option ] = kwd[ option ]
+ return default_options_dict
+
def get_readable_ctx_date( ctx ):
"""Convert the date of the changeset (the received ctx) to a human-readable date."""
t, tz = ctx.date()
@@ -106,3 +139,21 @@
rev = '-1'
label = "-1:%s" % changeset_revision
return rev, label
+
+def update_repository( repo, ctx_rev=None ):
+ """
+ Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
+ changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
+ """
+ # TODO: We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
+ # The codes used to show the status of files are as follows.
+ # M = modified
+ # A = added
+ # R = removed
+ # C = clean
+ # ! = deleted, but still tracked
+ # ? = not tracked
+ # I = ignored
+ # It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that
+ # purging is not supported by the mercurial API.
+ commands.update( get_configured_ui(), repo, rev=ctx_rev )
diff -r 203623b931507dcd182e2521911ab036021faf61 -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -1799,7 +1799,7 @@
current_changeset_revision = str( repo.changectx( changeset ) )
ctx = repo.changectx( changeset )
log.debug( "Cloning repository changeset revision: %s", str( ctx.rev() ) )
- cloned_ok, error_message = suc.clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
+ cloned_ok, error_message = hg_util.clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
if cloned_ok:
log.debug( "Generating metadata for changset revision: %s", str( ctx.rev() ) )
current_metadata_dict, invalid_tups = generate_metadata_for_changeset_revision( app=trans.app,
@@ -1811,7 +1811,8 @@
resetting_all_metadata_on_repository=True,
updating_installed_repository=False,
persist=False )
- # We'll only display error messages for the repository tip (it may be better to display error messages for each installable changeset revision).
+ # We'll only display error messages for the repository tip (it may be better to display error
+ # messages for each installable changeset revision).
if current_changeset_revision == repository.tip( trans.app ):
invalid_file_tups.extend( invalid_tups )
if current_metadata_dict:
diff -r 203623b931507dcd182e2521911ab036021faf61 -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -25,10 +25,7 @@
from galaxy import eggs
eggs.require( 'mercurial' )
-from mercurial import cmdutil
-from mercurial import commands
from mercurial import hg
-from mercurial import ui
eggs.require( 'markupsafe' )
import markupsafe
@@ -139,15 +136,6 @@
return repository_metadata.malicious
return False
-def changeset_is_valid( app, repository, changeset_revision ):
- """Make sure a changeset hash is valid for a specified repository."""
- repo = hg.repository( hg_util.get_configured_ui(), repository.repo_path( app ) )
- for changeset in repo.changelog:
- changeset_hash = str( repo.changectx( changeset ) )
- if changeset_revision == changeset_hash:
- return True
- return False
-
def check_or_update_tool_shed_status_for_installed_repository( trans, repository ):
updated = False
tool_shed_status_dict = get_tool_shed_status_for_installed_repository( trans.app, repository )
@@ -162,21 +150,6 @@
ok = False
return ok, updated
-def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
- """Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository."""
- try:
- commands.clone( hg_util.get_configured_ui(),
- str( repository_clone_url ),
- dest=str( repository_file_dir ),
- pull=True,
- noupdate=False,
- rev=util.listify( str( ctx_rev ) ) )
- return True, None
- except Exception, e:
- error_message = 'Error cloning repository: %s' % str( e )
- log.debug( error_message )
- return False, error_message
-
def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
"""Persist the current in-memory list of config_elems to a file named by the value of config_filename."""
fd, filename = tempfile.mkstemp( prefix="tmp-toolshed-cetxf" )
@@ -655,18 +628,6 @@
return changeset_revisions[ -1 ]
return INITIAL_CHANGELOG_HASH
-def get_mercurial_default_options_dict( command, command_table=None, **kwd ):
- '''Borrowed from repoman - get default parameters for a mercurial command.'''
- if command_table is None:
- command_table = commands.table
- possible = cmdutil.findpossible( command, command_table )
- if len( possible ) != 1:
- raise Exception, 'unable to find mercurial command "%s"' % command
- default_options_dict = dict( ( r[ 1 ].replace( '-', '_' ), r[ 2 ] ) for r in possible[ possible.keys()[ 0 ] ][ 1 ][ 1 ] )
- for option in kwd:
- default_options_dict[ option ] = kwd[ option ]
- return default_options_dict
-
def get_named_tmpfile_from_ctx( ctx, filename, dir ):
"""Return a named temporary file created from a specified file with a given name included in a repository changeset revision."""
filename = strip_path( filename )
@@ -1784,24 +1745,6 @@
config_elems.append( elem )
config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
-def update_repository( repo, ctx_rev=None ):
- """
- Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
- changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
- """
- # TODO: We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
- # The codes used to show the status of files are as follows.
- # M = modified
- # A = added
- # R = removed
- # C = clean
- # ! = deleted, but still tracked
- # ? = not tracked
- # I = ignored
- # It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that
- # purging is not supported by the mercurial API.
- commands.update( hg_util.get_configured_ui(), repo, rev=ctx_rev )
-
def update_tool_shed_repository_status( app, tool_shed_repository, status, error_message=None ):
"""Update the status of a tool shed repository in the process of being installed into Galaxy."""
context = app.install_model.context
diff -r 203623b931507dcd182e2521911ab036021faf61 -r d1dd573cdce2bd122cbb0e98e947dc7992fe3ed0 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -19,6 +19,7 @@
from galaxy.web import security
from tool_shed.util.encoding_util import tool_shed_encode
from tool_shed.util import shed_util_common as suc
+from tool_shed.util import hg_util
from tool_shed.util import xml_util
from galaxy import eggs
@@ -260,7 +261,7 @@
def clone_repository( self, repository, destination_path ):
url = '%s/repos/%s/%s' % ( self.url, repository.user.username, repository.name )
- success, message = suc.clone_repository( url, destination_path, self.get_repository_tip( repository ) )
+ success, message = hg_util.clone_repository( url, destination_path, self.get_repository_tip( repository ) )
assert success is True, message
def commit_and_push( self, repository, hgrepo, options, username, password ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
[View Less]
1
0
commit/galaxy-central: greg: Fix for the execute_step() method in the SetEnvironment class.
by commits-noreply@bitbucket.org 24 Apr '14
by commits-noreply@bitbucket.org 24 Apr '14
24 Apr '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/203623b93150/
Changeset: 203623b93150
User: greg
Date: 2014-04-24 21:11:43
Summary: Fix for the execute_step() method in the SetEnvironment class.
Affected #: 1 file
diff -r 99056011d2dbbfa64f8b9ab56871e1d4da279f69 -r 203623b931507dcd182e2521911ab036021faf61 lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
@@ -450,14 +450,20 @@
env_var_dicts = action_dict[ 'environment_variable' ]
for env_var_dict in env_var_dicts:
# Check for the presence of the $ENV[] key string and populate it if possible.
- env_var_dict = self.handle_environment_variables( app, tool_dependency, install_dir, env_var_dict, cmds )
+ env_var_dict = self.handle_environment_variables( app=app,
+ install_environment=install_environment,
+ tool_dependency=tool_dependency,
+ install_dir=install_dir,
+ env_var_dict=env_var_dict,
+ set_prior_environment_commands=cmds )
env_file_builder.append_line( **env_var_dict )
# The caller should check the status of the returned tool_dependency since return_code is not
# returned by this function.
return_code = env_file_builder.return_code
return tool_dependency, None, None
- def handle_environment_variables( self, app, tool_dependency, install_dir, env_var_dict, set_prior_environment_commands ):
+ def handle_environment_variables( self, app, install_environment, tool_dependency, install_dir, env_var_dict,
+ set_prior_environment_commands ):
"""
This method works with with a combination of three tool dependency definition tag sets, which are defined
in the tool_dependencies.xml file in the order discussed here. The example for this discussion is the
@@ -539,7 +545,6 @@
set_prior_environment_commands.append( 'echo %s: $%s' % ( inherited_env_var_name, inherited_env_var_name ) )
command = ' ; '.join( set_prior_environment_commands )
# Run the command and capture the output.
- install_environment = recipe_manager.InstallEnvironment()
command_return = install_environment.handle_command( app, tool_dependency, install_dir, command, return_output=True )
# And extract anything labeled with the name of the environment variable we're populating here.
if '%s: ' % inherited_env_var_name in command_return:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
[View Less]
1
0
commit/galaxy-central: greg: Fix tool shed unit tests I broke with my last commit.
by commits-noreply@bitbucket.org 24 Apr '14
by commits-noreply@bitbucket.org 24 Apr '14
24 Apr '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/99056011d2db/
Changeset: 99056011d2db
User: greg
Date: 2014-04-24 19:45:04
Summary: Fix tool shed unit tests I broke with my last commit.
Affected #: 2 files
diff -r ffafb6350791453b31c841e5462314bcd04ffa88 -r 99056011d2dbbfa64f8b9ab56871e1d4da279f69 test/unit/tool_shed_unit_tests/test_fabric_util.py
--- a/test/unit/tool_shed_unit_tests/test_fabric_util.py
+++ b/test/unit/tool_shed_unit_tests/test_fabric_util.py
@@ -1,6 +1,5 @@
from contextlib import contextmanager
-from tool_shed.galaxy_install.recipe.recipe_manager import EnvFileBuilder
-from tool_shed.galaxy_install.tool_dependencies import fabric_util
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import EnvFileBuilder
def test_env_file_builder():
@@ -10,10 +9,10 @@
mock_return = dict(value=0)
def mock_file_append( text, file_path, **kwds ):
- added_lines.append(text)
- return mock_return["value"]
+ added_lines.append( text )
+ return mock_return[ "value" ]
- with __mock_fabric_util_method("file_append", mock_file_append):
+ with __mock_env_file_builder_method( env_file_builder, "file_append", mock_file_append ):
env_file_builder.append_line( name="PATH", action="prepend_to", value="/usr/bin/local/R" )
assert added_lines == [ "PATH=/usr/bin/local/R:$PATH; export PATH" ]
assert env_file_builder.return_code == 0
@@ -37,10 +36,10 @@
## Poor man's mocking. Need to get a real mocking library as real Galaxy development
## dependnecy.
@contextmanager
-def __mock_fabric_util_method(name, mock_method):
- real_method = getattr(fabric_util, name)
+def __mock_env_file_builder_method( env_file_builder, name, mock_method ):
+ real_method = getattr( env_file_builder, name )
try:
- setattr(fabric_util, name, mock_method)
+ setattr( env_file_builder, name, mock_method )
yield
finally:
- setattr(fabric_util, name, real_method)
+ setattr( env_file_builder, name, real_method )
diff -r ffafb6350791453b31c841e5462314bcd04ffa88 -r 99056011d2dbbfa64f8b9ab56871e1d4da279f69 test/unit/tool_shed_unit_tests/test_td_common_util.py
--- a/test/unit/tool_shed_unit_tests/test_td_common_util.py
+++ b/test/unit/tool_shed_unit_tests/test_td_common_util.py
@@ -4,7 +4,8 @@
from tool_shed.galaxy_install.tool_dependencies import fabric_util
from tool_shed.galaxy_install.tool_dependencies import td_common_util
-from tool_shed.galaxy_install.recipe.recipe_manager import EnvFileBuilder
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import EnvFileBuilder
+
TEST_DEPENDENCIES_DIR = "/opt/galaxy/dependencies"
TEST_INSTALL_DIR = "%s/test_install_dir" % TEST_DEPENDENCIES_DIR
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
[View Less]
1
0
commit/galaxy-central: davebgx: Get tool dependency status from the install model.
by commits-noreply@bitbucket.org 24 Apr '14
by commits-noreply@bitbucket.org 24 Apr '14
24 Apr '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ffafb6350791/
Changeset: ffafb6350791
User: davebgx
Date: 2014-04-24 17:24:24
Summary: Get tool dependency status from the install model.
Affected #: 1 file
diff -r cf9cd1027e2a7e3f29201125fbbacf929ef55d0b -r ffafb6350791453b31c841e5462314bcd04ffa88 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -86,7 +86,7 @@
install_dir=install_dir,
current_dir=current_dir,
initial_download=False )
- if tool_dependency.status in [ app.model.ToolDependency.installation_status.ERROR ]:
+ if tool_dependency.status in [ app.install_model.ToolDependency.installation_status.ERROR ]:
# If the tool_dependency status is in an error state, return it with no additional
# processing.
return tool_dependency
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
[View Less]
1
0
commit/galaxy-central: davebgx: Update tool shed twilltestcase methods to reflect changes in the base twilltestcase's visit_url method. Fix functional tests for the tool shed. Update test tool dependencies to not actually compile EMBOSS and freebayes.
by commits-noreply@bitbucket.org 24 Apr '14
by commits-noreply@bitbucket.org 24 Apr '14
24 Apr '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/cf9cd1027e2a/
Changeset: cf9cd1027e2a
User: davebgx
Date: 2014-04-24 17:23:03
Summary: Update tool shed twilltestcase methods to reflect changes in the base twilltestcase's visit_url method. Fix functional tests for the tool shed. Update test tool dependencies to not actually compile EMBOSS and freebayes.
Affected #: 4 files
diff -r f1cb12e876c659c456c0b10f577494f79211b7f4 -r cf9cd1027e2a7e3f29201125fbbacf929ef55d0b test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -284,30 +284,6 @@
category = test_db_util.get_category_by_name( kwd[ 'name' ] )
return category
- def create_checkbox_query_string( self, field_name, value ):
- '''
- From galaxy.web.form_builder.CheckboxField:
- The hidden field is necessary because if the check box is not checked on the form, it will
- not be included in the request params. The hidden field ensure that this will happen. When
- parsing the request, the value 'true' in the hidden field actually means it is NOT checked.
- See the is_checked() method below. The prefix is necessary in each case to ensure functional
- correctness when the param is inside a conditional.
-
- This may look strange upon initial inspection, but see the comments in the get_html() method
- above for clarification. Basically, if value is not True, then it will always be a list with
- 2 input fields ( a checkbox and a hidden field ) if the checkbox is checked. If it is not
- checked, then value will be only the hidden field.
-
- The create_checkbox_query_string method emulates the described behavior with URL query parameters.
- This is currently necessary because twill does not correctly parse certain forms, so the test
- method has to visit the intended form target "manually".
- '''
- field_value = str( value ).lower()
- if value:
- return '%s=%s&%s=%s' % ( field_name, field_value, field_name, field_value )
- else:
- return '%s=%s' % ( field_name, field_value )
-
def create_repository_dependency( self,
repository=None,
repository_tuples=[],
@@ -547,7 +523,8 @@
def enable_email_alerts( self, repository, strings_displayed=[], strings_not_displayed=[] ):
repository_id = self.security.encode_id( repository.id )
- self.visit_url( '/repository/browse_repositories?operation=Receive+email+alerts&id=%s' % repository_id )
+ params = dict( operation='Receive email alerts', id=repository_id )
+ self.visit_url( '/repository/browse_repositories' )
self.check_for_strings( strings_displayed )
def escape_html( self, string, unescape=False ):
@@ -816,15 +793,19 @@
strings_not_displayed = []
self.visit_url( '/admin/roles' )
self.check_for_strings( strings_displayed, strings_not_displayed )
- url = '/admin/roles?operation=manage+users+and+groups&id=%s' % self.security.encode_id( role.id )
+ params = dict( operation='manage users and groups', id=self.security.encode_id( role.id ) )
+ url = '/admin/roles'
self.visit_url( url )
strings_displayed = [ common.test_user_1_email, common.test_user_2_email ]
self.check_for_strings( strings_displayed, strings_not_displayed )
# As elsewhere, twill limits the possibility of submitting the form, this time due to not executing the javascript
# attached to the role selection form. Visit the action url directly with the necessary parameters.
- url = '/admin/manage_users_and_groups_for_role?id=%s&in_users=%d&operation=manage+users+and+groups&role_members_edit_button=Save' % \
- ( self.security.encode_id( role.id ), user.id )
- self.visit_url( url )
+ params = dict( id=self.security.encode_id( role.id ),
+ in_users=user.id,
+ operation='manage users and groups',
+ role_members_edit_button='Save' )
+ url = '/admin/manage_users_and_groups_for_role'
+ self.visit_url( url, params )
strings_displayed = [ "Role '%s' has been updated" % role.name ]
self.check_for_strings( strings_displayed, strings_not_displayed )
@@ -1050,8 +1031,9 @@
self.check_for_strings( strings_displayed, strings_not_displayed )
def reactivate_repository( self, installed_repository ):
- url = '/admin_toolshed/browse_repositories?operation=activate+or+reinstall&id=%s' % self.security.encode_id( installed_repository.id )
- self.visit_galaxy_url( url )
+ params = dict( operation='activate or reinstall', id=self.security.encode_id( installed_repository.id ) )
+ url = '/admin_toolshed/browse_repositories'
+ self.visit_galaxy_url( url, params )
strings_displayed = [ installed_repository.name, 'repository has been activated' ]
self.check_for_strings( strings_displayed, [] )
@@ -1068,12 +1050,12 @@
self.check_for_strings( strings_displayed, strings_not_displayed=[] )
# Build the url that will simulate a filled-out form being submitted. Due to a limitation in twill, the reselect_tool_panel_section
# form doesn't get parsed correctly.
- repo_dependencies = self.create_checkbox_query_string( field_name='install_repository_dependencies', value=install_repository_dependencies )
- tool_dependencies = self.create_checkbox_query_string( field_name='install_tool_dependencies', value=install_tool_dependencies )
encoded_repository_id = self.security.encode_id( installed_repository.id )
- url = '/admin_toolshed/reinstall_repository?id=%s&%s&%s&no_changes=%s&new_tool_panel_section_label=%s' % \
- ( encoded_repository_id, repo_dependencies, tool_dependencies, str( no_changes ), new_tool_panel_section_label )
- self.visit_galaxy_url( url )
+ params = dict( id=encoded_repository_id, no_changes=no_changes, new_tool_panel_section_label=new_tool_panel_section_label )
+ checkbox_params = dict( install_repository_dependencies=install_repository_dependencies,
+ install_tool_dependencies=install_tool_dependencies )
+ url = '/admin_toolshed/reinstall_repository'
+ self.visit_galaxy_url( url, params=params, checkbox_params=checkbox_params )
# Manually initiate the install process, as with installing a repository. See comments in the
# initiate_installation_process method for details.
repository_ids = self.initiate_installation_process( install_tool_dependencies,
@@ -1429,8 +1411,9 @@
for tool in installed_repository.metadata[ 'tools' ]:
strings = list( strings_displayed )
strings.extend( [ tool[ 'id' ], tool[ 'description' ], tool[ 'version' ], tool[ 'guid' ], tool[ 'name' ] ] )
- url = '/admin_toolshed/view_tool_metadata?repository_id=%s&tool_id=%s' % ( repository_id, urllib.quote_plus( tool[ 'id' ] ) )
- self.visit_galaxy_url( url )
+ params = dict( repository_id=repository_id, tool_id=tool[ 'id' ] )
+ url = '/admin_toolshed/view_tool_metadata'
+ self.visit_galaxy_url( url, params )
self.check_for_strings( strings, strings_not_displayed )
def verify_unchanged_repository_metadata( self, repository ):
@@ -1451,9 +1434,9 @@
self.visit_galaxy_url( url )
self.check_for_strings( strings, strings_not_displayed )
- def visit_galaxy_url( self, url ):
+ def visit_galaxy_url( self, url, params=None, checkbox_params=None ):
url = '%s%s' % ( self.galaxy_url, url )
- self.visit_url( url )
+ self.visit_url( url, params=params, checkbox_params=checkbox_params )
def wait_for_repository_installation( self, repository_ids ):
final_states = [ galaxy_model.ToolShedRepository.installation_status.ERROR,
diff -r f1cb12e876c659c456c0b10f577494f79211b7f4 -r cf9cd1027e2a7e3f29201125fbbacf929ef55d0b test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
--- a/test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
+++ b/test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
@@ -111,8 +111,9 @@
convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
strings_displayed=[ 'Handle repository dependencies', 'convert_chars_1087', self.get_repository_tip( convert_repository ) ]
# Due to twill's limitations, only check for strings on the (redirected) reselect tool panel section page, don't actually reinstall.
- url = '/admin_toolshed/browse_repositories?operation=activate+or+reinstall&id=%s' % self.security.encode_id( installed_column_repository.id )
- self.visit_galaxy_url( url )
+ params = dict( operation='activate or reinstall', id=self.security.encode_id( installed_column_repository.id ) )
+ url = '/admin_toolshed/browse_repositories'
+ self.visit_galaxy_url( url, params )
self.check_for_strings( strings_displayed )
strings_not_displayed = [ 'column_maker_1087' ]
- self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
\ No newline at end of file
diff -r f1cb12e876c659c456c0b10f577494f79211b7f4 -r cf9cd1027e2a7e3f29201125fbbacf929ef55d0b test/tool_shed/test_data/emboss/emboss.tar
Binary file test/tool_shed/test_data/emboss/emboss.tar has changed
diff -r f1cb12e876c659c456c0b10f577494f79211b7f4 -r cf9cd1027e2a7e3f29201125fbbacf929ef55d0b test/tool_shed/test_data/freebayes/tool_dependencies.xml
--- a/test/tool_shed/test_data/freebayes/tool_dependencies.xml
+++ b/test/tool_shed/test_data/freebayes/tool_dependencies.xml
@@ -3,13 +3,7 @@
<package name="freebayes" version="0.9.4_9696d0ce8a962f7bb61c4791be5ce44312b81cf8"><install version="1.0"><actions>
- <action type="shell_command">git clone --recursive git://github.com/ekg/freebayes.git</action>
- <action type="shell_command">git reset --hard 9696d0ce8a962f7bb61c4791be5ce44312b81cf8</action>
- <action type="shell_command">make</action>
- <action type="move_directory_files">
- <source_directory>bin</source_directory>
- <destination_directory>$INSTALL_DIR/bin</destination_directory>
- </action>
+ <action type="shell_command">echo "Success."</action><action type="set_environment"><environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable></action>
@@ -23,17 +17,7 @@
<package name="samtools" version="0.1.18"><install version="1.0"><actions>
- <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1…</action>
- <action type="shell_command">sed -i .bak -e 's/-lcurses/-lncurses/g' Makefile</action>
- <action type="shell_command">make</action>
- <action type="move_file">
- <source>samtools</source>
- <destination>$INSTALL_DIR/bin</destination>
- </action>
- <action type="move_file">
- <source>misc/maq2sam-long</source>
- <destination>$INSTALL_DIR/bin</destination>
- </action>
+ <action type="shell_command">echo "Success."</action><action type="set_environment"><environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable></action>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
[View Less]
1
0
commit/galaxy-central: davebgx: Fix typo in step handler.
by commits-noreply@bitbucket.org 24 Apr '14
by commits-noreply@bitbucket.org 24 Apr '14
24 Apr '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f1cb12e876c6/
Changeset: f1cb12e876c6
User: davebgx
Date: 2014-04-24 17:06:59
Summary: Fix typo in step handler.
Affected #: 1 file
diff -r 3b6e94371b8a8e996cc8e317de8cbfe45790b94e -r f1cb12e876c659c456c0b10f577494f79211b7f4 lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
@@ -592,7 +592,7 @@
install_environment.add_env_shell_file_paths( action_dict[ 'env_shell_file_paths' ] )
return tool_dependency, None, None
- def prepare_step( self, app, tool_dependency, action_elem, action_dict, install_dir, is_binary_downloa ):
+ def prepare_step( self, app, tool_dependency, action_elem, action_dict, install_dir, is_binary_download ):
# <action type="set_environment_for_install">
# <repository toolshed="http://localhost:9009/" name="package_numpy_1_7" owner="test" changeset_revision="c84c6a8be056">
# <package name="numpy" version="1.7.1" />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
[View Less]
1
0
commit/galaxy-central: dannon: Add simplejson back again until 'anyjson' changes are pushed through bioblend.
by commits-noreply@bitbucket.org 24 Apr '14
by commits-noreply@bitbucket.org 24 Apr '14
24 Apr '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3b6e94371b8a/
Changeset: 3b6e94371b8a
User: dannon
Date: 2014-04-24 17:03:23
Summary: Add simplejson back again until 'anyjson' changes are pushed through bioblend.
Affected #: 1 file
diff -r de54998e192b99bd70751d692969b508463e1805 -r 3b6e94371b8a8e996cc8e317de8cbfe45790b94e eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -61,6 +61,7 @@
pytz = 2013.9
requests = 2.2.1
Routes = 1.12.3
+simplejson = 2.1.1
sqlalchemy_migrate = 0.7.2
ssh = 1.7.14
SVGFig = 1.1.6
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
[View Less]
1
0
24 Apr '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/de54998e192b/
Changeset: de54998e192b
User: greg
Date: 2014-04-24 16:46:56
Summary: Add a new recipe manager to support tool dependency package installation. Create new classes for each recipe step where classes contain both a prepare_step() and an execute_step() function. This implementation breaks up the very large loops in install_util.py (where recipe steps are prepared) and fabric_util.py (where recipe steps are executed). The recipe manager currently loads all of the recipe step classes when it is instantiated - this approach could be tweaked if desired.
Affected #: 11 files
diff -r 369bb0f7ebef50a72f36cf2f74d23f7d351d61c4 -r de54998e192b99bd70751d692969b508463e1805 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -1,765 +1,64 @@
-# For Python 2.5
-from __future__ import with_statement
-
import logging
import os
-import Queue
-import shutil
-import stat
-import subprocess
-import sys
-import tempfile
-import td_common_util
-import threading
-import time
-import shlex
-from contextlib import contextmanager
-from galaxy.util import DATABASE_MAX_STRING_SIZE
-from galaxy.util import DATABASE_MAX_STRING_SIZE_PRETTY
-from galaxy.util import shrink_string_by_size
-from galaxy.util import unicodify
-from galaxy.util.template import fill_template
from galaxy import eggs
-eggs.require( 'ssh' )
-eggs.require( 'paramiko' )
eggs.require( 'Fabric' )
-from fabric import state
from fabric.api import env
-from fabric.api import hide
from fabric.api import lcd
-from fabric.api import settings
-from fabric.api import prefix
-from fabric.operations import _AttributeString
+
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import EnvFileBuilder
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import InstallEnvironment
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import RecipeManager
log = logging.getLogger( __name__ )
-INSTALLATION_LOG = 'INSTALLATION.log'
-VIRTUALENV_URL = 'https://pypi.python.org/packages/source/v/virtualenv/virtualenv-1.9.1.tar.gz'
-
-
-class AsynchronousReader( threading.Thread ):
- """
- A helper class to implement asynchronous reading of a stream in a separate thread. Read lines are pushed
- onto a queue to be consumed in another thread.
- """
-
- def __init__( self, fd, queue ):
- threading.Thread.__init__( self )
- self._fd = fd
- self._queue = queue
- self.lines = []
-
- def run( self ):
- """Read lines and put them on the queue."""
- thread_lock = threading.Lock()
- thread_lock.acquire()
- for line in iter( self._fd.readline, '' ):
- stripped_line = line.rstrip()
- self.lines.append( stripped_line )
- self._queue.put( stripped_line )
- thread_lock.release()
-
- def installation_complete( self ):
- """Make sure there is more installation and compilation logging content expected."""
- return not self.is_alive() and self._queue.empty()
-
-
-class EnvFileBuilder( object ):
-
- def __init__( self, install_dir ):
- self.install_dir = install_dir
- self.return_code = 0
-
- def append_line( self, make_executable=True, **kwd ):
- env_var_dict = dict( **kwd )
- env_entry, env_file = self.create_or_update_env_shell_file( self.install_dir, env_var_dict )
- return_code = file_append( env_entry, env_file, make_executable=make_executable )
- self.return_code = self.return_code or return_code
- return self.return_code
-
- @staticmethod
- def create_or_update_env_shell_file( install_dir, env_var_dict ):
- env_var_action = env_var_dict[ 'action' ]
- env_var_value = env_var_dict[ 'value' ]
- if env_var_action in [ 'prepend_to', 'set_to', 'append_to' ]:
- env_var_name = env_var_dict[ 'name' ]
- if env_var_action == 'prepend_to':
- changed_value = '%s:$%s' % ( env_var_value, env_var_name )
- elif env_var_action == 'set_to':
- changed_value = '%s' % env_var_value
- elif env_var_action == 'append_to':
- changed_value = '$%s:%s' % ( env_var_name, env_var_value )
- line = "%s=%s; export %s" % ( env_var_name, changed_value, env_var_name )
- elif env_var_action == "source":
- line = "if [ -f %s ] ; then . %s ; fi" % ( env_var_value, env_var_value )
- else:
- raise Exception( "Unknown shell file action %s" % env_var_action )
- env_shell_file_path = os.path.join( install_dir, 'env.sh' )
- return line, env_shell_file_path
-
-
-class InstallEnvironment( object ):
- """Object describing the environment built up as part of the process of building and installing a package."""
-
- def add_env_shell_file_paths( self, paths ):
- for path in paths:
- self.env_shell_file_paths.append( str( path ) )
-
- def build_command( self, command, action_type='shell_command' ):
- """
- Build command line for execution from simple command, but
- configuring environment described by this object.
- """
- env_cmds = self.environment_commands( action_type )
- return '\n'.join( env_cmds + [ command ] )
-
- def __call__( self, install_dir ):
- with settings( warn_only=True, **td_common_util.get_env_var_values( install_dir ) ):
- with prefix( self.__setup_environment() ):
- yield
-
- def environment_commands( self, action_type ):
- """Build a list of commands used to construct the environment described by this object."""
- cmds = []
- for env_shell_file_path in self.env_shell_file_paths:
- if os.path.exists( env_shell_file_path ):
- for env_setting in open( env_shell_file_path ):
- cmds.append( env_setting.strip( '\n' ) )
- else:
- log.debug( 'Invalid file %s specified, ignoring %s action.' % ( str( env_shell_file_path ), str( action_type ) ) )
- return cmds
-
- def environment_dict( self, action_type='template_command' ):
- env_vars = dict()
- for env_shell_file_path in self.env_shell_file_paths:
- if os.path.exists( env_shell_file_path ):
- for env_setting in open( env_shell_file_path ):
- env_string = env_setting.split( ';' )[ 0 ]
- env_name, env_path = env_string.split( '=' )
- env_vars[ env_name ] = env_path
- else:
- log.debug( 'Invalid file %s specified, ignoring template_command action.' % str( env_shell_file_path ) )
- return env_vars
-
- def __init__( self ):
- self.env_shell_file_paths = []
-
- def __setup_environment( self ):
- return "&&".join( [ ". %s" % file for file in self.__valid_env_shell_file_paths() ] )
-
- def __valid_env_shell_file_paths( self ):
- return [ file for file in self.env_shell_file_paths if os.path.exists( file ) ]
+INSTALL_ACTIONS = [ 'download_binary', 'download_by_url', 'download_file', 'setup_perl_environmnet',
+ 'setup_r_environmnet', 'setup_ruby_environmnet', 'shell_command' ]
def check_fabric_version():
version = env.version
if int( version.split( "." )[ 0 ] ) < 1:
raise NotImplementedError( "Install Fabric version 1.0 or later." )
-def close_file_descriptor( fd ):
- """Attempt to close a file descriptor."""
- start_timer = time.time()
- error = ''
- while True:
- try:
- fd.close()
- break
- except IOError, e:
- # Undoubtedly close() was called during a concurrent operation on the same file object.
- log.debug( 'Error closing file descriptor: %s' % str( e ) )
- time.sleep( .5 )
- current_wait_time = time.time() - start_timer
- if current_wait_time >= 600:
- error = 'Error closing file descriptor: %s' % str( e )
- break
- return error
-
-def enqueue_output( stdout, stdout_queue, stderr, stderr_queue ):
- """
- This method places streamed stdout and stderr into a threaded IPC queue target. Received data
- is printed and saved to that thread's queue. The calling thread can then retrieve the data using
- thread.stdout and thread.stderr.
- """
- stdout_logger = logging.getLogger( 'fabric_util.STDOUT' )
- stderr_logger = logging.getLogger( 'fabric_util.STDERR' )
- for line in iter( stdout.readline, '' ):
- output = line.rstrip()
- stdout_logger.debug( output )
- stdout_queue.put( output )
- stdout_queue.put( None )
- for line in iter( stderr.readline, '' ):
- output = line.rstrip()
- stderr_logger.debug( output )
- stderr_queue.put( output )
- stderr_queue.put( None )
-
-def file_append( text, file_path, make_executable=True ):
- """
- Append a line to a file unless the line already exists in the file. This method creates the file if
- it doesn't exist. If make_executable is True, the permissions on the file are set to executable by
- the owner.
- """
- file_dir = os.path.dirname( file_path )
- if not os.path.exists( file_dir ):
- try:
- os.makedirs( file_dir )
- except Exception, e:
- log.exception( str( e ) )
- return 1
- if os.path.exists( file_path ):
- try:
- new_env_file_contents = []
- env_file_contents = file( file_path, 'r' ).readlines()
- # Clean out blank lines from the env.sh file.
- for line in env_file_contents:
- line = line.rstrip()
- if line:
- new_env_file_contents.append( line )
- env_file_contents = new_env_file_contents
- except Exception, e:
- log.exception( str( e ) )
- return 1
- else:
- env_file_handle = open( file_path, 'w' )
- env_file_handle.close()
- env_file_contents = []
- if make_executable:
- # Explicitly set the file's executable bits.
- try:
- os.chmod( file_path, int( '111', base=8 ) | os.stat( file_path )[ stat.ST_MODE ] )
- except Exception, e:
- log.exception( str( e ) )
- return 1
- # Convert the received text to a list, in order to support adding one or more lines to the file.
- if isinstance( text, basestring ):
- text = [ text ]
- for line in text:
- line = line.rstrip()
- if line and line not in env_file_contents:
- env_file_contents.append( line )
- try:
- file( file_path, 'w' ).write( '\n'.join( env_file_contents ) )
- except Exception, e:
- log.exception( str( e ) )
- return 1
- return 0
-
-def filter_actions_after_binary_installation( actions ):
- '''Filter out actions that should not be processed if a binary download succeeded.'''
- filtered_actions = []
- for action in actions:
- action_type, action_dict = action
- if action_type in [ 'set_environment', 'chmod', 'download_binary' ]:
- filtered_actions.append( action )
- return filtered_actions
-
-def handle_action_shell_file_paths( env_file_builder, action_dict ):
- shell_file_paths = action_dict.get( 'action_shell_file_paths', [] )
- for shell_file_path in shell_file_paths:
- env_file_builder.append_line( action="source", value=shell_file_path )
-
-def handle_command( app, tool_dependency, install_dir, cmd, return_output=False ):
- """
- Handle a command by determining if it is "simple" or "complex" and redirecting appropriately and then
- logging the results.
- """
- context = app.install_model.context
- command = str( cmd )
- output = handle_complex_command( command )
- log_results( cmd, output, os.path.join( install_dir, INSTALLATION_LOG ) )
- stdout = output.stdout
- stderr = output.stderr
- if len( stdout ) > DATABASE_MAX_STRING_SIZE:
- print "Length of stdout > %s, so only a portion will be saved in the database." % str( DATABASE_MAX_STRING_SIZE_PRETTY )
- stdout = shrink_string_by_size( stdout, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
- if len( stderr ) > DATABASE_MAX_STRING_SIZE:
- print "Length of stderr > %s, so only a portion will be saved in the database." % str( DATABASE_MAX_STRING_SIZE_PRETTY )
- stderr = shrink_string_by_size( stderr, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
- if output.return_code not in [ 0 ]:
- tool_dependency.status = app.install_model.ToolDependency.installation_status.ERROR
- if stderr:
- tool_dependency.error_message = unicodify( stderr )
- elif stdout:
- tool_dependency.error_message = unicodify( stdout )
- else:
- # We have a problem if there was no stdout and no stderr.
- tool_dependency.error_message = "Unknown error occurred executing shell command %s, return_code: %s" % \
- ( str( cmd ), str( output.return_code ) )
- context.add( tool_dependency )
- context.flush()
- if return_output:
- return output
- return output.return_code
-
-def handle_complex_command( command ):
- """
- Wrap subprocess.Popen in such a way that the stderr and stdout from running a shell command will
- be captured and logged in nearly real time. This is similar to fabric.local, but allows us to
- retain control over the process. This method is named "complex" because it uses queues and
- threads to execute a command while capturing and displaying the output.
- """
- # Launch the command as subprocess. A bufsize of 1 means line buffered.
- process_handle = subprocess.Popen( str( command ),
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- bufsize=1,
- close_fds=False,
- shell=True,
- cwd=state.env[ 'lcwd' ] )
- pid = process_handle.pid
- # Launch the asynchronous readers of the process' stdout and stderr.
- stdout_queue = Queue.Queue()
- stdout_reader = AsynchronousReader( process_handle.stdout, stdout_queue )
- stdout_reader.start()
- stderr_queue = Queue.Queue()
- stderr_reader = AsynchronousReader( process_handle.stderr, stderr_queue )
- stderr_reader.start()
- # Place streamed stdout and stderr into a threaded IPC queue target so it can
- # be printed and stored for later retrieval when generating the INSTALLATION.log.
- stdio_thread = threading.Thread( target=enqueue_output,
- args=( process_handle.stdout,
- stdout_queue,
- process_handle.stderr,
- stderr_queue ) )
- thread_lock = threading.Lock()
- thread_lock.acquire()
- stdio_thread.start()
- # Check the queues for output until there is nothing more to get.
- start_timer = time.time()
- while not stdout_reader.installation_complete() or not stderr_reader.installation_complete():
- # Show what we received from standard output.
- while not stdout_queue.empty():
- try:
- line = stdout_queue.get()
- except Queue.Empty:
- line = None
- break
- if line:
- print line
- start_timer = time.time()
- else:
- break
- # Show what we received from standard error.
- while not stderr_queue.empty():
- try:
- line = stderr_queue.get()
- except Queue.Empty:
- line = None
- break
- if line:
- print line
- start_timer = time.time()
- else:
- stderr_queue.task_done()
- break
- # Sleep a bit before asking the readers again.
- time.sleep( .1 )
- current_wait_time = time.time() - start_timer
- if stdout_queue.empty() and stderr_queue.empty() and current_wait_time > td_common_util.NO_OUTPUT_TIMEOUT:
- err_msg = "\nShutting down process id %s because it generated no output for the defined timeout period of %.1f seconds.\n" % \
- ( pid, td_common_util.NO_OUTPUT_TIMEOUT )
- stderr_reader.lines.append( err_msg )
- process_handle.kill()
- break
- thread_lock.release()
- # Wait until each of the threads we've started terminate. The following calls will block each thread
- # until it terminates either normally, through an unhandled exception, or until the timeout occurs.
- stdio_thread.join( td_common_util.NO_OUTPUT_TIMEOUT )
- stdout_reader.join( td_common_util.NO_OUTPUT_TIMEOUT )
- stderr_reader.join( td_common_util.NO_OUTPUT_TIMEOUT )
- # Close subprocess' file descriptors.
- error = close_file_descriptor( process_handle.stdout )
- error = close_file_descriptor( process_handle.stderr )
- stdout = '\n'.join( stdout_reader.lines )
- stderr = '\n'.join( stderr_reader.lines )
- # Handle error condition (deal with stdout being None, too)
- output = _AttributeString( stdout.strip() if stdout else "" )
- errors = _AttributeString( stderr.strip() if stderr else "" )
- # Make sure the process has finished.
- process_handle.poll()
- output.return_code = process_handle.returncode
- output.stderr = errors
- return output
-
-def handle_environment_variables( app, tool_dependency, install_dir, env_var_dict, set_prior_environment_commands ):
- """
- This method works with with a combination of three tool dependency definition tag sets, which are defined
- in the tool_dependencies.xml file in the order discussed here. The example for this discussion is the
- tool_dependencies.xml file contained in the osra repository, which is available at:
-
- http://testtoolshed.g2.bx.psu.edu/view/bgruening/osra
-
- The first tag set defines a complex repository dependency like this. This tag set ensures that changeset
- revision XXX of the repository named package_graphicsmagick_1_3 owned by YYY in the tool shed ZZZ has been
- previously installed.
-
- <tool_dependency>
- <package name="graphicsmagick" version="1.3.18">
- <repository changeset_revision="XXX" name="package_graphicsmagick_1_3" owner="YYY" prior_installation_required="True" toolshed="ZZZ" />
- </package>
- ...
-
- * By the way, there is an env.sh file associated with version 1.3.18 of the graphicsmagick package which looks
- something like this (we'll reference this file later in this discussion.
- ----
- GRAPHICSMAGICK_ROOT_DIR=/<my configured tool dependency path>/graphicsmagick/1.3.18/YYY/package_graphicsmagick_1_3/XXX/gmagick;
- export GRAPHICSMAGICK_ROOT_DIR
- ----
-
- The second tag set defines a specific package dependency that has been previously installed (guaranteed by the
- tag set discussed above) and compiled, where the compiled dependency is needed by the tool dependency currently
- being installed (osra version 2.0.0 in this case) and complied in order for its installation and compilation to
- succeed. This tag set is contained within the <package name="osra" version="2.0.0"> tag set, which implies that
- version 2.0.0 of the osra package requires version 1.3.18 of the graphicsmagick package in order to successfully
- compile. When this tag set is handled, one of the effects is that the env.sh file associated with graphicsmagick
- version 1.3.18 is "sourced", which undoubtedly sets or alters certain environment variables (e.g. PATH, PYTHONPATH,
- etc).
-
- <!-- populate the environment variables from the dependent repositories -->
- <action type="set_environment_for_install">
- <repository changeset_revision="XXX" name="package_graphicsmagick_1_3" owner="YYY" toolshed="ZZZ">
- <package name="graphicsmagick" version="1.3.18" />
- </repository>
- </action>
-
- The third tag set enables discovery of the same required package dependency discussed above for correctly compiling
- the osra version 2.0.0 package, but in this case the package can be discovered at tool execution time. Using the
- $ENV[] option as shown in this example, the value of the environment variable named GRAPHICSMAGICK_ROOT_DIR (which
- was set in the environment using the second tag set described above) will be used to automatically alter the env.sh
- file associated with the osra version 2.0.0 tool dependency when it is installed into Galaxy. * Refer to where we
- discussed the env.sh file for version 1.3.18 of the graphicsmagick package above.
-
- <action type="set_environment">
- <environment_variable action="prepend_to" name="LD_LIBRARY_PATH">$ENV[GRAPHICSMAGICK_ROOT_DIR]/lib/</environment_variable>
- <environment_variable action="prepend_to" name="LD_LIBRARY_PATH">$INSTALL_DIR/potrace/build/lib/</environment_variable>
- <environment_variable action="prepend_to" name="PATH">$INSTALL_DIR/bin</environment_variable>
- <!-- OSRA_DATA_FILES is only used by the galaxy wrapper and is not part of OSRA -->
- <environment_variable action="set_to" name="OSRA_DATA_FILES">$INSTALL_DIR/share</environment_variable>
- </action>
-
- The above tag will produce an env.sh file for version 2.0.0 of the osra package when it it installed into Galaxy
- that looks something like this. Notice that the path to the gmagick binary is included here since it expands the
- defined $ENV[GRAPHICSMAGICK_ROOT_DIR] value in the above tag set.
-
- ----
- LD_LIBRARY_PATH=/<my configured tool dependency path>/graphicsmagick/1.3.18/YYY/package_graphicsmagick_1_3/XXX/gmagick/lib/:$LD_LIBRARY_PATH;
- export LD_LIBRARY_PATH
- LD_LIBRARY_PATH=/<my configured tool dependency path>/osra/1.4.0/YYY/depends_on/XXX/potrace/build/lib/:$LD_LIBRARY_PATH;
- export LD_LIBRARY_PATH
- PATH=/<my configured tool dependency path>/osra/1.4.0/YYY/depends_on/XXX/bin:$PATH;
- export PATH
- OSRA_DATA_FILES=/<my configured tool dependency path>/osra/1.4.0/YYY/depends_on/XXX/share;
- export OSRA_DATA_FILES
- ----
- """
- env_var_value = env_var_dict[ 'value' ]
- # env_var_value is the text of an environment variable tag like this: <environment_variable action="prepend_to" name="LD_LIBRARY_PATH">
- # Here is an example of what env_var_value could look like: $ENV[GRAPHICSMAGICK_ROOT_DIR]/lib/
- if '$ENV[' in env_var_value and ']' in env_var_value:
- # Pull out the name of the environment variable to populate.
- inherited_env_var_name = env_var_value.split( '[' )[1].split( ']' )[0]
- to_replace = '$ENV[%s]' % inherited_env_var_name
- # Build a command line that outputs VARIABLE_NAME: <the value of the variable>.
- set_prior_environment_commands.append( 'echo %s: $%s' % ( inherited_env_var_name, inherited_env_var_name ) )
- command = ' ; '.join( set_prior_environment_commands )
- # Run the command and capture the output.
- command_return = handle_command( app, tool_dependency, install_dir, command, return_output=True )
- # And extract anything labeled with the name of the environment variable we're populating here.
- if '%s: ' % inherited_env_var_name in command_return:
- environment_variable_value = command_return.split( '\n' )
- for line in environment_variable_value:
- if line.startswith( inherited_env_var_name ):
- inherited_env_var_value = line.replace( '%s: ' % inherited_env_var_name, '' )
- log.info( 'Replacing %s with %s in env.sh for this repository.', to_replace, inherited_env_var_value )
- env_var_value = env_var_value.replace( to_replace, inherited_env_var_value )
- else:
- # If the return is empty, replace the original $ENV[] with nothing, to avoid any shell misparsings later on.
- log.error( 'Environment variable %s not found, removing from set_environment.', inherited_env_var_name )
- env_var_value = env_var_value.replace( to_replace, '$%s' % inherited_env_var_name )
- env_var_dict[ 'value' ] = env_var_value
- return env_var_dict
-
-def install_virtualenv( app, venv_dir ):
- if not os.path.exists( venv_dir ):
- with make_tmp_dir() as work_dir:
- downloaded_filename = VIRTUALENV_URL.rsplit('/', 1)[-1]
- try:
- dir = td_common_util.url_download( work_dir, downloaded_filename, VIRTUALENV_URL )
- except:
- log.error( "Failed to download virtualenv: td_common_util.url_download( '%s', '%s', '%s' ) threw an exception", work_dir, downloaded_filename, VIRTUALENV_URL )
- return False
- full_path_to_dir = os.path.abspath( os.path.join( work_dir, dir ) )
- shutil.move( full_path_to_dir, venv_dir )
- return True
-
def install_and_build_package( app, tool_dependency, actions_dict ):
"""Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
install_dir = actions_dict[ 'install_dir' ]
package_name = actions_dict[ 'package_name' ]
actions = actions_dict.get( 'actions', None )
filtered_actions = []
+ env_file_builder = EnvFileBuilder( install_dir )
install_environment = InstallEnvironment()
+ recipe_manager = RecipeManager()
if actions:
- with make_tmp_dir() as work_dir:
+ with install_environment.make_tmp_dir() as work_dir:
with lcd( work_dir ):
- # The first action in the list of actions will be the one that defines the installation process. There
- # are currently three supported processes; download_binary, download_by_url and clone via a "shell_command"
- # action type.
+ # The first action in the list of actions will be the one that defines the initial download process.
+ # There are currently three supported actions; download_binary, download_by_url and clone via a
+ # shell_command action type. The recipe steps will be filtered at this stage in the process, with
+ # the filtered actions being used in the next stage below. The installation directory (i.e., dir)
+ # is also defined in this stage and is used in the next stage below when defining current_dir.
action_type, action_dict = actions[ 0 ]
- if action_type == 'download_binary':
- url = action_dict[ 'url' ]
- # Get the target directory for this download, if the user has specified one. Default to the root of $INSTALL_DIR.
- target_directory = action_dict.get( 'target_directory', None )
- # Attempt to download a binary from the specified URL.
- log.debug( 'Attempting to download from %s to %s', url, str( target_directory ) )
- downloaded_filename = None
- try:
- downloaded_filename = td_common_util.download_binary( url, work_dir )
- # Filter out any actions that are not download_binary, chmod, or set_environment.
- filtered_actions = filter_actions_after_binary_installation( actions[ 1: ] )
- # Set actions to the same, so that the current download_binary doesn't get re-run in the
- # filtered actions below.
- actions = filtered_actions
- except Exception, e:
- log.exception( str( e ) )
- # No binary exists, or there was an error downloading the binary from the generated URL.
- # Proceed with the remaining actions.
- filtered_actions = actions[ 1: ]
- action_type, action_dict = filtered_actions[ 0 ]
- # If the downloaded file exists, move it to $INSTALL_DIR. Put this outside the try/catch above so that
- # any errors in the move step are correctly sent to the tool dependency error handler.
- if downloaded_filename and os.path.exists( os.path.join( work_dir, downloaded_filename ) ):
- if target_directory:
- target_directory = os.path.realpath( os.path.normpath( os.path.join( install_dir, target_directory ) ) )
- # Make sure the target directory is not outside of $INSTALL_DIR.
- if target_directory.startswith( os.path.realpath( install_dir ) ):
- full_path_to_dir = os.path.abspath( os.path.join( install_dir, target_directory ) )
- else:
- full_path_to_dir = os.path.abspath( install_dir )
- else:
- full_path_to_dir = os.path.abspath( install_dir )
- td_common_util.move_file( current_dir=work_dir,
- source=downloaded_filename,
- destination=full_path_to_dir )
- if action_type == 'download_by_url':
- # Eliminate the download_by_url action so remaining actions can be processed correctly.
- filtered_actions = actions[ 1: ]
- url = action_dict[ 'url' ]
- is_binary = action_dict.get( 'is_binary', False )
- log.debug( 'Attempting to download via url: %s', url )
- if 'target_filename' in action_dict:
- # Sometimes compressed archives extract their content to a folder other than the default
- # defined file name. Using this attribute will ensure that the file name is set appropriately
- # and can be located after download, decompression and extraction.
- downloaded_filename = action_dict[ 'target_filename' ]
- else:
- downloaded_filename = os.path.split( url )[ -1 ]
- dir = td_common_util.url_download( work_dir, downloaded_filename, url, extract=True )
- if is_binary:
- log_file = os.path.join( install_dir, INSTALLATION_LOG )
- if os.path.exists( log_file ):
- logfile = open( log_file, 'ab' )
- else:
- logfile = open( log_file, 'wb' )
- logfile.write( 'Successfully downloaded from url: %s\n' % action_dict[ 'url' ] )
- logfile.close()
- log.debug( 'Successfully downloaded from url: %s' % action_dict[ 'url' ] )
- elif action_type == 'shell_command':
- # <action type="shell_command">git clone --recursive git://github.com/ekg/freebayes.git</action>
- # Eliminate the shell_command clone action so remaining actions can be processed correctly.
- filtered_actions = actions[ 1: ]
- return_code = handle_command( app, tool_dependency, install_dir, action_dict[ 'command' ] )
- if return_code:
- return tool_dependency
- dir = package_name
- elif action_type == 'download_file':
- # <action type="download_file">http://effectors.org/download/version/TTSS_GUI-1.0.1.jar</action>
- # Download a single file to the working directory.
- filtered_actions = actions[ 1: ]
- url = action_dict[ 'url' ]
- if 'target_filename' in action_dict:
- # Sometimes compressed archives extracts their content to a folder other than the default
- # defined file name. Using this attribute will ensure that the file name is set appropriately
- # and can be located after download, decompression and extraction.
- filename = action_dict[ 'target_filename' ]
- else:
- filename = url.split( '/' )[ -1 ]
- td_common_util.url_download( work_dir, filename, url )
- dir = os.path.curdir
- elif action_type == 'setup_r_environment':
- # setup an R environment
- # <action type="setup_r_environment">
- # <repository name="package_r_3_0_1" owner="bgruening">
- # <package name="R" version="3.0.1" />
- # </repository>
- # <!-- allow installing an R packages -->
- # <package>https://github.com/bgruening/download_store/raw/master/DESeq2-1_0_18/BiocGe…</package>
- # </action>
- filtered_actions = actions[ 1: ]
- env_shell_file_paths = action_dict.get( 'env_shell_file_paths', None )
- if env_shell_file_paths is None:
- log.debug( 'Missing R environment. Please check your specified R installation exists.' )
- return tool_dependency
- else:
- install_environment.add_env_shell_file_paths( env_shell_file_paths )
- log.debug( 'Handling setup_r_environment for tool dependency %s with install_environment.env_shell_file_paths:\n%s' % \
- ( str( tool_dependency.name ), str( install_environment.env_shell_file_paths ) ) )
- tarball_names = []
- for url in action_dict[ 'r_packages' ]:
- filename = url.split( '/' )[ -1 ]
- tarball_names.append( filename )
- td_common_util.url_download( work_dir, filename, url, extract=False )
- dir = os.path.curdir
- current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
- with lcd( current_dir ):
- with settings( warn_only=True ):
- for tarball_name in tarball_names:
- # Use raw strings so that python won't automatically unescape the quotes before passing the command
- # to subprocess.Popen.
- cmd = r'''PATH=$PATH:$R_HOME/bin; export PATH; R_LIBS=$INSTALL_DIR; export R_LIBS;
- Rscript -e "install.packages(c('%s'),lib='$INSTALL_DIR', repos=NULL, dependencies=FALSE)"''' % \
- ( str( tarball_name ) )
- cmd = install_environment.build_command( td_common_util.evaluate_template( cmd, install_dir ) )
- return_code = handle_command( app, tool_dependency, install_dir, cmd )
- if return_code:
- return tool_dependency
- # R libraries are installed to $INSTALL_DIR (install_dir), we now set the R_LIBS path to that directory
- env_file_builder = EnvFileBuilder( install_dir )
- # Pull in R environment (runtime).
- handle_action_shell_file_paths( env_file_builder, action_dict )
- env_file_builder.append_line( name="R_LIBS", action="prepend_to", value=install_dir )
- return_code = env_file_builder.return_code
- if return_code:
- return tool_dependency
- elif action_type == 'setup_ruby_environment':
- # setup an Ruby environment
- # <action type="setup_ruby_environment">
- # <repository name="package_ruby_2_0" owner="bgruening">
- # <package name="ruby" version="2.0" />
- # </repository>
- # <!-- allow downloading and installing an Ruby package from http://rubygems.org/ -->
- # <package>protk</package>
- # <package>protk=1.2.4</package>
- # <package>http://url-to-some-gem-file.de/protk.gem</package>
- # </action>
- filtered_actions = actions[ 1: ]
- env_shell_file_paths = action_dict.get( 'env_shell_file_paths', None )
- if env_shell_file_paths is None:
- log.debug( 'Missing Ruby environment, make sure your specified Ruby installation exists.' )
- return tool_dependency
- else:
- install_environment.add_env_shell_file_paths( env_shell_file_paths )
- log.debug( 'Handling setup_ruby_environment for tool dependency %s with install_environment.env_shell_file_paths:\n%s' % \
- ( str( tool_dependency.name ), str( install_environment.env_shell_file_paths ) ) )
- dir = os.path.curdir
- current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
- with lcd( current_dir ):
- with settings( warn_only=True ):
- ruby_package_tups = action_dict.get( 'ruby_package_tups', [] )
- for ruby_package_tup in ruby_package_tups:
- gem, gem_version = ruby_package_tup
- if os.path.isfile( gem ):
- # we assume a local shipped gem file
- cmd = '''PATH=$PATH:$RUBY_HOME/bin; export PATH; GEM_HOME=$INSTALL_DIR; export GEM_HOME;
- gem install --local %s''' % ( gem )
- elif gem.find( '://' ) != -1:
- # We assume a URL to a gem file.
- url = gem
- gem_name = url.split( '/' )[ -1 ]
- td_common_util.url_download( work_dir, gem_name, url, extract=False )
- cmd = '''PATH=$PATH:$RUBY_HOME/bin; export PATH; GEM_HOME=$INSTALL_DIR; export GEM_HOME;
- gem install --local %s ''' % ( gem_name )
- else:
- # gem file from rubygems.org with or without version number
- if gem_version:
- # Specific ruby gem version was requested.
- # Use raw strings so that python won't automatically unescape the quotes before passing the command
- # to subprocess.Popen.
- cmd = r'''PATH=$PATH:$RUBY_HOME/bin; export PATH; GEM_HOME=$INSTALL_DIR; export GEM_HOME;
- gem install %s --version "=%s"''' % ( gem, gem_version)
- else:
- # no version number given
- cmd = '''PATH=$PATH:$RUBY_HOME/bin; export PATH; GEM_HOME=$INSTALL_DIR; export GEM_HOME;
- gem install %s''' % ( gem )
- cmd = install_environment.build_command( td_common_util.evaluate_template( cmd, install_dir ) )
- return_code = handle_command( app, tool_dependency, install_dir, cmd )
- if return_code:
- return tool_dependency
- env_file_builder = EnvFileBuilder( install_dir )
- # Pull in ruby dependencies (runtime).
- handle_action_shell_file_paths( env_file_builder, action_dict )
- env_file_builder.append_line( name="GEM_PATH", action="prepend_to", value=install_dir )
- env_file_builder.append_line( name="PATH", action="prepend_to", value=os.path.join(install_dir, 'bin') )
- return_code = env_file_builder.return_code
- if return_code:
- return tool_dependency
- elif action_type == 'setup_perl_environment':
- # setup an Perl environment
- # <action type="setup_perl_environment">
- # <repository name="package_perl_5_18" owner="bgruening">
- # <package name="perl" version="5.18.1" />
- # </repository>
- # <!-- allow downloading and installing an Perl package from cpan.org-->
- # <package>XML::Parser</package>
- # <package>http://search.cpan.org/CPAN/authors/id/C/CJ/CJFIELDS/BioPerl-1.6.922.tar.gz</package>
- # </action>
- filtered_actions = actions[ 1: ]
- env_shell_file_paths = action_dict.get( 'env_shell_file_paths', None )
- if env_shell_file_paths is None:
- log.debug( 'Missing Rerl environment, make sure your specified Rerl installation exists.' )
- return tool_dependency
- else:
- install_environment.add_env_shell_file_paths( env_shell_file_paths )
- log.debug( 'Handling setup_perl_environment for tool dependency %s with install_environment.env_shell_file_paths:\n%s' % \
- ( str( tool_dependency.name ), str( install_environment.env_shell_file_paths ) ) )
- dir = os.path.curdir
- current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
- with lcd( current_dir ):
- with settings( warn_only=True ):
- perl_packages = action_dict.get( 'perl_packages', [] )
- for perl_package in perl_packages:
- # If set to a true value then MakeMaker's prompt function will always
- # return the default without waiting for user input.
- cmd = '''PERL_MM_USE_DEFAULT=1; export PERL_MM_USE_DEFAULT; '''
- if perl_package.find( '://' ) != -1:
- # We assume a URL to a gem file.
- url = perl_package
- perl_package_name = url.split( '/' )[ -1 ]
- dir = td_common_util.url_download( work_dir, perl_package_name, url, extract=True )
- # Search for Build.PL or Makefile.PL (ExtUtils::MakeMaker vs. Module::Build).
- tmp_work_dir = os.path.join( work_dir, dir )
- if os.path.exists( os.path.join( tmp_work_dir, 'Makefile.PL' ) ):
- cmd += '''perl Makefile.PL INSTALL_BASE=$INSTALL_DIR && make && make install'''
- elif os.path.exists( os.path.join( tmp_work_dir, 'Build.PL' ) ):
- cmd += '''perl Build.PL --install_base $INSTALL_DIR && perl Build && perl Build install'''
- else:
- log.debug( 'No Makefile.PL or Build.PL file found in %s. Skipping installation of %s.' % \
- ( url, perl_package_name ) )
- return tool_dependency
- with lcd( tmp_work_dir ):
- cmd = install_environment.build_command( td_common_util.evaluate_template( cmd, install_dir ) )
- return_code = handle_command( app, tool_dependency, install_dir, cmd )
- if return_code:
- return tool_dependency
- else:
- # perl package from CPAN without version number.
- # cpanm should be installed with the parent perl distribution, otherwise this will not work.
- cmd += '''cpanm --local-lib=$INSTALL_DIR %s''' % ( perl_package )
- cmd = install_environment.build_command( td_common_util.evaluate_template( cmd, install_dir ) )
- return_code = handle_command( app, tool_dependency, install_dir, cmd )
- if return_code:
- return tool_dependency
- env_file_builder = EnvFileBuilder( install_dir )
- # Pull in perl dependencies (runtime).
- handle_action_shell_file_paths( env_file_builder, action_dict )
- # Recursively add dependent PERL5LIB and PATH to env.sh & anything else needed.
- env_file_builder.append_line( name="PERL5LIB", action="prepend_to", value=os.path.join( install_dir, 'lib', 'perl5' ) )
- env_file_builder.append_line( name="PATH", action="prepend_to", value=os.path.join( install_dir, 'bin' ) )
- return_code = env_file_builder.return_code
- if return_code:
- return tool_dependency
+ if action_type in INSTALL_ACTIONS:
+ # Some of the parameters passed here are needed only by a subset of the step handler classes,
+ # but to allow for a standard method signature we'll pass them along. We don't check the
+ # tool_dependency status in this stage because it should not have been changed based on a
+ # download.
+ tool_dependency, filtered_actions, dir = \
+ recipe_manager.execute_step( app=app,
+ tool_dependency=tool_dependency,
+ package_name=package_name,
+ actions=actions,
+ action_type=action_type,
+ action_dict=action_dict,
+ filtered_actions=filtered_actions,
+ env_file_builder=env_file_builder,
+ install_environment=install_environment,
+ work_dir=work_dir,
+ install_dir=install_dir,
+ current_dir=None,
+ initial_download=True )
else:
# We're handling a complex repository dependency where we only have a set_environment tag set.
# <action type="set_environment">
@@ -767,207 +66,33 @@
# </action>
filtered_actions = [ a for a in actions ]
dir = install_dir
- full_path_to_dir = os.path.abspath( os.path.join( work_dir, dir ) )
- if not os.path.exists( full_path_to_dir ):
- os.makedirs( full_path_to_dir )
- # The package has been down-loaded, so we can now perform all of the actions defined for building it.
+ # We're in stage 2 of the installation process. The package has been downloaded, so we can
+ # now perform all of the actions defined for building it.
for action_tup in filtered_actions:
current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
with lcd( current_dir ):
action_type, action_dict = action_tup
- if action_type == 'make_directory':
- if os.path.isabs( action_dict[ 'full_path' ] ):
- full_path = action_dict[ 'full_path' ]
- else:
- full_path = os.path.join( current_dir, action_dict[ 'full_path' ] )
- td_common_util.make_directory( full_path=full_path )
- elif action_type == 'move_directory_files':
- td_common_util.move_directory_files( current_dir=current_dir,
- source_dir=os.path.join( action_dict[ 'source_directory' ] ),
- destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) )
- elif action_type == 'move_file':
- td_common_util.move_file( current_dir=current_dir,
- source=os.path.join( action_dict[ 'source' ] ),
- destination=os.path.join( action_dict[ 'destination' ] ),
- rename_to=action_dict[ 'rename_to' ] )
- elif action_type == 'set_environment':
- # Currently the only action supported in this category is "environment_variable".
- # Build a command line from the prior_installation_required, in case an environment variable is referenced
- # in the set_environment action.
- cmds = install_environment.environment_commands( 'set_environment' )
- env_var_dicts = action_dict[ 'environment_variable' ]
- env_file_builder = EnvFileBuilder( install_dir )
- for env_var_dict in env_var_dicts:
- # Check for the presence of the $ENV[] key string and populate it if possible.
- env_var_dict = handle_environment_variables( app, tool_dependency, install_dir, env_var_dict, cmds )
- env_file_builder.append_line( **env_var_dict )
- return_code = env_file_builder.return_code
- if return_code:
- return tool_dependency
- elif action_type == 'set_environment_for_install':
- # Currently the only action supported in this category is a list of paths to one or more tool dependency env.sh files,
- # the environment setting in each of which will be injected into the environment for all <action type="shell_command">
- # tags that follow this <action type="set_environment_for_install"> tag set in the tool_dependencies.xml file.
- install_environment.add_env_shell_file_paths( action_dict[ 'env_shell_file_paths' ] )
- elif action_type == 'setup_virtualenv':
- # TODO: maybe should be configurable
- venv_src_directory = os.path.abspath( os.path.join( app.config.tool_dependency_dir, '__virtualenv_src' ) )
- if not install_virtualenv( app, venv_src_directory ):
- log.error( 'Unable to install virtualenv' )
- return tool_dependency
- requirements = action_dict[ 'requirements' ]
- if os.path.exists( os.path.join( dir, requirements ) ):
- # requirements specified as path to a file
- requirements_path = requirements
- else:
- # requirements specified directly in XML, create a file with these for pip.
- requirements_path = os.path.join( install_dir, "requirements.txt" )
- with open( requirements_path, "w" ) as f:
- f.write( requirements )
- venv_directory = os.path.join( install_dir, "venv" )
- # TODO: Consider making --no-site-packages optional.
- setup_command = "python %s/virtualenv.py --no-site-packages '%s'" % (venv_src_directory, venv_directory)
- # POSIXLY_CORRECT forces shell commands . and source to have the same
- # and well defined behavior in bash/zsh.
- activate_command = "POSIXLY_CORRECT=1; . %s" % os.path.join( venv_directory, "bin", "activate" )
- install_command = "python '%s' install -r '%s'" % ( os.path.join( venv_directory, "bin", "pip" ), requirements_path )
- full_setup_command = "%s; %s; %s" % ( setup_command, activate_command, install_command )
- return_code = handle_command( app, tool_dependency, install_dir, full_setup_command )
- if return_code:
- return tool_dependency
- # Use raw strings so that python won't automatically unescape the quotes before passing the command
- # to subprocess.Popen.
- site_packages_command = r"""%s -c 'import os, sys; print os.path.join(sys.prefix, "lib", "python" + sys.version[:3], "site-packages")'""" % os.path.join( venv_directory, "bin", "python" )
- output = handle_command( app, tool_dependency, install_dir, site_packages_command, return_output=True )
- if output.return_code:
- return tool_dependency
- if not os.path.exists( output.stdout ):
- log.debug( "virtualenv's site-packages directory '%s' does not exist", output.stdout )
- return tool_dependency
- env_file_builder = EnvFileBuilder( install_dir )
- env_file_builder.append_line( name="PYTHONPATH", action="prepend_to", value=output.stdout )
- env_file_builder.append_line( name="PATH", action="prepend_to", value=os.path.join( venv_directory, "bin" ) )
- return_code = env_file_builder.return_code
- if return_code:
- return tool_dependency
- elif action_type == 'shell_command':
- with settings( warn_only=True ):
- cmd = install_environment.build_command( action_dict[ 'command' ] )
- return_code = handle_command( app, tool_dependency, install_dir, cmd )
- if return_code:
- return tool_dependency
- elif action_type == 'template_command':
- env_vars = dict()
- env_vars = install_environment.environment_dict()
- env_vars.update( td_common_util.get_env_var_values( install_dir ) )
- language = action_dict[ 'language' ]
- with settings( warn_only=True, **env_vars ):
- if language == 'cheetah':
- # We need to import fabric.api.env so that we can access all collected environment variables.
- cmd = fill_template( '#from fabric.api import env\n%s' % action_dict[ 'command' ], context=env_vars )
- return_code = handle_command( app, tool_dependency, install_dir, cmd )
- if return_code:
- return tool_dependency
- elif action_type == 'make_install':
- # make; make install; allow providing make options
- with settings( warn_only=True ):
- make_opts = action_dict.get( 'make_opts', '' )
- cmd = install_environment.build_command( 'make %s && make install' % make_opts )
- return_code = handle_command( app, tool_dependency, install_dir, cmd )
- if return_code:
- return tool_dependency
- elif action_type == 'autoconf':
- # Handle configure, make and make install allow providing configuration options
- with settings( warn_only=True ):
- configure_opts = action_dict.get( 'configure_opts', '' )
- if 'prefix=' in configure_opts:
- pre_cmd = './configure %s && make && make install' % configure_opts
- else:
- pre_cmd = './configure --prefix=$INSTALL_DIR %s && make && make install' % configure_opts
- cmd = install_environment.build_command( td_common_util.evaluate_template( pre_cmd, install_dir ) )
- return_code = handle_command( app, tool_dependency, install_dir, cmd )
- if return_code:
- return tool_dependency
- elif action_type == 'download_file':
- # Download a single file to the current working directory.
- url = action_dict[ 'url' ]
- if 'target_filename' in action_dict:
- filename = action_dict[ 'target_filename' ]
- else:
- filename = url.split( '/' )[ -1 ]
- extract = action_dict.get( 'extract', False )
- td_common_util.url_download( current_dir, filename, url, extract=extract )
- elif action_type == 'change_directory':
- target_directory = os.path.realpath( os.path.normpath( os.path.join( current_dir, action_dict[ 'directory' ] ) ) )
- if target_directory.startswith( os.path.realpath( current_dir ) ) and os.path.exists( target_directory ):
- # Change directory to a directory within the current working directory.
- dir = target_directory
- elif target_directory.startswith( os.path.realpath( work_dir ) ) and os.path.exists( target_directory ):
- # Change directory to a directory above the current working directory, but within the defined work_dir.
- dir = target_directory.replace( os.path.realpath( work_dir ), '' ).lstrip( '/' )
- else:
- log.error( 'Invalid or nonexistent directory %s specified, ignoring change_directory action.', target_directory )
- elif action_type == 'chmod':
- for target_file, mode in action_dict[ 'change_modes' ]:
- if os.path.exists( target_file ):
- os.chmod( target_file, mode )
- else:
- log.error( 'Invalid file %s specified, ignoring %s action.', target_file, action_type )
- elif action_type == 'download_binary':
- url = action_dict[ 'url' ]
- target_directory = action_dict.get( 'target_directory', None )
- try:
- downloaded_filename = td_common_util.download_binary( url, work_dir )
- except Exception, e:
- log.exception( str( e ) )
- # If the downloaded file exists, move it to $INSTALL_DIR. Put this outside the try/catch above so that
- # any errors in the move step are correctly sent to the tool dependency error handler.
- if downloaded_filename and os.path.exists( os.path.join( work_dir, downloaded_filename ) ):
- if target_directory:
- target_directory = os.path.realpath( os.path.normpath( os.path.join( install_dir, target_directory ) ) )
- # Make sure the target directory is not outside of $INSTALL_DIR.
- if target_directory.startswith( os.path.realpath( install_dir ) ):
- full_path_to_dir = os.path.abspath( os.path.join( install_dir, target_directory ) )
- else:
- full_path_to_dir = os.path.abspath( install_dir )
- else:
- full_path_to_dir = os.path.abspath( install_dir )
- td_common_util.move_file( current_dir=work_dir,
- source=downloaded_filename,
- destination=full_path_to_dir )
+ tool_dependency, tmp_filtered_actions, tmp_dir = \
+ recipe_manager.execute_step( app=app,
+ tool_dependency=tool_dependency,
+ package_name=package_name,
+ actions=actions,
+ action_type=action_type,
+ action_dict=action_dict,
+ filtered_actions=filtered_actions,
+ env_file_builder=env_file_builder,
+ install_environment=install_environment,
+ work_dir=work_dir,
+ install_dir=install_dir,
+ current_dir=current_dir,
+ initial_download=False )
+ if tool_dependency.status in [ app.model.ToolDependency.installation_status.ERROR ]:
+ # If the tool_dependency status is in an error state, return it with no additional
+ # processing.
+ return tool_dependency
+ # Make sure to handle the special case where the value of dir is reset (this happens when
+ # the action_type is change_directory). In all other action types, dir will be returned as
+ # None.
+ if tmp_dir is not None:
+ dir = tmp_dir
return tool_dependency
-
-def log_results( command, fabric_AttributeString, file_path ):
- """Write attributes of fabric.operations._AttributeString to a specified log file."""
- if os.path.exists( file_path ):
- logfile = open( file_path, 'ab' )
- else:
- logfile = open( file_path, 'wb' )
- logfile.write( "\n#############################################\n" )
- logfile.write( '%s\nSTDOUT\n' % command )
- logfile.write( str( fabric_AttributeString.stdout ) )
- logfile.write( "\n#############################################\n" )
- logfile.write( "\n#############################################\n" )
- logfile.write( '%s\nSTDERR\n' % command )
- logfile.write( str( fabric_AttributeString.stderr ) )
- logfile.write( "\n#############################################\n" )
- logfile.close()
-
-@contextmanager
-def make_tmp_dir():
- work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-mtd" )
- yield work_dir
- if os.path.exists( work_dir ):
- try:
- shutil.rmtree( work_dir )
- except Exception, e:
- log.exception( str( e ) )
-
-def set_galaxy_environment( galaxy_user, tool_dependency_dir, host='localhost', shell='/bin/bash -l -c' ):
- """General Galaxy environment configuration. This method is not currently used."""
- env.user = galaxy_user
- env.install_dir = tool_dependency_dir
- env.host_string = host
- env.shell = shell
- env.use_sudo = False
- return env
diff -r 369bb0f7ebef50a72f36cf2f74d23f7d351d61c4 -r de54998e192b99bd70751d692969b508463e1805 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -5,9 +5,10 @@
import subprocess
import sys
import tempfile
-from string import Template
import fabric_util
import td_common_util
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import EnvFileBuilder
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import RecipeManager
import tool_shed.util.shed_util_common as suc
from tool_shed.util import common_util
from tool_shed.util import encoding_util
@@ -94,7 +95,7 @@
package_name,
package_version,
required_repository )
- env_file_builder = fabric_util.EnvFileBuilder( tool_dependency.installation_directory( app ) )
+ env_file_builder = EnvFileBuilder( tool_dependency.installation_directory( app ) )
env_file_builder.append_line( action="source", value=required_tool_dependency_env_file_path )
return_code = env_file_builder.return_code
if return_code:
@@ -497,19 +498,24 @@
elif package_elem.tag == 'readme':
# Nothing to be done.
continue
- #elif package_elem.tag == 'proprietary_fabfile':
+ #elif package_elem.tag == 'custom_fabfile':
# # TODO: This is not yet supported or functionally correct...
- # # Handle tool dependency installation where the repository includes one or more proprietary fabric scripts.
+ # # Handle tool dependency installation where the repository includes one or more custom fabric scripts.
# if not fabric_version_checked:
# check_fabric_version()
# fabric_version_checked = True
# fabfile_name = package_elem.get( 'name', None )
- # proprietary_fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) )
- # print 'Installing tool dependencies via fabric script ', proprietary_fabfile_path
+ # custom_fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) )
+ # print 'Installing tool dependencies via fabric script ', custom_fabfile_path
return tool_dependency
-def install_via_fabric( app, tool_dependency, install_dir, package_name=None, proprietary_fabfile_path=None, actions_elem=None, action_elem=None, **kwd ):
- """Parse a tool_dependency.xml file's <actions> tag set to gather information for the installation via fabric."""
+def install_via_fabric( app, tool_dependency, install_dir, package_name=None, custom_fabfile_path=None,
+ actions_elem=None, action_elem=None, **kwd ):
+ """
+ Parse a tool_dependency.xml file's <actions> tag set to gather information for installation using the
+ fabric_util.install_and_build_package() method. The use of fabric is being eliminated, so some of these
+ functions may need to be renamed at some point.
+ """
sa_session = app.install_model.context
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
@@ -517,278 +523,30 @@
if package_name:
actions_dict[ 'package_name' ] = package_name
actions = []
- all_env_shell_file_paths = []
- env_var_dicts = []
+ is_binary_download = False
if actions_elem is not None:
elems = actions_elem
if elems.get( 'os' ) is not None and elems.get( 'architecture' ) is not None:
is_binary_download = True
- else:
- is_binary_download = False
elif action_elem is not None:
# We were provided with a single <action> element to perform certain actions after a platform-specific tarball was downloaded.
elems = [ action_elem ]
else:
elems = []
+ recipe_manager = RecipeManager()
for action_elem in elems:
# Make sure to skip all comments, since they are now included in the XML tree.
if action_elem.tag != 'action':
continue
action_dict = {}
action_type = action_elem.get( 'type', 'shell_command' )
- if action_type == 'download_binary':
- platform_info_dict = tool_dependency_util.get_platform_info_dict()
- platform_info_dict[ 'name' ] = tool_dependency.name
- platform_info_dict[ 'version' ] = tool_dependency.version
- url_template_elems = action_elem.findall( 'url_template' )
- # Check if there are multiple url_template elements, each with attrib entries for a specific platform.
- if len( url_template_elems ) > 1:
- # <base_url os="darwin" extract="false">http://hgdownload.cse.ucsc.edu/admin/exe/macOSX.${architecture}/faToTwoBit</base_url>
- # This method returns the url_elem that best matches the current platform as received from os.uname().
- # Currently checked attributes are os and architecture.
- # These correspond to the values sysname and processor from the Python documentation for os.uname().
- url_template_elem = tool_dependency_util.get_download_url_for_platform( url_template_elems, platform_info_dict )
- else:
- url_template_elem = url_template_elems[ 0 ]
- action_dict[ 'url' ] = Template( url_template_elem.text ).safe_substitute( platform_info_dict )
- action_dict[ 'target_directory' ] = action_elem.get( 'target_directory', None )
- elif action_type == 'shell_command':
- # <action type="shell_command">make</action>
- action_elem_text = td_common_util.evaluate_template( action_elem.text, install_dir )
- if action_elem_text:
- action_dict[ 'command' ] = action_elem_text
- else:
- continue
- elif action_type == 'template_command':
- # Default to Cheetah as it's the first template language supported.
- language = action_elem.get( 'language', 'cheetah' ).lower()
- if language == 'cheetah':
- # Cheetah template syntax.
- # <action type="template_command" language="cheetah">
- # #if env.PATH:
- # make
- # #end if
- # </action>
- action_elem_text = action_elem.text.strip()
- if action_elem_text:
- action_dict[ 'language' ] = language
- action_dict[ 'command' ] = action_elem_text
- else:
- continue
- else:
- log.debug( "Unsupported template language '%s'. Not proceeding." % str( language ) )
- raise Exception( "Unsupported template language '%s' in tool dependency definition." % str( language ) )
- elif action_type == 'download_by_url':
- # <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1…</action>
- if is_binary_download:
- action_dict[ 'is_binary' ] = True
- if action_elem.text:
- action_dict[ 'url' ] = action_elem.text
- target_filename = action_elem.get( 'target_filename', None )
- if target_filename:
- action_dict[ 'target_filename' ] = target_filename
- else:
- continue
- elif action_type == 'download_file':
- # <action type="download_file">http://effectors.org/download/version/TTSS_GUI-1.0.1.jar</action>
- if action_elem.text:
- action_dict[ 'url' ] = action_elem.text
- target_filename = action_elem.get( 'target_filename', None )
- if target_filename:
- action_dict[ 'target_filename' ] = target_filename
- action_dict[ 'extract' ] = asbool( action_elem.get( 'extract', False ) )
- else:
- continue
- elif action_type == 'make_directory':
- # <action type="make_directory">$INSTALL_DIR/lib/python</action>
- if action_elem.text:
- action_dict[ 'full_path' ] = td_common_util.evaluate_template( action_elem.text, install_dir )
- else:
- continue
- elif action_type == 'change_directory':
- # <action type="change_directory">PHYLIP-3.6b</action>
- if action_elem.text:
- action_dict[ 'directory' ] = action_elem.text
- else:
- continue
- elif action_type == 'move_directory_files':
- # <action type="move_directory_files">
- # <source_directory>bin</source_directory>
- # <destination_directory>$INSTALL_DIR/bin</destination_directory>
- # </action>
- for move_elem in action_elem:
- move_elem_text = td_common_util.evaluate_template( move_elem.text, install_dir )
- if move_elem_text:
- action_dict[ move_elem.tag ] = move_elem_text
- elif action_type == 'move_file':
- # <action type="move_file" rename_to="new_file_name">
- # <source>misc/some_file</source>
- # <destination>$INSTALL_DIR/bin</destination>
- # </action>
- action_dict[ 'source' ] = td_common_util.evaluate_template( action_elem.find( 'source' ).text, install_dir )
- action_dict[ 'destination' ] = td_common_util.evaluate_template( action_elem.find( 'destination' ).text, install_dir )
- action_dict[ 'rename_to' ] = action_elem.get( 'rename_to' )
- elif action_type == 'set_environment':
- # <action type="set_environment">
- # <environment_variable name="PYTHONPATH" action="append_to">$INSTALL_DIR/lib/python</environment_variable>
- # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
- # </action>
- for env_elem in action_elem:
- if env_elem.tag == 'environment_variable':
- env_var_dict = td_common_util.create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir )
- if env_var_dict:
- env_var_dicts.append( env_var_dict )
- if env_var_dicts:
- # The last child of an <action type="set_environment"> might be a comment, so manually set it to be 'environment_variable'.
- action_dict[ 'environment_variable' ] = env_var_dicts
- else:
- continue
- elif action_type == 'set_environment_for_install':
- # <action type="set_environment_for_install">
- # <repository toolshed="http://localhost:9009/" name="package_numpy_1_7" owner="test" changeset_revision="c84c6a8be056">
- # <package name="numpy" version="1.7.1" />
- # </repository>
- # </action>
- # This action type allows for defining an environment that will properly compile a tool dependency. Currently, tag set definitions like
- # that above are supported, but in the future other approaches to setting environment variables or other environment attributes can be
- # supported. The above tag set will result in the installed and compiled numpy version 1.7.1 binary to be used when compiling the current
- # tool dependency package. See the package_matplotlib_1_2 repository in the test tool shed for a real-world example.
- for env_elem in action_elem:
- if env_elem.tag == 'repository':
- env_shell_file_paths = td_common_util.get_env_shell_file_paths( app, env_elem )
- if env_shell_file_paths:
- all_env_shell_file_paths.extend( env_shell_file_paths )
- if all_env_shell_file_paths:
- action_dict[ 'env_shell_file_paths' ] = all_env_shell_file_paths
- else:
- continue
- elif action_type == 'setup_virtualenv':
- # <action type="setup_virtualenv" />
- ## Install requirements from file requirements.txt of downloaded bundle - or -
- # <action type="setup_virtualenv">tools/requirements.txt</action>
- ## Install requirements from specified file from downloaded bundle -or -
- # <action type="setup_virtualenv">pyyaml==3.2.0
- # lxml==2.3.0</action>
- ## Manually specify contents of requirements.txt file to create dynamically.
- action_dict[ 'requirements' ] = td_common_util.evaluate_template( action_elem.text or 'requirements.txt', install_dir )
- elif action_type == 'autoconf':
- # Handle configure, make and make install allow providing configuration options
- if action_elem.text:
- configure_opts = td_common_util.evaluate_template( action_elem.text, install_dir )
- action_dict[ 'configure_opts' ] = configure_opts
- elif action_type == 'setup_r_environment':
- # setup an R environment.
- # <action type="setup_r_environment">
- # <repository name="package_r_3_0_1" owner="bgruening">
- # <package name="R" version="3.0.1" />
- # </repository>
- # <!-- allow installing an R packages -->
- # <package>https://github.com/bgruening/download_store/raw/master/DESeq2-1_0_18/BiocGe…</package>
- # </action>
- # Discover all child repository dependency tags and define the path to an env.sh file associated with each repository.
- # This will potentially update the value of the 'env_shell_file_paths' entry in action_dict.
- action_dict = td_common_util.get_env_shell_file_paths_from_setup_environment_elem( app, all_env_shell_file_paths, action_elem, action_dict )
- r_packages = list()
- for env_elem in action_elem:
- if env_elem.tag == 'package':
- r_packages.append( env_elem.text.strip() )
- if r_packages:
- action_dict[ 'r_packages' ] = r_packages
- else:
- continue
- elif action_type == 'setup_ruby_environment':
- # setup a Ruby environment.
- # <action type="setup_ruby_environment">
- # <repository name="package_ruby_2_0" owner="bgruening">
- # <package name="ruby" version="2.0" />
- # </repository>
- # <!-- allow downloading and installing an Ruby package from http://rubygems.org/ -->
- # <package>protk</package>
- # <package>protk=1.2.4</package>
- # <package>http://url-to-some-gem-file.de/protk.gem</package>
- # </action>
- # Discover all child repository dependency tags and define the path to an env.sh file associated with each repository.
- # This will potentially update the value of the 'env_shell_file_paths' entry in action_dict.
- action_dict = td_common_util.get_env_shell_file_paths_from_setup_environment_elem( app, all_env_shell_file_paths, action_elem, action_dict )
- ruby_package_tups = []
- for env_elem in action_elem:
- if env_elem.tag == 'package':
- #A valid gem definition can be:
- # protk=1.2.4
- # protk
- # ftp://ftp.gruening.de/protk.gem
- gem_token = env_elem.text.strip().split( '=' )
- if len( gem_token ) == 2:
- # version string
- gem_name = gem_token[ 0 ]
- gem_version = gem_token[ 1 ]
- ruby_package_tups.append( ( gem_name, gem_version ) )
- else:
- # gem name for rubygems.org without version number
- gem = env_elem.text.strip()
- ruby_package_tups.append( ( gem, None ) )
- if ruby_package_tups:
- action_dict[ 'ruby_package_tups' ] = ruby_package_tups
- else:
- continue
- elif action_type == 'setup_perl_environment':
- # setup a Perl environment.
- # <action type="setup_perl_environment">
- # <repository name="package_perl_5_18" owner="bgruening">
- # <package name="perl" version="5.18.1" />
- # </repository>
- # <!-- allow downloading and installing an Perl package from cpan.org-->
- # <package>XML::Parser</package>
- # <package>http://search.cpan.org/CPAN/authors/id/C/CJ/CJFIELDS/BioPerl-1.6.922.tar.gz</package>
- # </action>
- # Discover all child repository dependency tags and define the path to an env.sh file associated with each repository.
- # This will potentially update the value of the 'env_shell_file_paths' entry in action_dict.
- action_dict = td_common_util.get_env_shell_file_paths_from_setup_environment_elem( app, all_env_shell_file_paths, action_elem, action_dict )
- perl_packages = []
- for env_elem in action_elem:
- if env_elem.tag == 'package':
- # A valid package definition can be:
- # XML::Parser
- # http://search.cpan.org/CPAN/authors/id/C/CJ/CJFIELDS/BioPerl-1.6.922.tar.gz
- # Unfortunately CPAN does not support versioning, so if you want real reproducibility you need to specify
- # the tarball path and the right order of different tarballs manually.
- perl_packages.append( env_elem.text.strip() )
- if perl_packages:
- action_dict[ 'perl_packages' ] = perl_packages
- else:
- continue
- elif action_type == 'make_install':
- # make; make install; allow providing make options
- if action_elem.text:
- make_opts = td_common_util.evaluate_template( action_elem.text, install_dir )
- action_dict[ 'make_opts' ] = make_opts
- elif action_type == 'chmod':
- # Change the read, write, and execute bits on a file.
- # <action type="chmod">
- # <file mode="750">$INSTALL_DIR/bin/faToTwoBit</file>
- # </action>
- file_elems = action_elem.findall( 'file' )
- chmod_actions = []
- # A unix octal mode is the sum of the following values:
- # Owner:
- # 400 Read 200 Write 100 Execute
- # Group:
- # 040 Read 020 Write 010 Execute
- # World:
- # 004 Read 002 Write 001 Execute
- for file_elem in file_elems:
- # So by the above table, owner read/write/execute and group read permission would be 740.
- # Python's os.chmod uses base 10 modes, convert received unix-style octal modes to base 10.
- received_mode = int( file_elem.get( 'mode', 600 ), base=8 )
- # For added security, ensure that the setuid and setgid bits are not set.
- mode = received_mode & ~( stat.S_ISUID | stat.S_ISGID )
- file = td_common_util.evaluate_template( file_elem.text, install_dir )
- chmod_tuple = ( file, mode )
- chmod_actions.append( chmod_tuple )
- action_dict[ 'change_modes' ] = chmod_actions
- else:
- log.debug( "Unsupported action type '%s'. Not proceeding." % str( action_type ) )
- raise Exception( "Unsupported action type '%s' in tool dependency definition." % str( action_type ) )
+ action_dict = recipe_manager.prepare_step( app=app,
+ tool_dependency=tool_dependency,
+ action_type=action_type,
+ action_elem=action_elem,
+ action_dict=action_dict,
+ install_dir=install_dir,
+ is_binary_download=is_binary_download )
action_tuple = ( action_type, action_dict )
if action_type == 'set_environment':
if action_tuple not in actions:
@@ -797,15 +555,15 @@
actions.append( action_tuple )
if actions:
actions_dict[ 'actions' ] = actions
- if proprietary_fabfile_path:
+ if custom_fabfile_path is not None:
# TODO: this is not yet supported or functional, but when it is handle it using the fabric api.
- # run_proprietary_fabric_method( app, elem, proprietary_fabfile_path, install_dir, package_name=package_name )
+ # execute_custom_fabric_script( app, elem, custom_fabfile_path, install_dir, package_name=package_name )
raise Exception( 'Tool dependency installation using proprietary fabric scripts is not yet supported.' )
else:
tool_dependency = install_and_build_package_via_fabric( app, tool_dependency, actions_dict )
return tool_dependency
-def run_proprietary_fabric_method( app, elem, proprietary_fabfile_path, install_dir, package_name=None, **kwd ):
+def execute_custom_fabric_script( app, elem, custom_fabfile_path, install_dir, package_name=None, **kwd ):
"""
TODO: Handle this using the fabric api.
Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method.
@@ -834,10 +592,10 @@
else:
params_str = params_str.rstrip( ',' )
try:
- cmd = 'fab -f %s %s:%s' % ( proprietary_fabfile_path, method_name, params_str )
+ cmd = 'fab -f %s %s:%s' % ( custom_fabfile_path, method_name, params_str )
returncode, message = run_subprocess( app, cmd )
except Exception, e:
- return "Exception executing fabric script %s: %s. " % ( str( proprietary_fabfile_path ), str( e ) )
+ return "Exception executing fabric script %s: %s. " % ( str( custom_fabfile_path ), str( e ) )
if returncode:
return message
handle_environment_settings( app, tool_dependency, install_dir, cmd )
@@ -913,7 +671,7 @@
set_status=True )
if env_var_version == '1.0':
# Create this tool dependency's env.sh file.
- env_file_builder = fabric_util.EnvFileBuilder( install_dir )
+ env_file_builder = EnvFileBuilder( install_dir )
return_code = env_file_builder.append_line( make_executable=True, **env_var_dict )
if return_code:
error_message = 'Error creating env.sh file for tool dependency %s, return_code: %s' % \
diff -r 369bb0f7ebef50a72f36cf2f74d23f7d351d61c4 -r de54998e192b99bd70751d692969b508463e1805 lib/tool_shed/galaxy_install/tool_dependencies/recipe/recipe_manager.py
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/recipe_manager.py
@@ -0,0 +1,449 @@
+import logging
+import os
+import Queue
+import shutil
+import stat
+import subprocess
+import tempfile
+import threading
+import time
+
+from contextlib import contextmanager
+
+# TODO: eliminate the use of fabric here.
+from galaxy import eggs
+eggs.require( 'Fabric' )
+
+from fabric.operations import _AttributeString
+from fabric import state
+from fabric.api import prefix
+
+from galaxy.util import DATABASE_MAX_STRING_SIZE
+from galaxy.util import DATABASE_MAX_STRING_SIZE_PRETTY
+from galaxy.util import shrink_string_by_size
+from galaxy.util import unicodify
+
+from tool_shed.galaxy_install.tool_dependencies import td_common_util
+from tool_shed.galaxy_install.tool_dependencies.recipe import step_handler
+
+log = logging.getLogger( __name__ )
+
+
+class AsynchronousReader( threading.Thread ):
+    """
+    A helper class to implement asynchronous reading of a stream in a separate thread.  Read lines are pushed
+    onto a queue to be consumed in another thread.
+    """
+
+    def __init__( self, fd, queue ):
+        threading.Thread.__init__( self )
+        # File-like object ( e.g. a subprocess pipe ) this thread drains line by line.
+        self._fd = fd
+        # Shared queue onto which each stripped line is pushed for a consumer thread.
+        self._queue = queue
+        # Complete history of every line read, retained for later retrieval by the caller.
+        self.lines = []
+
+    def run( self ):
+        """Read lines from the wrapped stream until EOF, recording each stripped line and queueing it."""
+        # NOTE(review): this lock is local to the method and never shared with any
+        # other thread, so the acquire / release pair has no synchronizing effect.
+        thread_lock = threading.Lock()
+        thread_lock.acquire()
+        for line in iter( self._fd.readline, '' ):
+            stripped_line = line.rstrip()
+            self.lines.append( stripped_line )
+            self._queue.put( stripped_line )
+        thread_lock.release()
+
+    def installation_complete( self ):
+        """Return True when no more installation and compilation logging content is expected, i.e. the reader thread has exited and its queue is drained."""
+        return not self.is_alive() and self._queue.empty()
+
+
+class EnvFileBuilder( object ):
+    """Incrementally builds a tool dependency's env.sh shell file inside install_dir, one line at a time."""
+
+    def __init__( self, install_dir ):
+        # Directory that will contain the generated env.sh file.
+        self.install_dir = install_dir
+        # Sticky error flag: remains non-zero after the first failed append.
+        self.return_code = 0
+
+    def append_line( self, make_executable=True, **kwd ):
+        """
+        Append the shell line described by kwd ( expected keys: action, name, value )
+        to this dependency's env.sh file and return the accumulated return code
+        ( 0 only if every append so far has succeeded ).
+        """
+        env_var_dict = dict( **kwd )
+        env_entry, env_file = self.create_or_update_env_shell_file( self.install_dir, env_var_dict )
+        return_code = self.file_append( env_entry, env_file, make_executable=make_executable )
+        # Preserve any previous failure: once non-zero, always non-zero.
+        self.return_code = self.return_code or return_code
+        return self.return_code
+
+    @staticmethod
+    def create_or_update_env_shell_file( install_dir, env_var_dict ):
+        """
+        Translate env_var_dict into a single shell statement and return it along with
+        the path of the env.sh file in install_dir.  Supported actions are prepend_to,
+        set_to, append_to and source; any other action raises an Exception.
+        """
+        env_var_action = env_var_dict[ 'action' ]
+        env_var_value = env_var_dict[ 'value' ]
+        if env_var_action in [ 'prepend_to', 'set_to', 'append_to' ]:
+            env_var_name = env_var_dict[ 'name' ]
+            if env_var_action == 'prepend_to':
+                changed_value = '%s:$%s' % ( env_var_value, env_var_name )
+            elif env_var_action == 'set_to':
+                changed_value = '%s' % env_var_value
+            elif env_var_action == 'append_to':
+                changed_value = '$%s:%s' % ( env_var_name, env_var_value )
+            line = "%s=%s; export %s" % ( env_var_name, changed_value, env_var_name )
+        elif env_var_action == "source":
+            # Guard the source with an existence test so a missing file does not abort the shell.
+            line = "if [ -f %s ] ; then . %s ; fi" % ( env_var_value, env_var_value )
+        else:
+            raise Exception( "Unknown shell file action %s" % env_var_action )
+        env_shell_file_path = os.path.join( install_dir, 'env.sh' )
+        return line, env_shell_file_path
+
+    def file_append( self, text, file_path, make_executable=True ):
+        """
+        Append a line to a file unless the line already exists in the file.  This method creates the file if
+        it doesn't exist.  If make_executable is True, the permissions on the file are set to executable by
+        the owner.  Returns 0 on success and 1 on any failure ( all failures are logged ).
+        """
+        file_dir = os.path.dirname( file_path )
+        if not os.path.exists( file_dir ):
+            try:
+                os.makedirs( file_dir )
+            except Exception, e:
+                log.exception( str( e ) )
+                return 1
+        if os.path.exists( file_path ):
+            try:
+                new_env_file_contents = []
+                env_file_contents = file( file_path, 'r' ).readlines()
+                # Clean out blank lines from the env.sh file.
+                for line in env_file_contents:
+                    line = line.rstrip()
+                    if line:
+                        new_env_file_contents.append( line )
+                env_file_contents = new_env_file_contents
+            except Exception, e:
+                log.exception( str( e ) )
+                return 1
+        else:
+            # Touch the file so the chmod below has something to operate on.
+            env_file_handle = open( file_path, 'w' )
+            env_file_handle.close()
+            env_file_contents = []
+        if make_executable:
+            # Explicitly set the file's executable bits ( owner / group / other execute ).
+            try:
+                os.chmod( file_path, int( '111', base=8 ) | os.stat( file_path )[ stat.ST_MODE ] )
+            except Exception, e:
+                log.exception( str( e ) )
+                return 1
+        # Convert the received text to a list, in order to support adding one or more lines to the file.
+        if isinstance( text, basestring ):
+            text = [ text ]
+        for line in text:
+            line = line.rstrip()
+            # Skip duplicates so repeated installs do not grow env.sh without bound.
+            if line and line not in env_file_contents:
+                env_file_contents.append( line )
+        try:
+            file( file_path, 'w' ).write( '\n'.join( env_file_contents ) )
+        except Exception, e:
+            log.exception( str( e ) )
+            return 1
+        return 0
+
+    def handle_action_shell_file_paths( self, action_dict ):
+        """Append a 'source' line to env.sh for each path in the action's action_shell_file_paths list."""
+        shell_file_paths = action_dict.get( 'action_shell_file_paths', [] )
+        for shell_file_path in shell_file_paths:
+            self.append_line( action="source", value=shell_file_path )
+
+
+class InstallEnvironment( object ):
+    """Object describing the environment built up as part of the process of building and installing a package."""
+
+
+    def __init__( self ):
+        # Paths to env.sh files whose settings should be sourced before running commands.
+        self.env_shell_file_paths = []
+
+    def __call__( self, install_dir ):
+        # NOTE(review): 'settings' is not among this module's visible imports ( only
+        # prefix, state and _AttributeString are imported from fabric ), so this looks
+        # like it would raise a NameError when called -- confirm against the full file.
+        # Also note this is a generator function; presumably it must be wrapped with
+        # contextlib.contextmanager ( or used via a fabric context ) to work in 'with'.
+        with settings( warn_only=True, **td_common_util.get_env_var_values( install_dir ) ):
+            with prefix( self.__setup_environment() ):
+                yield
+
+    def add_env_shell_file_paths( self, paths ):
+        """Register additional env.sh file paths whose settings will be sourced before commands run."""
+        for path in paths:
+            self.env_shell_file_paths.append( str( path ) )
+
+    def build_command( self, command, action_type='shell_command' ):
+        """
+        Build command line for execution from simple command, but
+        configuring environment described by this object.
+        """
+        env_cmds = self.environment_commands( action_type )
+        return '\n'.join( env_cmds + [ command ] )
+
+    def close_file_descriptor( self, fd ):
+        """Attempt to close a file descriptor, retrying for up to 600 seconds; return '' on success or an error message."""
+        start_timer = time.time()
+        error = ''
+        while True:
+            try:
+                fd.close()
+                break
+            except IOError, e:
+                # Undoubtedly close() was called during a concurrent operation on the same file object.
+                log.debug( 'Error closing file descriptor: %s' % str( e ) )
+                time.sleep( .5 )
+                current_wait_time = time.time() - start_timer
+                if current_wait_time >= 600:
+                    error = 'Error closing file descriptor: %s' % str( e )
+                    break
+        return error
+
+    def enqueue_output( self, stdout, stdout_queue, stderr, stderr_queue ):
+        """
+        This method places streamed stdout and stderr into a threaded IPC queue target.  Received data
+        is printed and saved to that thread's queue.  The calling thread can then retrieve the data using
+        thread.stdout and thread.stderr.
+        """
+        stdout_logger = logging.getLogger( 'fabric_util.STDOUT' )
+        stderr_logger = logging.getLogger( 'fabric_util.STDERR' )
+        for line in iter( stdout.readline, '' ):
+            output = line.rstrip()
+            stdout_logger.debug( output )
+            stdout_queue.put( output )
+        # A None sentinel marks end-of-stream for consumers of each queue.
+        stdout_queue.put( None )
+        for line in iter( stderr.readline, '' ):
+            output = line.rstrip()
+            stderr_logger.debug( output )
+            stderr_queue.put( output )
+        stderr_queue.put( None )
+
+    def environment_commands( self, action_type ):
+        """Build a list of commands used to construct the environment described by this object."""
+        cmds = []
+        for env_shell_file_path in self.env_shell_file_paths:
+            if os.path.exists( env_shell_file_path ):
+                for env_setting in open( env_shell_file_path ):
+                    cmds.append( env_setting.strip( '\n' ) )
+            else:
+                log.debug( 'Invalid file %s specified, ignoring %s action.' % ( str( env_shell_file_path ), str( action_type ) ) )
+        return cmds
+
+    def environment_dict( self, action_type='template_command' ):
+        """Parse each registered env.sh file into a { name: value } dict of environment settings."""
+        env_vars = dict()
+        for env_shell_file_path in self.env_shell_file_paths:
+            if os.path.exists( env_shell_file_path ):
+                for env_setting in open( env_shell_file_path ):
+                    # Each line is expected to look like 'NAME=value; export NAME'.
+                    env_string = env_setting.split( ';' )[ 0 ]
+                    env_name, env_path = env_string.split( '=' )
+                    env_vars[ env_name ] = env_path
+            else:
+                log.debug( 'Invalid file %s specified, ignoring template_command action.' % str( env_shell_file_path ) )
+        return env_vars
+
+    def handle_command( self, app, tool_dependency, install_dir, cmd, return_output=False ):
+        """
+        Handle a command and log the results.  Executes cmd, appends stdout / stderr to the
+        INSTALLATION log in install_dir, and on a non-zero return code records a ( possibly
+        truncated ) error message on tool_dependency and flushes it to the install model.
+        Returns the full output object when return_output is True, otherwise the return code.
+        """
+        context = app.install_model.context
+        command = str( cmd )
+        output = self.handle_complex_command( command )
+        self.log_results( cmd, output, os.path.join( install_dir, td_common_util.INSTALLATION_LOG ) )
+        stdout = output.stdout
+        stderr = output.stderr
+        # Shrink oversized output so it fits within the database column limit.
+        if len( stdout ) > DATABASE_MAX_STRING_SIZE:
+            print "Length of stdout > %s, so only a portion will be saved in the database." % str( DATABASE_MAX_STRING_SIZE_PRETTY )
+            stdout = shrink_string_by_size( stdout, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
+        if len( stderr ) > DATABASE_MAX_STRING_SIZE:
+            print "Length of stderr > %s, so only a portion will be saved in the database." % str( DATABASE_MAX_STRING_SIZE_PRETTY )
+            stderr = shrink_string_by_size( stderr, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
+        if output.return_code not in [ 0 ]:
+            tool_dependency.status = app.install_model.ToolDependency.installation_status.ERROR
+            if stderr:
+                tool_dependency.error_message = unicodify( stderr )
+            elif stdout:
+                tool_dependency.error_message = unicodify( stdout )
+            else:
+                # We have a problem if there was no stdout and no stderr.
+                tool_dependency.error_message = "Unknown error occurred executing shell command %s, return_code: %s" % \
+                    ( str( cmd ), str( output.return_code ) )
+            context.add( tool_dependency )
+            context.flush()
+        if return_output:
+            return output
+        return output.return_code
+
+    def handle_complex_command( self, command ):
+        """
+        Wrap subprocess.Popen in such a way that the stderr and stdout from running a shell command will
+        be captured and logged in nearly real time.  This is similar to fabric.local, but allows us to
+        retain control over the process.  This method is named "complex" because it uses queues and
+        threads to execute a command while capturing and displaying the output.
+        """
+        # Launch the command as subprocess.  A bufsize of 1 means line buffered.
+        process_handle = subprocess.Popen( str( command ),
+                                           stdout=subprocess.PIPE,
+                                           stderr=subprocess.PIPE,
+                                           bufsize=1,
+                                           close_fds=False,
+                                           shell=True,
+                                           cwd=state.env[ 'lcwd' ] )
+        pid = process_handle.pid
+        # Launch the asynchronous readers of the process' stdout and stderr.
+        stdout_queue = Queue.Queue()
+        stdout_reader = AsynchronousReader( process_handle.stdout, stdout_queue )
+        stdout_reader.start()
+        stderr_queue = Queue.Queue()
+        stderr_reader = AsynchronousReader( process_handle.stderr, stderr_queue )
+        stderr_reader.start()
+        # Place streamed stdout and stderr into a threaded IPC queue target so it can
+        # be printed and stored for later retrieval when generating the INSTALLATION.log.
+        # NOTE(review): this thread reads the very same pipe objects as the two
+        # AsynchronousReaders started above, so two consumers compete for the same
+        # lines -- confirm this is intended before relying on the queues' contents.
+        stdio_thread = threading.Thread( target=self.enqueue_output,
+                                         args=( process_handle.stdout,
+                                                stdout_queue,
+                                                process_handle.stderr,
+                                                stderr_queue ) )
+        # NOTE(review): this lock is never shared with another thread, so the
+        # acquire here and the release after the loop have no synchronizing effect.
+        thread_lock = threading.Lock()
+        thread_lock.acquire()
+        stdio_thread.start()
+        # Check the queues for output until there is nothing more to get.
+        start_timer = time.time()
+        while not stdout_reader.installation_complete() or not stderr_reader.installation_complete():
+            # Show what we received from standard output.
+            while not stdout_queue.empty():
+                try:
+                    line = stdout_queue.get()
+                except Queue.Empty:
+                    line = None
+                    break
+                if line:
+                    print line
+                    # Any output resets the no-output timeout clock.
+                    start_timer = time.time()
+                else:
+                    break
+            # Show what we received from standard error.
+            while not stderr_queue.empty():
+                try:
+                    line = stderr_queue.get()
+                except Queue.Empty:
+                    line = None
+                    break
+                if line:
+                    print line
+                    start_timer = time.time()
+                else:
+                    stderr_queue.task_done()
+                    break
+            # Sleep a bit before asking the readers again.
+            time.sleep( .1 )
+            current_wait_time = time.time() - start_timer
+            # Kill the subprocess if it has produced no output for the configured timeout.
+            if stdout_queue.empty() and stderr_queue.empty() and current_wait_time > td_common_util.NO_OUTPUT_TIMEOUT:
+                err_msg = "\nShutting down process id %s because it generated no output for the defined timeout period of %.1f seconds.\n" % \
+                    ( pid, td_common_util.NO_OUTPUT_TIMEOUT )
+                stderr_reader.lines.append( err_msg )
+                process_handle.kill()
+                break
+        thread_lock.release()
+        # Wait until each of the threads we've started terminate.  The following calls will block each thread
+        # until it terminates either normally, through an unhandled exception, or until the timeout occurs.
+        stdio_thread.join( td_common_util.NO_OUTPUT_TIMEOUT )
+        stdout_reader.join( td_common_util.NO_OUTPUT_TIMEOUT )
+        stderr_reader.join( td_common_util.NO_OUTPUT_TIMEOUT )
+        # Close subprocess' file descriptors.
+        # NOTE(review): the second assignment overwrites the first error message,
+        # and 'error' is never inspected afterwards.
+        error = self.close_file_descriptor( process_handle.stdout )
+        error = self.close_file_descriptor( process_handle.stderr )
+        stdout = '\n'.join( stdout_reader.lines )
+        stderr = '\n'.join( stderr_reader.lines )
+        # Handle error condition (deal with stdout being None, too)
+        output = _AttributeString( stdout.strip() if stdout else "" )
+        errors = _AttributeString( stderr.strip() if stderr else "" )
+        # Make sure the process has finished.
+        process_handle.poll()
+        output.return_code = process_handle.returncode
+        output.stderr = errors
+        return output
+
+    def log_results( self, command, fabric_AttributeString, file_path ):
+        """Write attributes of fabric.operations._AttributeString to a specified log file, appending if it exists."""
+        if os.path.exists( file_path ):
+            logfile = open( file_path, 'ab' )
+        else:
+            logfile = open( file_path, 'wb' )
+        logfile.write( "\n#############################################\n" )
+        logfile.write( '%s\nSTDOUT\n' % command )
+        logfile.write( str( fabric_AttributeString.stdout ) )
+        logfile.write( "\n#############################################\n" )
+        logfile.write( "\n#############################################\n" )
+        logfile.write( '%s\nSTDERR\n' % command )
+        logfile.write( str( fabric_AttributeString.stderr ) )
+        logfile.write( "\n#############################################\n" )
+        logfile.close()
+
+    @contextmanager
+    def make_tmp_dir( self ):
+        """Context manager yielding a fresh temporary working directory that is removed on normal exit."""
+        work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-mtd" )
+        # NOTE(review): the cleanup below is not wrapped in try/finally, so the
+        # directory is leaked if the body of the caller's 'with' block raises.
+        yield work_dir
+        if os.path.exists( work_dir ):
+            try:
+                shutil.rmtree( work_dir )
+            except Exception, e:
+                log.exception( str( e ) )
+
+    def __setup_environment( self ):
+        # Chain '. <file>' commands for every registered env.sh file that still exists.
+        return "&&".join( [ ". %s" % file for file in self.__valid_env_shell_file_paths() ] )
+
+    def __valid_env_shell_file_paths( self ):
+        # Filter out any registered env.sh paths that no longer exist on disk.
+        return [ file for file in self.env_shell_file_paths if os.path.exists( file ) ]
+
+
+class RecipeManager( object ):
+    """Maps recipe action types to step handler instances and drives the prepare / execute phases of each step."""
+
+    def __init__( self ):
+        # One handler instance per supported action type, keyed by the type name.
+        self.step_handlers_by_type = self.load_step_handlers()
+
+    def get_step_handler_by_type( self, type ):
+        """Return the step handler registered for the received action type, or None if the type is unknown."""
+        return self.step_handlers_by_type.get( type, None )
+
+    def execute_step( self, app, tool_dependency, package_name, actions, action_type, action_dict, filtered_actions,
+                      env_file_builder, install_environment, work_dir, install_dir, current_dir=None, initial_download=False ):
+        """
+        Execute the recipe step identified by action_type via its step handler and return
+        the ( possibly updated ) tool_dependency, filtered_actions and working dir.  When
+        actions is empty nothing is executed and dir is returned as None.
+        """
+        if actions:
+            # NOTE(review): an unregistered action_type makes step_handler None and the
+            # call below would raise AttributeError -- confirm callers validate the type.
+            step_handler = self.get_step_handler_by_type( action_type )
+            tool_dependency, filtered_actions, dir = step_handler.execute_step( app=app,
+                                                                                tool_dependency=tool_dependency,
+                                                                                package_name=package_name,
+                                                                                actions=actions,
+                                                                                action_dict=action_dict,
+                                                                                filtered_actions=filtered_actions,
+                                                                                env_file_builder=env_file_builder,
+                                                                                install_environment=install_environment,
+                                                                                work_dir=work_dir,
+                                                                                install_dir=install_dir,
+                                                                                current_dir=current_dir,
+                                                                                initial_download=initial_download )
+        else:
+            dir = None
+        return tool_dependency, filtered_actions, dir
+
+    def load_step_handlers( self ):
+        """Instantiate and return the registry of all supported step handlers, keyed by action type."""
+        step_handlers_by_type = dict( autoconf=step_handler.Autoconf(),
+                                      change_directory=step_handler.ChangeDirectory(),
+                                      chmod=step_handler.Chmod(),
+                                      download_binary=step_handler.DownloadBinary(),
+                                      download_by_url=step_handler.DownloadByUrl(),
+                                      download_file=step_handler.DownloadFile(),
+                                      make_directory=step_handler.MakeDirectory(),
+                                      make_install=step_handler.MakeInstall(),
+                                      move_directory_files=step_handler.MoveDirectoryFiles(),
+                                      move_file=step_handler.MoveFile(),
+                                      set_environment=step_handler.SetEnvironment(),
+                                      set_environment_for_install=step_handler.SetEnvironmentForInstall(),
+                                      setup_perl_environment=step_handler.SetupPerlEnvironment(),
+                                      setup_r_environment=step_handler.SetupREnvironment(),
+                                      setup_ruby_environment=step_handler.SetupRubyEnvironment(),
+                                      setup_virtual_env=step_handler.SetupVirtualEnv(),
+                                      shell_command=step_handler.ShellCommand(),
+                                      template_command=step_handler.TemplateCommand() )
+        return step_handlers_by_type
+
+    def prepare_step( self, app, tool_dependency, action_type, action_elem, action_dict, install_dir, is_binary_download ):
+        """
+        Prepare the recipe step for later execution.  This generally alters the received action_dict
+        with new information needed during this step's execution.  When action_elem is None the
+        action_dict is returned unchanged.
+        """
+        if action_elem is not None:
+            step_handler = self.get_step_handler_by_type( action_type )
+            action_dict = step_handler.prepare_step( app=app,
+                                                     tool_dependency=tool_dependency,
+                                                     action_elem=action_elem,
+                                                     action_dict=action_dict,
+                                                     install_dir=install_dir,
+                                                     is_binary_download=is_binary_download )
+        return action_dict
+
\ No newline at end of file
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because the commit notification service is enabled for this
repository and you are the addressed recipient of this email.
[View Less]
1
0