1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/149dbd29acbc/
Changeset: 149dbd29acbc
User: Dave Bouvier
Date: 2013-09-09 19:44:03
Summary: When the install and test framework has finished testing a repository, use deactivate instead of uninstall, so that installed tool dependencies are retained.
Affected #: 1 file
diff -r 1a868f109ed985b738c709422dbd6cccbc5cdd32 -r 149dbd29acbce6396a969662a9b899cdbb7bdb46 test/install_and_test_tool_shed_repositories/base/twilltestcase.py
--- a/test/install_and_test_tool_shed_repositories/base/twilltestcase.py
+++ b/test/install_and_test_tool_shed_repositories/base/twilltestcase.py
@@ -6,7 +6,9 @@
log = logging.getLogger( __name__ )
+
class InstallTestRepository( TwillTestCase ):
+
def setUp( self ):
# Security helper
id_secret = os.environ.get( 'GALAXY_INSTALL_TEST_SECRET', 'changethisinproductiontoo' )
@@ -24,21 +26,22 @@
self.galaxy_tool_dependency_dir = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR' )
self.shed_tools_dict = {}
self.home()
- def initiate_installation_process( self,
- install_tool_dependencies=False,
- install_repository_dependencies=True,
- no_changes=True,
+
+ def initiate_installation_process( self,
+ install_tool_dependencies=False,
+ install_repository_dependencies=True,
+ no_changes=True,
new_tool_panel_section=None ):
html = self.last_page()
- # Since the installation process is by necessity asynchronous, we have to get the parameters to 'manually' initiate the
- # installation process. This regex will return the tool shed repository IDs in group(1), the encoded_kwd parameter in
- # group(2), and the reinstalling flag in group(3) and pass them to the manage_repositories method in the Galaxy
+ # Since the installation process is by necessity asynchronous, we have to get the parameters to 'manually' initiate the
+ # installation process. This regex will return the tool shed repository IDs in group(1), the encoded_kwd parameter in
+ # group(2), and the reinstalling flag in group(3) and pass them to the manage_repositories method in the Galaxy
# admin_toolshed controller.
install_parameters = re.search( 'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \);', html )
if install_parameters:
iri_ids = install_parameters.group(1)
# In some cases, the returned iri_ids are of the form: "[u'<encoded id>', u'<encoded id>']"
- # This regex ensures that non-hex characters are stripped out of the list, so that util.listify/decode_id
+ # This regex ensures that non-hex characters are stripped out of the list, so that util.listify/decode_id
# will handle them correctly. It's safe to pass the cleaned list to manage_repositories, because it can parse
# comma-separated values.
repository_ids = str( iri_ids )
@@ -49,8 +52,9 @@
( ','.join( util.listify( repository_ids ) ), encoded_kwd, reinstalling )
self.visit_url( url )
return util.listify( repository_ids )
- def install_repository( self, repository_info_dict, install_tool_dependencies=True, install_repository_dependencies=True,
- strings_displayed=[], strings_not_displayed=[], preview_strings_displayed=[],
+
+ def install_repository( self, repository_info_dict, install_tool_dependencies=True, install_repository_dependencies=True,
+ strings_displayed=[], strings_not_displayed=[], preview_strings_displayed=[],
post_submit_strings_displayed=[], new_tool_panel_section=None, **kwd ):
name = repository_info_dict[ 'name' ]
owner = repository_info_dict[ 'owner' ]
@@ -59,23 +63,23 @@
tool_shed_url = repository_info_dict[ 'tool_shed_url' ]
preview_params = urllib.urlencode( dict( repository_id=encoded_repository_id, changeset_revision=changeset_revision ) )
self.visit_url( '%s/repository/preview_tools_in_changeset?%s' % ( tool_shed_url, preview_params ) )
- install_params = urllib.urlencode( dict( repository_ids=encoded_repository_id,
+ install_params = urllib.urlencode( dict( repository_ids=encoded_repository_id,
changeset_revisions=changeset_revision,
galaxy_url=self.url ) )
- # If the tool shed does not have the same hostname as the Galaxy server being used for these tests,
- # twill will not carry over previously set cookies for the Galaxy server when following the
- # install_repositories_by_revision redirect, so we have to include 403 in the allowed HTTP
+ # If the tool shed does not have the same hostname as the Galaxy server being used for these tests,
+ # twill will not carry over previously set cookies for the Galaxy server when following the
+ # install_repositories_by_revision redirect, so we have to include 403 in the allowed HTTP
# status codes and log in again.
url = '%s/repository/install_repositories_by_revision?%s' % ( tool_shed_url, install_params )
self.visit_url( url, allowed_codes=[ 200, 403 ] )
self.logout()
self.login( email='test(a)bx.psu.edu', username='test' )
- install_params = urllib.urlencode( dict( repository_ids=encoded_repository_id,
+ install_params = urllib.urlencode( dict( repository_ids=encoded_repository_id,
changeset_revisions=changeset_revision,
tool_shed_url=tool_shed_url ) )
url = '/admin_toolshed/prepare_for_install?%s' % install_params
self.visit_url( url )
- # This section is tricky, due to the way twill handles form submission. The tool dependency checkbox needs to
+ # This section is tricky, due to the way twill handles form submission. The tool dependency checkbox needs to
# be hacked in through tc.browser, putting the form field in kwd doesn't work.
if 'install_tool_dependencies' in self.last_page():
form = tc.browser.get_form( 'select_tool_panel_section' )
@@ -105,12 +109,14 @@
self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
repository_ids = self.initiate_installation_process( new_tool_panel_section=new_tool_panel_section )
self.wait_for_repository_installation( repository_ids )
+
def visit_url( self, url, allowed_codes=[ 200 ] ):
new_url = tc.go( url )
return_code = tc.browser.get_code()
assert return_code in allowed_codes, 'Invalid HTTP return code %s, allowed codes: %s' % \
- ( return_code, ', '.join( str( code ) for code in allowed_codes ) )
+ ( return_code, ', '.join( str( code ) for code in allowed_codes ) )
return new_url
+
def wait_for_repository_installation( self, repository_ids ):
final_states = [ model.ToolShedRepository.installation_status.ERROR,
model.ToolShedRepository.installation_status.INSTALLED ]
@@ -129,12 +135,12 @@
( timeout_counter, repository.status ) )
break
time.sleep( 1 )
+
def uninstall_repository( self, installed_repository ):
url = '/admin_toolshed/deactivate_or_uninstall_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_url( url )
- tc.fv ( 1, "remove_from_disk", 'true' )
+ tc.fv ( 1, "remove_from_disk", 'false' )
tc.submit( 'deactivate_or_uninstall_repository_button' )
strings_displayed = [ 'The repository named' ]
- strings_displayed.append( 'has been uninstalled' )
+ strings_displayed.append( 'has been deactivated' )
self.check_for_strings( strings_displayed, strings_not_displayed=[] )
-
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/97d020901403/
Changeset: 97d020901403
Branch: stable
User: carlfeberhard
Date: 2013-09-09 17:20:40
Summary: Fix to dbkey select in library upload when 'unspecified' is not in dbkey list
Affected #: 1 file
diff -r 9fe9aa6ac504bc7703ba5d465e68170d79e89905 -r 97d0209014039d0f9aaa7f48a188708de1829786 templates/webapps/galaxy/library/common/common.mako
--- a/templates/webapps/galaxy/library/common/common.mako
+++ b/templates/webapps/galaxy/library/common/common.mako
@@ -275,8 +275,9 @@
# move unspecified to the first option and set as default if not last_used_build
#TODO: remove when we decide on a common dbkey selector widget
unspecified = ('unspecified (?)', '?')
- dbkeys.remove( unspecified )
- dbkeys.insert( 0, unspecified )
+ if unspecified in dbkeys:
+ dbkeys.remove( unspecified )
+ dbkeys.insert( 0, unspecified )
default_selected = last_used_build or '?'
%>
%for dbkey in dbkeys:
https://bitbucket.org/galaxy/galaxy-central/commits/b62b2b4daeac/
Changeset: b62b2b4daeac
User: carlfeberhard
Date: 2013-09-09 17:21:06
Summary: Merge stable
Affected #: 1 file
diff -r 354fb3e262bbf08cf5b77f70da1bb45ee947841c -r b62b2b4daeacc819016d6208078fa1fdd5b467c4 templates/webapps/galaxy/library/common/common.mako
--- a/templates/webapps/galaxy/library/common/common.mako
+++ b/templates/webapps/galaxy/library/common/common.mako
@@ -275,8 +275,9 @@
# move unspecified to the first option and set as default if not last_used_build
#TODO: remove when we decide on a common dbkey selector widget
unspecified = ('unspecified (?)', '?')
- dbkeys.remove( unspecified )
- dbkeys.insert( 0, unspecified )
+ if unspecified in dbkeys:
+ dbkeys.remove( unspecified )
+ dbkeys.insert( 0, unspecified )
default_selected = last_used_build or '?'
%>
%for dbkey in dbkeys:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/354fb3e262bb/
Changeset: 354fb3e262bb
User: Richard Burhans
Date: 2013-09-07 00:03:51
Summary: Fixed initial installation of virtualenv
Affected #: 1 file
diff -r 8f3110cc7851e67256a6d24d7e41ad65e5a507dd -r 354fb3e262bbf08cf5b77f70da1bb45ee947841c lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -155,12 +155,10 @@
if not os.path.exists( venv_dir ):
with make_tmp_dir() as work_dir:
downloaded_filename = VIRTUALENV_URL.rsplit('/', 1)[-1]
- downloaded_file_path = td_common_util.url_download( work_dir, downloaded_filename, VIRTUALENV_URL )
- if td_common_util.istar( downloaded_file_path ):
- td_common_util.extract_tar( downloaded_file_path, work_dir )
- dir = td_common_util.tar_extraction_directory( work_dir, downloaded_filename )
- else:
- log.error( "Failed to download virtualenv: Downloaded file '%s' is not a tar file", downloaded_filename )
+ try:
+ dir = td_common_util.url_download( work_dir, downloaded_filename, VIRTUALENV_URL )
+ except:
+ log.error( "Failed to download virtualenv: td_common_util.url_download( '%s', '%s', '%s' ) threw an exception", work_dir, downloaded_filename, VIRTUALENV_URL )
return False
full_path_to_dir = os.path.abspath( os.path.join( work_dir, dir ) )
shutil.move( full_path_to_dir, venv_dir )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8f3110cc7851/
Changeset: 8f3110cc7851
User: dan
Date: 2013-09-06 23:09:58
Summary: Display command-line on dataset info page for admin users.
Affected #: 1 file
diff -r 1a5f25751066598601aef4ad34da4b3bea8deaca -r 8f3110cc7851e67256a6d24d7e41ad65e5a507dd templates/show_params.mako
--- a/templates/show_params.mako
+++ b/templates/show_params.mako
@@ -123,6 +123,9 @@
%if trans.user_is_admin() or trans.app.config.expose_dataset_path:
<tr><td>Full Path:</td><td>${hda.file_name | h}</td></tr>
%endif
+ %if job and job.command_line and trans.user_is_admin():
+ <tr><td>Job Command-Line:</td><td>${ job.command_line | h }</td></tr>
+ %endif
</table><br />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f82d49deb6d0/
Changeset: f82d49deb6d0
User: Dave Bouvier
Date: 2013-09-06 22:53:07
Summary: Set the tool dependency path for repository installation and testing.
Affected #: 1 file
diff -r f4a839856cfb06c01845cdd96f2893f4b70250bb -r f82d49deb6d05648d2ab114cff6d689b38806d65 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -567,6 +567,7 @@
if tool_dependency_dir is None:
tool_dependency_dir = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
os.environ[ 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR' ] = tool_dependency_dir
+ os.environ[ 'GALAXY_TOOL_DEPENDENCY_DIR' ] = tool_dependency_dir
if 'GALAXY_INSTALL_TEST_DBURI' in os.environ:
database_connection = os.environ[ 'GALAXY_INSTALL_TEST_DBURI' ]
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f4a839856cfb/
Changeset: f4a839856cfb
User: greg
Date: 2013-09-06 15:38:12
Summary: Partial framework support for enhanced tool dependency definitions that enable installation of binaries into specified architectures. More coming soon...
Affected #: 8 files
diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -252,7 +252,7 @@
tool_shed_repository.uninstalled = True
# Remove all installed tool dependencies, but don't touch any repository dependencies..
for tool_dependency in tool_shed_repository.installed_tool_dependencies:
- uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans, tool_dependency )
+ uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans.app, tool_dependency )
if error_message:
errors = '%s %s' % ( errors, error_message )
tool_shed_repository.deleted = True
@@ -1559,7 +1559,7 @@
if tool_dependency.can_uninstall:
tool_dependencies_for_uninstallation.append( tool_dependency )
for tool_dependency in tool_dependencies_for_uninstallation:
- uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans, tool_dependency )
+ uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans.app, tool_dependency )
if error_message:
errors = True
message = '%s %s' % ( message, error_message )
diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -281,8 +281,6 @@
source_dir=os.path.join( action_dict[ 'source_directory' ] ),
destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) )
elif action_type == 'move_file':
- # TODO: Remove this hack that resets current_dir so that the pre-compiled bwa binary can be found.
- # current_dir = '/Users/gvk/workspaces_2008/bwa/bwa-0.5.9'
td_common_util.move_file( current_dir=current_dir,
source=os.path.join( action_dict[ 'source' ] ),
destination_dir=os.path.join( action_dict[ 'destination' ] ) )
diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -12,6 +12,7 @@
from tool_shed.util import encoding_util
from tool_shed.util import tool_dependency_util
from tool_shed.util import xml_util
+from tool_shed.galaxy_install.tool_dependencies import td_common_util
from galaxy.model.orm import and_
from galaxy.web import url_for
from galaxy.util import asbool
@@ -106,6 +107,92 @@
text = common_util.tool_shed_get( app, tool_shed_url, url )
return text
+def handle_complex_repository_dependency_for_package( app, elem, package_name, package_version, tool_shed_repository ):
+ tool_dependency = None
+ tool_shed = elem.attrib[ 'toolshed' ]
+ required_repository_name = elem.attrib[ 'name' ]
+ required_repository_owner = elem.attrib[ 'owner' ]
+ default_required_repository_changeset_revision = elem.attrib[ 'changeset_revision' ]
+ required_repository = get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app,
+ tool_shed,
+ required_repository_name,
+ required_repository_owner,
+ default_required_repository_changeset_revision )
+ tmp_filename = None
+ if required_repository:
+ required_repository_changeset_revision = required_repository.installed_changeset_revision
+ # Define the installation directory for the required tool dependency package in the required repository.
+ required_repository_package_install_dir = \
+ get_tool_dependency_install_dir( app=app,
+ repository_name=required_repository_name,
+ repository_owner=required_repository_owner,
+ repository_changeset_revision=required_repository_changeset_revision,
+ tool_dependency_type='package',
+ tool_dependency_name=package_name,
+ tool_dependency_version=package_version )
+ # Define the this dependent repository's tool dependency installation directory that will contain the env.sh file with a path to the
+ # required repository's installed tool dependency package.
+ dependent_install_dir = get_tool_dependency_install_dir( app=app,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dependency_type='package',
+ tool_dependency_name=package_name,
+ tool_dependency_version=package_version )
+ # Set this dependent repository's tool dependency env.sh file with a path to the required repository's installed tool dependency package.
+ # We can get everything we need from the discovered installed required_repository.
+ if required_repository.status in [ app.model.ToolShedRepository.installation_status.DEACTIVATED,
+ app.model.ToolShedRepository.installation_status.INSTALLED ]:
+ if not os.path.exists( required_repository_package_install_dir ):
+ print 'Missing required tool dependency directory %s' % str( required_repository_package_install_dir )
+ repo_files_dir = required_repository.repo_files_directory( app )
+ tool_dependencies_config = get_absolute_path_to_file_in_repository( repo_files_dir, 'tool_dependencies.xml' )
+ if tool_dependencies_config:
+ config_to_use = tool_dependencies_config
+ else:
+ message = "Unable to locate required tool_dependencies.xml file for revision %s of installed repository %s owned by %s." % \
+ ( str( required_repository.changeset_revision ), str( required_repository.name ), str( required_repository.owner ) )
+ raise Exception( message )
+ else:
+ # Make a call to the tool shed to get the changeset revision to which the current value of required_repository_changeset_revision
+ # should be updated if it's not current.
+ text = get_updated_changeset_revisions_from_tool_shed( app=app,
+ tool_shed_url=tool_shed,
+ name=required_repository_name,
+ owner=required_repository_owner,
+ changeset_revision=required_repository_changeset_revision )
+ if text:
+ updated_changeset_revisions = listify( text )
+ # The list of changeset revisions is in reverse order, so the newest will be first.
+ required_repository_changeset_revision = updated_changeset_revisions[ 0 ]
+ # Make a call to the tool shed to get the required repository's tool_dependencies.xml file.
+ tmp_filename = create_temporary_tool_dependencies_config( app,
+ tool_shed,
+ required_repository_name,
+ required_repository_owner,
+ required_repository_changeset_revision )
+ config_to_use = tmp_filename
+ tool_dependency, actions_dict = populate_actions_dict( app=app,
+ dependent_install_dir=dependent_install_dir,
+ required_install_dir=required_repository_package_install_dir,
+ tool_shed_repository=tool_shed_repository,
+ required_repository=required_repository,
+ package_name=package_name,
+ package_version=package_version,
+ tool_dependencies_config=config_to_use )
+ if tmp_filename:
+ try:
+ os.remove( tmp_filename )
+ except:
+ pass
+ # Install and build the package via fabric.
+ install_and_build_package_via_fabric( app, tool_dependency, actions_dict )
+ else:
+ message = "Unable to locate required tool shed repository named %s owned by %s with revision %s." % \
+ ( str( required_repository_name ), str( required_repository_owner ), str( default_required_repository_changeset_revision ) )
+ raise Exception( message )
+ return tool_dependency
+
def handle_set_environment_entry_for_package( app, install_dir, tool_shed_repository, package_name, package_version, elem, required_repository ):
"""
Populate a list of actions for creating an env.sh file for a dependent repository. The received elem is the <package> tag set associated
@@ -222,88 +309,9 @@
for package_elem in elem:
if package_elem.tag == 'repository':
# We have a complex repository dependency definition.
- tool_shed = package_elem.attrib[ 'toolshed' ]
- required_repository_name = package_elem.attrib[ 'name' ]
- required_repository_owner = package_elem.attrib[ 'owner' ]
- default_required_repository_changeset_revision = package_elem.attrib[ 'changeset_revision' ]
- required_repository = get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app,
- tool_shed,
- required_repository_name,
- required_repository_owner,
- default_required_repository_changeset_revision )
- tmp_filename = None
- if required_repository:
- required_repository_changeset_revision = required_repository.installed_changeset_revision
- # Define the installation directory for the required tool dependency package in the required repository.
- required_repository_package_install_dir = \
- get_tool_dependency_install_dir( app=app,
- repository_name=required_repository_name,
- repository_owner=required_repository_owner,
- repository_changeset_revision=required_repository_changeset_revision,
- tool_dependency_type='package',
- tool_dependency_name=package_name,
- tool_dependency_version=package_version )
- # Define the this dependent repository's tool dependency installation directory that will contain the env.sh file with a path to the
- # required repository's installed tool dependency package.
- dependent_install_dir = get_tool_dependency_install_dir( app=app,
- repository_name=tool_shed_repository.name,
- repository_owner=tool_shed_repository.owner,
- repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dependency_type='package',
- tool_dependency_name=package_name,
- tool_dependency_version=package_version )
- # Set this dependent repository's tool dependency env.sh file with a path to the required repository's installed tool dependency package.
- # We can get everything we need from the discovered installed required_repository.
- if required_repository.status in [ app.model.ToolShedRepository.installation_status.DEACTIVATED,
- app.model.ToolShedRepository.installation_status.INSTALLED ]:
- if not os.path.exists( required_repository_package_install_dir ):
- print 'Missing required tool dependency directory %s' % str( required_repository_package_install_dir )
- repo_files_dir = required_repository.repo_files_directory( app )
- tool_dependencies_config = get_absolute_path_to_file_in_repository( repo_files_dir, 'tool_dependencies.xml' )
- if tool_dependencies_config:
- config_to_use = tool_dependencies_config
- else:
- message = "Unable to locate required tool_dependencies.xml file for revision %s of installed repository %s owned by %s." % \
- ( str( required_repository.changeset_revision ), str( required_repository.name ), str( required_repository.owner ) )
- raise Exception( message )
- else:
- # Make a call to the tool shed to get the changeset revision to which the current value of required_repository_changeset_revision
- # should be updated if it's not current.
- text = get_updated_changeset_revisions_from_tool_shed( app=app,
- tool_shed_url=tool_shed,
- name=required_repository_name,
- owner=required_repository_owner,
- changeset_revision=required_repository_changeset_revision )
- if text:
- updated_changeset_revisions = listify( text )
- # The list of changeset revisions is in reverse order, so the newest will be first.
- required_repository_changeset_revision = updated_changeset_revisions[ 0 ]
- # Make a call to the tool shed to get the required repository's tool_dependencies.xml file.
- tmp_filename = create_temporary_tool_dependencies_config( app,
- tool_shed,
- required_repository_name,
- required_repository_owner,
- required_repository_changeset_revision )
- config_to_use = tmp_filename
- tool_dependency, actions_dict = populate_actions_dict( app=app,
- dependent_install_dir=dependent_install_dir,
- required_install_dir=required_repository_package_install_dir,
- tool_shed_repository=tool_shed_repository,
- required_repository=required_repository,
- package_name=package_name,
- package_version=package_version,
- tool_dependencies_config=config_to_use )
- if tmp_filename:
- try:
- os.remove( tmp_filename )
- except:
- pass
- # Install and build the package via fabric.
- install_and_build_package_via_fabric( app, tool_dependency, actions_dict )
- else:
- message = "Unable to locate required tool shed repository named %s owned by %s with revision %s." % \
- ( str( required_repository_name ), str( required_repository_owner ), str( default_required_repository_changeset_revision ) )
- raise Exception( message )
+ rd_tool_dependency = handle_complex_repository_dependency_for_package( app, package_elem, package_name, package_version, tool_shed_repository )
+ if rd_tool_dependency and rd_tool_dependency.status == app.model.ToolDependency.installation_status.ERROR:
+ print "Error installing tool dependency for required repository: %s" % str( rd_tool_dependency.error_message )
elif package_elem.tag == 'install':
# <install version="1.0">
# Get the installation directory for tool dependencies that will be installed for the received tool_shed_repository.
@@ -333,114 +341,32 @@
type='package',
status=app.model.ToolDependency.installation_status.INSTALLING,
set_status=True )
- # Get the information that defines the current platform.
+ # Get the information about the current platform in case the tool dependency definition includes tag sets for installing
+ # compiled binaries.
platform_info_dict = tool_dependency_util.get_platform_info_dict()
if package_install_version == '1.0':
- # Handle tool dependency installation using a fabric method included in the Galaxy framework. The first thing we do
- # is check the installation architecture to see if we have a precompiled binary that works on the target system.
+ # Handle tool dependency installation using a fabric method included in the Galaxy framework.
+ actions_elem_tuples = td_common_util.parse_package_elem( package_elem,
+ platform_info_dict=platform_info_dict,
+ include_after_install_actions=True )
+ # At this point we have a list of <actions> elems that are either defined within an <actions_group> tag set with <actions>
+ # sub-elements that contains os and architecture attributes filtered by the platform into which the appropriate compiled
+ # binary will be installed, or not defined within an <actions_group> tag set and not filtered.
binary_installed = False
- actions_elem_tuples = []
- # Build a list of grouped and ungrouped <actions> tagsets to be processed in the order they are defined in the
- # tool_dependencies.xml file.
- for elem in package_elem:
- # Default to not treating actions as grouped.
- grouped = False
- # Skip any element that is not <actions> or <actions_group>. This will also skip comments and <readme> tags.
- if elem.tag not in [ 'actions', 'actions_group' ]:
- continue
- if elem.tag == 'actions':
- # We have an <actions> tag that should not be matched against a specific combination of architecture and operating system.
- grouped = False
- actions_elem_tuples.append( ( grouped, elem ) )
- else:
- # Record the number of <actions> elements, in order to filter out any <action> elements that precede <actions>
- # elements.
- actions_elem_count = len( elem.findall( 'actions' ) )
- # Record the number of <actions> elements that have architecture and os specified, in order to filter out any
- # platform-independent <actions> elements that come before platform-specific <actions> elements. This call to
- # elem.findall is filtered by tags that have both the os and architecture specified.
- # For more details, see http://docs.python.org/2/library/xml.etree.elementtree.html Section 19.7.2.1.
- platform_actions_element_count = len( elem.findall( 'actions[@architecture][@os]' ) )
- platform_actions_elements_processed = 0
- actions_elems_processed = 0
- # We have an actions_group element, and its child <actions> elements should therefore be compared with the current
- # operating system and processor architecture.
- grouped = True
- # The tagsets that will go into the actions_elem_list are those that install a precompiled binary if the
- # architecture and operating system match its defined attributes. If precompiled binary is not installed
- # the first <actions> tag following those that have the os and architecture attributes will be processed
- # in order to install and compile the source.
- actions_elem_list = []
- # The tagsets that will go into the after_install_actions list are <action> tags instead of <actions> tags. These
- # will only be processed if they are at the end of the <actions_group> tagset. See below for details.
- after_install_actions = []
- platform_independent_actions = []
- # Loop through the <actions_group> element and build the actions_elem_list and the after_install_actions list.
- for child_element in elem:
- if child_element.tag == 'actions':
- actions_elems_processed += 1
- system = child_element.get( 'os' )
- architecture = child_element.get( 'architecture' )
- # Skip <actions> tags that have only one of architecture or os specified, in order for the count in
- # platform_actions_elements_processed to remain accurate.
- if ( system and not architecture ) or ( architecture and not system ):
- log.debug( 'Error: Both architecture and os attributes must be specified in an <actions> tag.' )
- continue
- # Since we are inside an <actions_group> tagset, compare it with our current platform information and filter
- # the <actions> tagsets that don't match. Require both the os and architecture attributes to be defined in
- # order to find a match.
- if system and architecture:
- platform_actions_elements_processed += 1
- # If either the os or architecture do not match the platform, this <actions> tag will not be considered
- # a match. Skip it and proceed with checking the next one.
- if platform_info_dict[ 'os' ] != system or platform_info_dict[ 'architecture' ] != architecture:
- continue
- else:
- # <actions> tags without both os and architecture attributes are only allowed to be specified after
- # platform-specific <actions> tags. If we find a platform-independent <actions> tag before all
- # platform-specific <actions> tags have been processed, log a message stating this and skip to the
- # next <actions> tag.
- if platform_actions_elements_processed < platform_actions_element_count:
- message = 'Error: <actions> tags without os and architecture attributes are only allowed '
- message += 'after <actions> tags with os and architecture attributes specified. Skipping '
- message += 'current <actions> tag.'
- log.debug( message )
- continue
- # If we reach this point, it means one of two things: 1) The system and architecture attributes are not
- # defined in this <actions> tag, or 2) The system and architecture attributes are defined, and they are
- # an exact match for the current platform. Append the child element to the list of elements to process.
- actions_elem_list.append( child_element )
- elif child_element.tag == 'action':
- # Any <action> tags within an <actions_group> tagset must come after all <actions> tags.
- if actions_elems_processed == actions_elem_count:
- # If all <actions> elements have been processed, then this <action> element can be appended to the
- # list of actions to execute within this group.
- after_install_actions.append( child_element )
- else:
- # If any <actions> elements remain to be processed, then log a message stating that <action>
- # elements are not allowed to precede any <actions> elements within an <actions_group> tagset.
- message = 'Error: <action> tags are only allowed at the end of an <actions_group> '
- message += 'tagset, after all <actions> tags. '
- message += 'Skipping <%s> element with type %s.' % ( child_element.tag, child_element.get( 'type' ) )
- log.debug( message )
- continue
- if after_install_actions:
- actions_elem_list.extend( after_install_actions )
- actions_elem_tuples.append( ( grouped, actions_elem_list ) )
- # At this point we have a list of <actions> elems that are either defined within an <actions_group> tagset, and filtered by
- # the current platform, or not defined within an <actions_group> tagset, and not filtered.
- for grouped, actions_elems in actions_elem_tuples:
- if grouped:
- # Platform matching is only performed inside <actions_group> tagsets, os and architecture attributes are otherwise ignored.
+ for in_actions_group, actions_elems in actions_elem_tuples:
+ if in_actions_group:
+ # Platform matching is only performed inside <actions_group> tag sets, os and architecture attributes are otherwise
+ # ignored.
for actions_elem in actions_elems:
system = actions_elem.get( 'os' )
architecture = actions_elem.get( 'architecture' )
- # If this <actions> element has the os and architecture attributes defined, then we only want to process
- # until a successful installation is achieved.
+ # If this <actions> element has the os and architecture attributes defined, then we only want to process until a
+ # successful installation is achieved.
if system and architecture:
- # If an <actions> tag has been defined that matches our current platform, and the recipe specified
- # within that <actions> tag has been successfully processed, skip any remaining platform-specific
- # <actions> tags.
+ # If an <actions> tag has been defined that matches our current platform, and the recipe specified within
+ # that <actions> tag has been successfully processed, skip any remaining platform-specific <actions> tags.
+ # We cannot break out of the look here because there may be <action> tags at the end of the <actions_group>
+ # tag set that must be processed.
if binary_installed:
continue
# No platform-specific <actions> recipe has yet resulted in a successful installation.
@@ -451,33 +377,32 @@
actions_elem=actions_elem,
action_elem=None )
sa_session.refresh( tool_dependency )
- if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
+ if tool_dependency.status == app.model.ToolDependency.installation_status.INSTALLED:
# If an <actions> tag was found that matches the current platform, and the install_via_fabric method
# did not result in an error state, set binary_installed to True in order to skip any remaining
# platform-specific <actions> tags.
- if not binary_installed:
- binary_installed = True
+ binary_installed = True
else:
- # Otherwise, move on to the next matching <actions> tag, or any defined <actions> tags that do not
- # contain platform-dependent recipes.
- if binary_installed:
- binary_installed = False
- print 'Encountered an error downloading binary for %s version %s: %s' % \
- ( package_name, package_version, tool_dependency.error_message )
+ # Process the next matching <actions> tag, or any defined <actions> tags that do not contain platform
+ # dependent recipes.
+ print 'Error downloading binary for %s version %s: %s' % \
+ ( package_name, package_version, tool_dependency.error_message )
else:
# If no <actions> tags have been defined that match our current platform, or none of the matching
# <actions> tags resulted in a successful tool dependency status, proceed with one and only one
# <actions> tag that is not defined to be platform-specific.
if not binary_installed:
- log.debug( 'Platform-specific recipe failed or not found. Proceeding with platform-independent install recipe.' )
+ print 'Binary installation did not occur, so proceeding with install and compile recipe.'
+ # Make sure to reset for installation if attempt at binary installation resulted in an error.
+ if tool_dependency.status != app.model.ToolDependency.installation_status.NEW:
+ removed, error_message = tool_dependency_util.remove_tool_dependency( app, tool_dependency )
install_via_fabric( app,
tool_dependency,
install_dir,
package_name=package_name,
actions_elem=actions_elem,
action_elem=None )
- break
- # Perform any final actions that have been defined within the actions_group tagset, but outside of
+ # Perform any final actions that have been defined within the actions_group tag set, but outside of
# an <actions> tag, such as a set_environment entry, or a download_file or download_by_url command to
# retrieve extra data for this tool dependency. Only do this if the tool dependency is not in an error
# state, otherwise skip this action.
@@ -490,7 +415,7 @@
action_elem=actions_elem )
else:
# <actions> tags outside of an <actions_group> tag shall not check os or architecture, and if the attributes are
- # defined, they will be ignored. All <actions> tags outside of an <actions_group> tagset shall always be processed.
+ # defined, they will be ignored. All <actions> tags outside of an <actions_group> tag set shall always be processed.
# This is the default and original behavior of the install_package method.
install_via_fabric( app,
tool_dependency,
@@ -519,12 +444,12 @@
def install_via_fabric( app, tool_dependency, install_dir, package_name=None, proprietary_fabfile_path=None, actions_elem=None, action_elem=None, **kwd ):
"""Parse a tool_dependency.xml file's <actions> tag set to gather information for the installation via fabric."""
- sa_session = app.model.context.current
def evaluate_template( text ):
""" Substitute variables defined in XML blocks from dependencies file."""
return Template( text ).safe_substitute( td_common_util.get_env_var_values( install_dir ) )
+ sa_session = app.model.context.current
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
actions_dict = dict( install_dir=install_dir )
@@ -535,7 +460,7 @@
env_var_dicts = []
if actions_elem is not None:
elems = actions_elem
- if elems.get( 'architecture' ) is not None:
+ if elems.get( 'os' ) is not None and elems.get( 'architecture' ) is not None:
is_binary_download = True
else:
is_binary_download = False
@@ -750,7 +675,6 @@
new_value = new_value.split( ';' )[ 0 ]
return new_value
-
def populate_actions_dict( app, dependent_install_dir, required_install_dir, tool_shed_repository, required_repository, package_name, package_version, tool_dependencies_config ):
"""
Populate an actions dictionary that can be sent to fabric_util.install_and_build_package. This method handles the scenario where a tool_dependencies.xml
diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
@@ -239,6 +239,105 @@
os.makedirs( destination_directory )
shutil.move( source_file, destination_directory )
+def parse_package_elem( package_elem, platform_info_dict=None, include_after_install_actions=True ):
+ """
+ Parse a <package> element within a tool dependency definition and return a list of action tuples. This method is called when setting
+ metadata on a repository that includes a tool_dependencies.xml file or when installing a repository that includes a tool_dependencies.xml
+ file. If installing, platform_info_dict must be a valid dictionary and include_after_install_actions must be True.
+ """
+ # The actions_elem_tuples list contains <actions> tag sets (possibly inside of an <actions_group> tag set) to be processed in the order
+ # they are defined in the tool_dependencies.xml file.
+ actions_elem_tuples = []
+ # The tag sets that will go into the actions_elem_list are those that install a compiled binary if the architecture and operating system
+ # match its defined attributes. If a compiled binary is not installed, the first <actions> tag set [following those that have the os and
+ # architecture attributes] that does not have os or architecture attributes will be processed. This tag set must contain the recipe for
+ # downloading and compiling source.
+ actions_elem_list = []
+ for elem in package_elem:
+ if elem.tag == 'actions':
+ # We have an <actions> tag that should not be matched against a specific combination of architecture and operating system.
+ in_actions_group = False
+ actions_elem_tuples.append( ( in_actions_group, elem ) )
+ elif elem.tag == 'actions_group':
+ # We have an actions_group element, and its child <actions> elements should therefore be compared with the current operating system
+ # and processor architecture.
+ in_actions_group = True
+ # Record the number of <actions> elements so we can filter out any <action> elements that precede <actions> elements.
+ actions_elem_count = len( elem.findall( 'actions' ) )
+ # Record the number of <actions> elements that have architecture and os specified, in order to filter out any platform-independent
+ # <actions> elements that come before platform-specific <actions> elements. This call to elem.findall is filtered by tags that have
+ # both the os and architecture specified. For more details, see http://docs.python.org/2/library/xml.etree.elementtree.html Section
+ # 19.7.2.1.
+ platform_actions_element_count = len( elem.findall( 'actions[@architecture][@os]' ) )
+ platform_actions_elements_processed = 0
+ actions_elems_processed = 0
+ # The tag sets that will go into the after_install_actions list are <action> tags instead of <actions> tags. These will be processed
+ # only if they are at the very end of the <actions_group> tag set (after all <actions> tag sets). See below for details.
+ after_install_actions = []
+ # Inspect the <actions_group> element and build the actions_elem_list and the after_install_actions list.
+ for child_element in elem:
+ if child_element.tag == 'actions':
+ actions_elems_processed += 1
+ system = child_element.get( 'os' )
+ architecture = child_element.get( 'architecture' )
+ # Skip <actions> tags that have only one of architecture or os specified, in order for the count in
+ # platform_actions_elements_processed to remain accurate.
+ if ( system and not architecture ) or ( architecture and not system ):
+ log.debug( 'Error: Both architecture and os attributes must be specified in an <actions> tag.' )
+ continue
+ # Since we are inside an <actions_group> tag set, compare it with our current platform information and filter the <actions>
+ # tag sets that don't match. Require both the os and architecture attributes to be defined in order to find a match.
+ if system and architecture:
+ platform_actions_elements_processed += 1
+ # If either the os or architecture do not match the platform, this <actions> tag will not be considered a match. Skip
+ # it and proceed with checking the next one.
+ if platform_info_dict:
+ if platform_info_dict[ 'os' ] != system or platform_info_dict[ 'architecture' ] != architecture:
+ continue
+ else:
+ # We must not be installing a repository into Galaxy, so determining if we can install a binary is not necessary.
+ continue
+ else:
+ # <actions> tags without both os and architecture attributes are only allowed to be specified after platform-specific
+ # <actions> tags. If we find a platform-independent <actions> tag before all platform-specific <actions> tags have been
+ # processed, log an error message and skip it.
+ if platform_actions_elements_processed < platform_actions_element_count:
+ message = 'Error: <actions> tags without os and architecture attributes are only allowed after all <actions> tags with '
+ message += 'os and architecture attributes have been defined. Skipping the <actions> tag set with no os or architecture '
+ message += 'attributes that has been defined between two <actions> tag sets that have these attributes defined. '
+ log.debug( message )
+ continue
+ # If we reach this point, it means one of two things: 1) The system and architecture attributes are not defined in this
+ # <actions> tag, or 2) The system and architecture attributes are defined, and they are an exact match for the current
+ # platform. Append the child element to the list of elements to process.
+ actions_elem_list.append( child_element )
+ elif child_element.tag == 'action':
+ # Any <action> tags within an <actions_group> tag set must come after all <actions> tags.
+ if actions_elems_processed == actions_elem_count:
+ # If all <actions> elements have been processed, then this <action> element can be appended to the list of actions to
+ # execute within this group.
+ after_install_actions.append( child_element )
+ else:
+ # If any <actions> elements remain to be processed, then log a message stating that <action> elements are not allowed
+ # to precede any <actions> elements within an <actions_group> tag set.
+ message = 'Error: <action> tags are only allowed at the end of an <actions_group> tag set after all <actions> tags. '
+ message += 'Skipping <%s> element with type %s.' % ( child_element.tag, child_element.get( 'type' ) )
+ log.debug( message )
+ continue
+ if platform_info_dict is None and not include_after_install_actions:
+ # We must be setting metadata on a repository.
+ actions_elem_tuples.append( ( in_actions_group, actions_elem_list[ 0 ] ) )
+ elif platform_info_dict is not None and include_after_install_actions:
+ # We must be installing a repository.
+ if after_install_actions:
+ actions_elem_list.extend( after_install_actions )
+ actions_elem_tuples.append( ( in_actions_group, actions_elem_list ) )
+ else:
+ # Skip any element that is not <actions> or <actions_group> - this will skip comments, <repository> tags and <readme> tags.
+ in_actions_group = False
+ continue
+ return actions_elem_tuples
+
def tar_extraction_directory( file_path, file_name ):
"""Try to return the correct extraction directory."""
file_name = file_name.strip()
diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/util/commit_util.py
--- a/lib/tool_shed/util/commit_util.py
+++ b/lib/tool_shed/util/commit_util.py
@@ -133,6 +133,24 @@
bzipped_file.close()
shutil.move( uncompressed, uploaded_file_name )
+def handle_complex_repository_dependency_elem( trans, elem, sub_elem_index, sub_elem, sub_elem_altered, altered, unpopulate=False ):
+ """
+ Populate or unpopulate the toolshed and changeset_revision attributes of a <repository> tag that defines a complex repository
+ dependency.
+ """
+ # The received sub_elem looks something like the following:
+ # <repository name="package_eigen_2_0" owner="test" prior_installation_required="True" />
+ revised, repository_elem, error_message = handle_repository_dependency_elem( trans, sub_elem, unpopulate=unpopulate )
+ if error_message:
+ exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message
+ raise Exception( exception_message )
+ if revised:
+ elem[ sub_elem_index ] = repository_elem
+ sub_elem_altered = True
+ if not altered:
+ altered = True
+ return altered, sub_elem_altered, elem
+
def handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
undesirable_dirs_removed, undesirable_files_removed ):
repo_dir = repository.repo_path( trans.app )
@@ -300,7 +318,30 @@
error_message = 'Unable to locate repository with name %s and owner %s. ' % ( str( name ), str( owner ) )
return revised, elem, error_message
+def handle_set_environment_for_install( trans, package_altered, altered, actions_elem, action_index, action_elem, unpopulate=False ):
+ # <action type="set_environment_for_install">
+ # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0">
+ # <package name="eigen" version="2.0.17" />
+ # </repository>
+ # </action>
+ for repo_index, repo_elem in enumerate( action_elem ):
+ revised, repository_elem, error_message = handle_repository_dependency_elem( trans, repo_elem, unpopulate=unpopulate )
+ if error_message:
+ exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message
+ raise Exception( exception_message )
+ if revised:
+ action_elem[ repo_index ] = repository_elem
+ package_altered = True
+ if not altered:
+ altered = True
+ if package_altered:
+ actions_elem[ action_index ] = action_elem
+ return package_altered, altered, actions_elem
+
def handle_tool_dependencies_definition( trans, tool_dependencies_config, unpopulate=False ):
+ """
+ Populate or unpopulate the toolshed and changeset_revision attributes of each <repository> tag defined within a tool_dependencies.xml file.
+ """
altered = False
# Make sure we're looking at a valid tool_dependencies.xml file.
tree, error_message = xml_util.parse_xml( tool_dependencies_config )
@@ -308,51 +349,88 @@
return False, None
root = tree.getroot()
if root.tag == 'tool_dependency':
+ package_altered = False
for root_index, root_elem in enumerate( root ):
# <package name="eigen" version="2.0.17">
+ package_altered = False
if root_elem.tag == 'package':
- package_altered = False
for package_index, package_elem in enumerate( root_elem ):
if package_elem.tag == 'repository':
- # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0" prior_installation_required="True" />
- revised, repository_elem, error_message = handle_repository_dependency_elem( trans, package_elem, unpopulate=unpopulate )
- if error_message:
- exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message
- raise Exception( exception_message )
- if revised:
- root_elem[ package_index ] = repository_elem
- package_altered = True
- if not altered:
- altered = True
+ # We have a complex repository dependency.
+ altered, package_altered, root_elem = handle_complex_repository_dependency_elem( trans,
+ root_elem,
+ package_index,
+ package_elem,
+ package_altered,
+ altered,
+ unpopulate=unpopulate )
elif package_elem.tag == 'install':
# <install version="1.0">
for actions_index, actions_elem in enumerate( package_elem ):
- for action_index, action_elem in enumerate( actions_elem ):
- action_type = action_elem.get( 'type' )
- if action_type == 'set_environment_for_install':
- # <action type="set_environment_for_install">
- # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0">
- # <package name="eigen" version="2.0.17" />
- # </repository>
- # </action>
- for repo_index, repo_elem in enumerate( action_elem ):
- revised, repository_elem, error_message = handle_repository_dependency_elem( trans, repo_elem, unpopulate=unpopulate )
- if error_message:
- exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message
- raise Exception( exception_message )
- if revised:
- action_elem[ repo_index ] = repository_elem
- package_altered = True
- if not altered:
- altered = True
- if package_altered:
- actions_elem[ action_index ] = action_elem
+ if actions_elem.tag == 'actions_group':
+ # Inspect all entries in the <actions_group> tag set, skipping <actions> tag sets that define os and architecture
+ # attributes. We want to inspect only the last <actions> tag set contained within the <actions_group> tag set to
+ # see if a complex repository dependency is defined.
+ for actions_group_index, actions_group_elem in enumerate( actions_elem ):
+ if actions_group_elem.tag == 'actions':
+ # Skip all actions tags that include os or architecture attributes.
+ system = actions_group_elem.get( 'os' )
+ architecture = actions_group_elem.get( 'architecture' )
+ if system or architecture:
+ continue
+ # ...
+ # <actions>
+ # <package name="libgtextutils" version="0.6">
+ # <repository name="package_libgtextutils_0_6" owner="test" prior_installation_required="True" />
+ # </package>
+ # ...
+ for last_actions_index, last_actions_elem in enumerate( actions_group_elem ):
+ last_actions_package_altered = False
+ if last_actions_elem.tag == 'package':
+ for last_actions_elem_package_index, last_actions_elem_package_elem in enumerate( last_actions_elem ):
+ if last_actions_elem_package_elem.tag == 'repository':
+ # We have a complex repository dependency.
+ altered, last_actions_package_altered, last_actions_elem = \
+ handle_complex_repository_dependency_elem( trans,
+ last_actions_elem,
+ last_actions_elem_package_index,
+ last_actions_elem_package_elem,
+ last_actions_package_altered,
+ altered,
+ unpopulate=unpopulate )
+ if last_actions_package_altered:
+ last_actions_elem[ last_actions_elem_package_index ] = last_actions_elem_package_elem
+ actions_group_elem[ last_actions_index ] = last_actions_elem
+ else:
+ last_actions_elem_action_type = last_actions_elem.get( 'type' )
+ if last_actions_elem_action_type == 'set_environment_for_install':
+ last_actions_package_altered, altered, last_actions_elem = \
+ handle_set_environment_for_install( trans,
+ last_actions_package_altered,
+ altered,
+ actions_group_elem,
+ last_actions_index,
+ last_actions_elem,
+ unpopulate=unpopulate )
+ elif actions_elem.tag == 'actions':
+ # We are not in an <actions_group> tag set, so we must be in an <actions> tag set.
+ for action_index, action_elem in enumerate( actions_elem ):
+
+ action_type = action_elem.get( 'type' )
+ if action_type == 'set_environment_for_install':
+ package_altered, altered, actions_elem = handle_set_environment_for_install( trans,
+ package_altered,
+ altered,
+ actions_elem,
+ action_index,
+ action_elem,
+ unpopulate=unpopulate )
if package_altered:
package_elem[ actions_index ] = actions_elem
- if package_altered:
- root_elem[ package_index ] = package_elem
- if package_altered:
- root[ root_index ] = root_elem
+ if package_altered:
+ root_elem[ package_index ] = package_elem
+ if package_altered:
+ root[ root_index ] = root_elem
return altered, root
return False, None
diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -242,9 +242,8 @@
def get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies ):
"""Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories being installed into Galaxy."""
- # FIXME: this method currently populates and returns only missing tool dependencies since tool dependencies defined for complex repository dependency
- # relationships is not currently supported. This method should be enhanced to search for installed tool dependencies defined as complex repository
- # dependency relationships when that feature is implemented.
+ # FIXME: confirm that this method currently populates and returns only missing tool dependencies. If so, this method should be enhanced to search for
+ # installed tool dependencies defined as complex repository dependency relationships.
if all_tool_dependencies:
tool_dependencies = {}
missing_tool_dependencies = {}
diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -751,6 +751,32 @@
# dependency definition will be set as invalid. This is currently the only case where a tool dependency definition is
# considered invalid.
repository_dependency_tup, repository_dependency_is_valid, error_message = handle_repository_elem( app=app, repository_elem=sub_elem )
+ elif sub_elem.tag == 'install':
+ package_install_version = sub_elem.get( 'version', '1.0' )
+ if package_install_version == '1.0':
+ # Complex repository dependencies can be defined within the last <actions> tag set contained in an <actions_group> tag set.
+ # Comments, <repository> tag sets and <readme> tag sets will be skipped in td_common_util.parse_package_elem().
+ actions_elem_tuples = td_common_util.parse_package_elem( sub_elem, platform_info_dict=None, include_after_install_actions=False )
+ if actions_elem_tuples:
+ # We now have a list of a single tuple that looks something like: [(True, <Element 'actions' at 0x104017850>)]
+ actions_elem_tuple = actions_elem_tuples[ 0 ]
+ in_actions_group, actions_elem = actions_elem_tuple
+ if in_actions_group:
+ # Since we're inside an <actions_group> tag set, inspect the actions_elem to see if a complex repository dependency
+ # is defined.
+ for action_elem in actions_elem:
+ if action_elem.tag == 'package':
+ # <package name="libgtextutils" version="0.6">
+ # <repository name="package_libgtextutils_0_6" owner="test" prior_installation_required="True" />
+ # </package>
+ ae_package_name = action_elem.get( 'name', None )
+ ae_package_version = action_elem.get( 'version', None )
+ if ae_package_name and ae_package_version:
+ for sub_action_elem in action_elem:
+ if sub_action_elem.tag == 'repository':
+ # We have a complex repository dependency.
+ repository_dependency_tup, repository_dependency_is_valid, error_message = \
+ handle_repository_elem( app=app, repository_elem=sub_action_elem )
if requirements_dict:
dependency_key = '%s/%s' % ( package_name, package_version )
if repository_dependency_is_valid:
diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -365,14 +365,15 @@
missing_tool_dependencies = repository_missing_tool_dependencies
return installed_tool_dependencies, missing_tool_dependencies
-def remove_tool_dependency( trans, tool_dependency ):
- dependency_install_dir = tool_dependency.installation_directory( trans.app )
+def remove_tool_dependency( app, tool_dependency ):
+ sa_session = app.model.context.current
+ dependency_install_dir = tool_dependency.installation_directory( app )
removed, error_message = remove_tool_dependency_installation_directory( dependency_install_dir )
if removed:
- tool_dependency.status = trans.model.ToolDependency.installation_status.UNINSTALLED
+ tool_dependency.status = app.model.ToolDependency.installation_status.UNINSTALLED
tool_dependency.error_message = None
- trans.sa_session.add( tool_dependency )
- trans.sa_session.flush()
+ sa_session.add( tool_dependency )
+ sa_session.flush()
return removed, error_message
def remove_tool_dependency_installation_directory( dependency_install_dir ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/53f51406718d/
Changeset: 53f51406718d
User: Dave Bouvier
Date: 2013-09-05 23:04:41
Summary: Display the clone URL on an empty repository's view and manage pages for any user with write access.
Affected #: 2 files
diff -r ea75f5629c38a529cf70c6578a74dac7a8073880 -r 53f51406718dbe21dc1deec10749e9dadd0ee77c templates/webapps/tool_shed/repository/manage_repository.mako
--- a/templates/webapps/tool_shed/repository/manage_repository.mako
+++ b/templates/webapps/tool_shed/repository/manage_repository.mako
@@ -133,7 +133,7 @@
<label>${sharable_link_label}</label>
${render_sharable_str( repository, changeset_revision=sharable_link_changeset_revision )}
</div>
- %if can_download:
+ %if can_download or can_push:
<div class="form-row"><label>Clone this repository:</label>
${render_clone_str( repository )}
diff -r ea75f5629c38a529cf70c6578a74dac7a8073880 -r 53f51406718dbe21dc1deec10749e9dadd0ee77c templates/webapps/tool_shed/repository/view_repository.mako
--- a/templates/webapps/tool_shed/repository/view_repository.mako
+++ b/templates/webapps/tool_shed/repository/view_repository.mako
@@ -86,7 +86,7 @@
<label>${sharable_link_label}</label>
${render_sharable_str( repository, changeset_revision=sharable_link_changeset_revision )}
</div>
- %if can_download:
+ %if can_download or can_push:
<div class="form-row"><label>Clone this repository:</label>
${render_clone_str( repository )}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.