1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ea75f5629c38/
Changeset: ea75f5629c38
User: natefoo
Date: 2013-09-05 21:28:44
Summary: Add the now-required 'config' param to the DistributedObjectStore's call to the superclass __init__().
Affected #: 1 file
diff -r 559602ed992f131caa530aa468e86b3cd97c1bc5 -r ea75f5629c38a529cf70c6578a74dac7a8073880 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -372,7 +372,7 @@
"""
def __init__(self, config, fsmon=False):
- super(DistributedObjectStore, self).__init__()
+ super(DistributedObjectStore, self).__init__(config)
self.distributed_config = config.distributed_object_store_config_file
assert self.distributed_config is not None, "distributed object store ('object_store = distributed') " \
"requires a config file, please set one in " \
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/559602ed992f/
Changeset: 559602ed992f
User: jgoecks
Date: 2013-09-05 20:19:50
Summary: Correctly treat VCF format and genotyping fields as optional.
Affected #: 1 file
diff -r 087a792e45bcfb48d0717f6947520b44d9b56d11 -r 559602ed992f131caa530aa468e86b3cd97c1bc5 lib/galaxy/visualization/data_providers/genome.py
--- a/lib/galaxy/visualization/data_providers/genome.py
+++ b/lib/galaxy/visualization/data_providers/genome.py
@@ -670,9 +670,15 @@
# Split line and aggregate data.
feature = line.split()
pos, c_id, ref, alt, qual, c_filter, info = feature[ 1:8 ]
- format = feature[ 8 ]
- samples_data = feature [ 9: ]
- # VCF is 1-based.
+
+ # Format and samples data are optional.
+ format = None
+ samples_data = []
+ if len( feature ) > 8:
+ format = feature[ 8 ]
+ samples_data = feature [ 9: ]
+
+ # VCF is 1-based but provided position is 0-based.
pos = int( pos ) - 1
# FIXME: OK to skip?
@@ -680,11 +686,11 @@
count -= 1
continue
- # Count number of samples matching each allele.
+ # Set up array to track allele counts.
allele_counts = [ 0 for i in range ( alt.count( ',' ) + 1 ) ]
+ sample_gts = []
- # Process and pack sample genotype.
- sample_gts = []
+ # Process and pack samples' genotype and count alleles across samples.
alleles_seen = {}
has_alleles = False
@@ -711,7 +717,7 @@
genotype = ''
sample_gts.append( genotype )
-
+
# Add locus data.
locus_data = [
-1,
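A self-contained sketch of the parsing rule the change above enforces: the FORMAT column and per-sample genotype columns are optional in VCF, so a sites-only record (eight columns) must parse without an IndexError. The helper below is illustrative, not Galaxy's data provider.

    def parse_vcf_line(line):
        feature = line.split()
        # Columns 2-8: POS, ID, REF, ALT, QUAL, FILTER, INFO (column 1 is CHROM).
        pos, c_id, ref, alt, qual, c_filter, info = feature[1:8]
        # FORMAT and per-sample genotype columns are optional.
        format_field = None
        samples_data = []
        if len(feature) > 8:
            format_field = feature[8]
            samples_data = feature[9:]
        # VCF positions are 1-based; convert to 0-based.
        pos = int(pos) - 1
        return pos, c_id, ref, alt, qual, c_filter, info, format_field, samples_data

    # A sites-only record (no FORMAT/sample columns) parses without error:
    parse_vcf_line('chr1\t101\t.\tA\tG\t50\tPASS\tDP=20')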
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/087a792e45bc/
Changeset: 087a792e45bc
User: carlfeberhard
Date: 2013-09-05 19:49:46
Summary: JS Localization: fix replicate quotameter code in galaxy.masthead.mako; add mako localization function file
Affected #: 2 files
diff -r 4e86427c63f63b1d9a3278ea0adfbe0e89906c83 -r 087a792e45bcfb48d0717f6947520b44d9b56d11 templates/webapps/galaxy/galaxy.masthead.mako
--- a/templates/webapps/galaxy/galaxy.masthead.mako
+++ b/templates/webapps/galaxy/galaxy.masthead.mako
@@ -230,7 +230,7 @@
<!-- quota meter -->
${h.templates( "helpers-common-templates", "template-user-quotaMeter-quota", "template-user-quotaMeter-usage" )}
- ${h.js( "mvc/base-mvc", "mvc/user/user-model", "mvc/user/user-quotameter" )}
+ ${h.js( "mvc/base-mvc", "utils/localization", "mvc/user/user-model", "mvc/user/user-quotameter" )}
<script type="text/javascript">
// start a Galaxy namespace for objects created
diff -r 4e86427c63f63b1d9a3278ea0adfbe0e89906c83 -r 087a792e45bcfb48d0717f6947520b44d9b56d11 templates/webapps/galaxy/utils/localization.mako
--- /dev/null
+++ b/templates/webapps/galaxy/utils/localization.mako
@@ -0,0 +1,12 @@
+<%def name="localize_js_strings( strings_to_localize )">
+##PRECONDITION: static/scripts/utils/localization.js should be loaded first
+## adds localized versions of strings to the JS GalaxyLocalization for use in later JS
+## where strings_to_localize is a list of strings to localize
+<script type="text/javascript">
+ ## strings need to be mako rendered in order to use the '_' gettext helper for localization
+ ## these are then cached in the js object
+ GalaxyLocalization.setLocalizedString(
+ ${ h.to_json_string( dict([ ( string, _(string) ) for string in strings_to_localize ]) ) }
+ );
+</script>
+</%def>
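To illustrate what the ${ ... } expression in the mako def above produces on the server side, here is a hedged Python sketch using json.dumps as a stand-in for h.to_json_string, with '_' standing in for the gettext helper available in templates. The result is a JSON object mapping each source string to its translation, which the client-side GalaxyLocalization cache then stores.

    import json

    def localized_string_map(strings_to_localize, _):
        # Build { original: translated } and serialize it for embedding in JS.
        return json.dumps(dict((s, _(s)) for s in strings_to_localize))

    # With an identity "translation" the mapping is returned unchanged:
    localized_string_map(['History'], lambda s: s)   # '{"History": "History"}'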
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9fe9aa6ac504/
Changeset: 9fe9aa6ac504
Branch: stable
User: Dave Bouvier
Date: 2013-09-04 20:30:49
Summary: Fix for duplicate records created when a repository is installed from the tool shed after the same repository was previously installed and uninstalled.
Affected #: 1 file
diff -r ee6d553f7847ec47cca0cf205c767d1407eccbab -r 9fe9aa6ac504bc7703ba5d465e68170d79e89905 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -189,6 +189,10 @@
log.debug( debug_msg )
suc.reset_previously_installed_repository( trans, installed_tool_shed_repository )
can_update = True
+ reinstalling = True
+ # Set changeset_revision here so suc.create_or_update_tool_shed_repository will find the previously installed
+ # and uninstalled repository instead of creating a new record.
+ changeset_revision = installed_tool_shed_repository.installed_changeset_revision
else:
# A tool shed repository is being installed into a Galaxy instance for the first time, or we're attempting to install it or reinstall it resulted
# in an error. In the latter case, the repository record in the database has no metadata and it's status has been set to 'New'. In either case,
https://bitbucket.org/galaxy/galaxy-central/commits/380f30568086/
Changeset: 380f30568086
User: Dave Bouvier
Date: 2013-09-04 20:34:57
Summary: Merge in bugfix from stable.
Affected #: 1 file
diff -r b9da077a79c3ce0323bd8f8e5aed3e311d1fd8ae -r 380f30568086d773d6153a17f3fd1d85a7b42626 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -189,6 +189,10 @@
log.debug( debug_msg )
suc.reset_previously_installed_repository( trans, installed_tool_shed_repository )
can_update = True
+ reinstalling = True
+ # Set changeset_revision here so suc.create_or_update_tool_shed_repository will find the previously installed
+ # and uninstalled repository instead of creating a new record.
+ changeset_revision = installed_tool_shed_repository.installed_changeset_revision
else:
# A tool shed repository is being installed into a Galaxy instance for the first time, or we're attempting to install it or reinstall it resulted
# in an error. In the latter case, the repository record in the database has no metadata and it's status has been set to 'New'. In either case,
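A hedged, self-contained sketch (not the actual shed_util_common code) of why reusing installed_changeset_revision matters here: if the find-or-create lookup is keyed on the changeset revision, only the previously installed value matches the old, uninstalled record; any other value misses it and inserts a duplicate row. All names and values below are illustrative.

    _records = {}  # (name, owner, changeset_revision) -> record

    def create_or_update_repository(name, owner, changeset_revision, status):
        key = (name, owner, changeset_revision)
        record = _records.get(key)
        if record is None:
            record = {'name': name, 'owner': owner,
                      'changeset_revision': changeset_revision}
            _records[key] = record
        record['status'] = status
        return record

    # Reinstalling with the previously installed revision updates the same
    # record instead of creating a second one:
    first = create_or_update_repository('filtering', 'user1', 'abc123', 'Installed')
    again = create_or_update_repository('filtering', 'user1', 'abc123', 'Installing')
    assert first is again and len(_records) == 1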
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4a4a84d35bc5/
Changeset: 4a4a84d35bc5
User: Dave Bouvier
Date: 2013-09-03 18:20:11
Summary: Update functional tests to reflect changes in how text is sanitized.
Affected #: 2 files
diff -r 6e291fa55e10aa235196ac1c9f4c4dcb0afeaf6d -r 4a4a84d35bc56b45152b6efbed7496de68424fe8 test/tool_shed/functional/test_0000_basic_repository_features.py
--- a/test/tool_shed/functional/test_0000_basic_repository_features.py
+++ b/test/tool_shed/functional/test_0000_basic_repository_features.py
@@ -151,7 +151,7 @@
commit_message="Uploaded filtering.txt",
strings_displayed=[],
strings_not_displayed=[] )
- self.display_manage_repository_page( repository, strings_displayed=[ 'Readme&nbsp;file&nbsp;for&nbsp;filtering&nbsp;1.1.0' ] )
+ self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
def test_0055_upload_filtering_test_data( self ):
'''Upload filtering test data.'''
@@ -219,19 +219,19 @@
commit_message="Uploaded readme.txt",
strings_displayed=[],
strings_not_displayed=[] )
- self.display_manage_repository_page( repository, strings_displayed=[ 'This&nbsp;is&nbsp;a&nbsp;readme&nbsp;file.' ] )
+ self.display_manage_repository_page( repository, strings_displayed=[ 'This is a readme file.' ] )
# Verify that there is a different readme file for each metadata revision.
metadata_revisions = self.get_repository_metadata_revisions( repository )
self.display_manage_repository_page( repository,
- strings_displayed=[ 'Readme&nbsp;file&nbsp;for&nbsp;filtering&nbsp;1.1.0',
- 'This&nbsp;is&nbsp;a&nbsp;readme&nbsp;file.' ] )
+ strings_displayed=[ 'Readme file for filtering 1.1.0',
+ 'This is a readme file.' ] )
def test_0075_delete_readme_txt_file( self ):
'''Delete the readme.txt file.'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.delete_files_from_repository( repository, filenames=[ 'readme.txt' ] )
self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=2 )
- self.display_manage_repository_page( repository, strings_displayed=[ 'Readme&nbsp;file&nbsp;for&nbsp;filtering&nbsp;1.1.0' ] )
+ self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
def test_0080_search_for_valid_filter_tool( self ):
'''Search for the filtering tool by tool ID, name, and version.'''
@@ -335,7 +335,7 @@
commit_message="Uploaded new readme.txt with invalid ascii characters.",
strings_displayed=[],
strings_not_displayed=[] )
- self.display_manage_repository_page( repository, strings_displayed=[ 'These&nbsp;characters&nbsp;should&nbsp;not' ] )
+ self.display_manage_repository_page( repository, strings_displayed=[ 'These characters should not' ] )
def test_0130_verify_handling_of_invalid_characters( self ):
'''Load the above changeset in the change log and confirm that there is no server error displayed.'''
@@ -353,5 +353,5 @@
# Check for the changeset revision, repository name, owner username, 'repos' in the clone url, and the captured
# unicode decoding error message.
strings_displayed = [ 'Changeset %d:%s' % ( revision_number, revision_hash ), 'filtering_0000', 'user1', 'repos', 'added:',
- '+These&nbsp;characters&nbsp;should&nbsp;not' ]
+ '+These characters should not' ]
self.load_changeset_in_tool_shed( repository_id, changeset_revision, strings_displayed=strings_displayed )
\ No newline at end of file
diff -r 6e291fa55e10aa235196ac1c9f4c4dcb0afeaf6d -r 4a4a84d35bc56b45152b6efbed7496de68424fe8 test/tool_shed/functional/test_0410_repository_component_review_access_control.py
--- a/test/tool_shed/functional/test_0410_repository_component_review_access_control.py
+++ b/test/tool_shed/functional/test_0410_repository_component_review_access_control.py
@@ -186,10 +186,10 @@
self.login( email=common.test_user_3_email, username=common.test_user_3_name )
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
user = test_db_util.get_user( common.test_user_2_email )
- strings_displayed = [ 'A&nbsp;good&nbsp;set&nbsp;of&nbsp;functional&nbsp;tests.',
- 'Clear&nbsp;and&nbsp;concise&nbsp;readme&nbsp;file',
- 'a&nbsp;true&nbsp;pleasure&nbsp;to&nbsp;read.',
- 'Excellent&nbsp;tool,&nbsp;easy&nbsp;to&nbsp;use.' ]
+ strings_displayed = [ 'A good set of functional tests.',
+ 'Clear and concise readme file',
+ 'a true pleasure to read.',
+ 'Excellent tool, easy to use.' ]
changeset_revision = self.get_repository_tip( repository )
review = test_db_util.get_repository_review_by_user_id_changeset_revision( user.id, repository.id, changeset_revision )
self.browse_component_review( review, strings_displayed=strings_displayed )
https://bitbucket.org/galaxy/galaxy-central/commits/b9da077a79c3/
Changeset: b9da077a79c3
User: Dave Bouvier
Date: 2013-09-03 18:22:29
Summary: Tool dependency definitions: Add <actions_group> tag and support for filtering contained <actions> tags by architecture and operating system.
Affected #: 4 files
diff -r 4a4a84d35bc56b45152b6efbed7496de68424fe8 -r b9da077a79c3ce0323bd8f8e5aed3e311d1fd8ae lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -217,13 +217,25 @@
# Eliminate the download_by_url action so remaining actions can be processed correctly.
filtered_actions = actions[ 1: ]
url = action_dict[ 'url' ]
+ is_binary = action_dict.get( 'is_binary', False )
+ log.debug( 'Attempting to download via url: %s', url )
if 'target_filename' in action_dict:
- # Sometimes compressed archives extracts their content to a folder other than the default defined file name. Using this
+ # Sometimes compressed archives extract their content to a folder other than the default defined file name. Using this
# attribute will ensure that the file name is set appropriately and can be located after download, decompression and extraction.
downloaded_filename = action_dict[ 'target_filename' ]
else:
downloaded_filename = os.path.split( url )[ -1 ]
dir = td_common_util.url_download( work_dir, downloaded_filename, url, extract=True )
+ if is_binary:
+ log_file = os.path.join( install_dir, INSTALLATION_LOG )
+ log.debug( 'log_file: %s' % log_file )
+ if os.path.exists( log_file ):
+ logfile = open( log_file, 'ab' )
+ else:
+ logfile = open( log_file, 'wb' )
+ logfile.write( 'Successfully downloaded from url: %s\n' % action_dict[ 'url' ] )
+ logfile.close()
+ log.debug( 'Successfully downloaded from url: %s' % action_dict[ 'url' ] )
elif action_type == 'shell_command':
# <action type="shell_command">git clone --recursive git://github.com/ekg/freebayes.git</action>
# Eliminate the shell_command clone action so remaining actions can be processed correctly.
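A minimal sketch of the installation-log behavior added in the fabric_util.py hunk above: append the download confirmation to a log file inside the install directory, creating the file on first write. The file name and the use of text mode are illustrative simplifications.

    import os

    def record_download(install_dir, url, log_name='INSTALLATION_LOG'):
        log_file = os.path.join(install_dir, log_name)
        # Append if the log already exists, otherwise create it.
        mode = 'a' if os.path.exists(log_file) else 'w'
        with open(log_file, mode) as handle:
            handle.write('Successfully downloaded from url: %s\n' % url)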
diff -r 4a4a84d35bc56b45152b6efbed7496de68424fe8 -r b9da077a79c3ce0323bd8f8e5aed3e311d1fd8ae lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -333,13 +333,174 @@
type='package',
status=app.model.ToolDependency.installation_status.INSTALLING,
set_status=True )
+ # Get the information that defines the current platform.
+ platform_info_dict = tool_dependency_util.get_platform_info_dict()
if package_install_version == '1.0':
- # Handle tool dependency installation using a fabric method included in the Galaxy framework.
- for actions_elem in package_elem:
- install_via_fabric( app, tool_dependency, actions_elem, install_dir, package_name=package_name )
- sa_session.refresh( tool_dependency )
- if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
- print package_name, 'version', package_version, 'installed in', install_dir
+ # Handle tool dependency installation using a fabric method included in the Galaxy framework. The first thing we do
+ # is check the installation architecture to see if we have a precompiled binary that works on the target system.
+ binary_installed = False
+ actions_elem_tuples = []
+ # Build a list of grouped and ungrouped <actions> tagsets to be processed in the order they are defined in the
+ # tool_dependencies.xml file.
+ for elem in package_elem:
+ # Default to not treating actions as grouped.
+ grouped = False
+ # Skip any element that is not <actions> or <actions_group>. This will also skip comments and <readme> tags.
+ if elem.tag not in [ 'actions', 'actions_group' ]:
+ continue
+ if elem.tag == 'actions':
+ # We have an <actions> tag that should not be matched against a specific combination of architecture and operating system.
+ grouped = False
+ actions_elem_tuples.append( ( grouped, elem ) )
+ else:
+ # Record the number of <actions> elements, in order to filter out any <action> elements that precede <actions>
+ # elements.
+ actions_elem_count = len( elem.findall( 'actions' ) )
+ # Record the number of <actions> elements that have architecture and os specified, in order to filter out any
+ # platform-independent <actions> elements that come before platform-specific <actions> elements. This call to
+ # elem.findall is filtered by tags that have both the os and architecture specified.
+ # For more details, see http://docs.python.org/2/library/xml.etree.elementtree.html Section 19.7.2.1.
+ platform_actions_element_count = len( elem.findall( 'actions[@architecture][@os]' ) )
+ platform_actions_elements_processed = 0
+ actions_elems_processed = 0
+ # We have an actions_group element, and its child <actions> elements should therefore be compared with the current
+ # operating system and processor architecture.
+ grouped = True
+ # The tagsets that will go into the actions_elem_list are those that install a precompiled binary if the
+ # architecture and operating system match its defined attributes. If precompiled binary is not installed
+ # the first <actions> tag following those that have the os and architecture attributes will be processed
+ # in order to install and compile the source.
+ actions_elem_list = []
+ # The tagsets that will go into the after_install_actions list are <action> tags instead of <actions> tags. These
+ # will only be processed if they are at the end of the <actions_group> tagset. See below for details.
+ after_install_actions = []
+ platform_independent_actions = []
+ # Loop through the <actions_group> element and build the actions_elem_list and the after_install_actions list.
+ for child_element in elem:
+ if child_element.tag == 'actions':
+ actions_elems_processed += 1
+ system = child_element.get( 'os' )
+ architecture = child_element.get( 'architecture' )
+ # Skip <actions> tags that have only one of architecture or os specified, in order for the count in
+ # platform_actions_elements_processed to remain accurate.
+ if ( system and not architecture ) or ( architecture and not system ):
+ log.debug( 'Error: Both architecture and os attributes must be specified in an <actions> tag.' )
+ continue
+ # Since we are inside an <actions_group> tagset, compare it with our current platform information and filter
+ # the <actions> tagsets that don't match. Require both the os and architecture attributes to be defined in
+ # order to find a match.
+ if system and architecture:
+ platform_actions_elements_processed += 1
+ # If either the os or architecture do not match the platform, this <actions> tag will not be considered
+ # a match. Skip it and proceed with checking the next one.
+ if platform_info_dict[ 'os' ] != system or platform_info_dict[ 'architecture' ] != architecture:
+ continue
+ else:
+ # <actions> tags without both os and architecture attributes are only allowed to be specified after
+ # platform-specific <actions> tags. If we find a platform-independent <actions> tag before all
+ # platform-specific <actions> tags have been processed, log a message stating this and skip to the
+ # next <actions> tag.
+ if platform_actions_elements_processed < platform_actions_element_count:
+ message = 'Error: <actions> tags without os and architecture attributes are only allowed '
+ message += 'after <actions> tags with os and architecture attributes specified. Skipping '
+ message += 'current <actions> tag.'
+ log.debug( message )
+ continue
+ # If we reach this point, it means one of two things: 1) The system and architecture attributes are not
+ # defined in this <actions> tag, or 2) The system and architecture attributes are defined, and they are
+ # an exact match for the current platform. Append the child element to the list of elements to process.
+ actions_elem_list.append( child_element )
+ elif child_element.tag == 'action':
+ # Any <action> tags within an <actions_group> tagset must come after all <actions> tags.
+ if actions_elems_processed == actions_elem_count:
+ # If all <actions> elements have been processed, then this <action> element can be appended to the
+ # list of actions to execute within this group.
+ after_install_actions.append( child_element )
+ else:
+ # If any <actions> elements remain to be processed, then log a message stating that <action>
+ # elements are not allowed to precede any <actions> elements within an <actions_group> tagset.
+ message = 'Error: <action> tags are only allowed at the end of an <actions_group> '
+ message += 'tagset, after all <actions> tags. '
+ message += 'Skipping <%s> element with type %s.' % ( child_element.tag, child_element.get( 'type' ) )
+ log.debug( message )
+ continue
+ if after_install_actions:
+ actions_elem_list.extend( after_install_actions )
+ actions_elem_tuples.append( ( grouped, actions_elem_list ) )
+ # At this point we have a list of <actions> elems that are either defined within an <actions_group> tagset, and filtered by
+ # the current platform, or not defined within an <actions_group> tagset, and not filtered.
+ for grouped, actions_elems in actions_elem_tuples:
+ if grouped:
+ # Platform matching is only performed inside <actions_group> tagsets, os and architecture attributes are otherwise ignored.
+ for actions_elem in actions_elems:
+ system = actions_elem.get( 'os' )
+ architecture = actions_elem.get( 'architecture' )
+ # If this <actions> element has the os and architecture attributes defined, then we only want to process
+ # until a successful installation is achieved.
+ if system and architecture:
+ # If an <actions> tag has been defined that matches our current platform, and the recipe specified
+ # within that <actions> tag has been successfully processed, skip any remaining platform-specific
+ # <actions> tags.
+ if binary_installed:
+ continue
+ # No platform-specific <actions> recipe has yet resulted in a successful installation.
+ install_via_fabric( app,
+ tool_dependency,
+ install_dir,
+ package_name=package_name,
+ actions_elem=actions_elem,
+ action_elem=None )
+ sa_session.refresh( tool_dependency )
+ if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
+ # If an <actions> tag was found that matches the current platform, and the install_via_fabric method
+ # did not result in an error state, set binary_installed to True in order to skip any remaining
+ # platform-specific <actions> tags.
+ if not binary_installed:
+ binary_installed = True
+ else:
+ # Otherwise, move on to the next matching <actions> tag, or any defined <actions> tags that do not
+ # contain platform-dependent recipes.
+ if binary_installed:
+ binary_installed = False
+ print 'Encountered an error downloading binary for %s version %s: %s' % \
+ ( package_name, package_version, tool_dependency.error_message )
+ else:
+ # If no <actions> tags have been defined that match our current platform, or none of the matching
+ # <actions> tags resulted in a successful tool dependency status, proceed with one and only one
+ # <actions> tag that is not defined to be platform-specific.
+ if not binary_installed:
+ log.debug( 'Platform-specific recipe failed or not found. Proceeding with platform-independent install recipe.' )
+ install_via_fabric( app,
+ tool_dependency,
+ install_dir,
+ package_name=package_name,
+ actions_elem=actions_elem,
+ action_elem=None )
+ break
+ # Perform any final actions that have been defined within the actions_group tagset, but outside of
+ # an <actions> tag, such as a set_environment entry, or a download_file or download_by_url command to
+ # retrieve extra data for this tool dependency. Only do this if the tool dependency is not in an error
+ # state, otherwise skip this action.
+ if actions_elem.tag == 'action' and tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
+ install_via_fabric( app,
+ tool_dependency,
+ install_dir,
+ package_name=package_name,
+ actions_elem=None,
+ action_elem=actions_elem )
+ else:
+ # <actions> tags outside of an <actions_group> tag shall not check os or architecture, and if the attributes are
+ # defined, they will be ignored. All <actions> tags outside of an <actions_group> tagset shall always be processed.
+ # This is the default and original behavior of the install_package method.
+ install_via_fabric( app,
+ tool_dependency,
+ install_dir,
+ package_name=package_name,
+ actions_elem=actions_elems,
+ action_elem=None )
+ sa_session.refresh( tool_dependency )
+ if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
+ print package_name, 'version', package_version, 'installed in', install_dir
else:
raise NotImplementedError( 'Only install version 1.0 is currently supported (i.e., change your tag to be <install version="1.0">).' )
elif package_elem.tag == 'readme':
@@ -356,7 +517,7 @@
# print 'Installing tool dependencies via fabric script ', proprietary_fabfile_path
return tool_dependency
-def install_via_fabric( app, tool_dependency, actions_elem, install_dir, package_name=None, proprietary_fabfile_path=None, **kwd ):
+def install_via_fabric( app, tool_dependency, install_dir, package_name=None, proprietary_fabfile_path=None, actions_elem=None, action_elem=None, **kwd ):
"""Parse a tool_dependency.xml file's <actions> tag set to gather information for the installation via fabric."""
sa_session = app.model.context.current
@@ -372,8 +533,21 @@
actions = []
all_env_shell_file_paths = []
env_var_dicts = []
- # Make sure to skip all comments, since they are now included in the XML tree.
- for action_elem in actions_elem.findall( 'action' ):
+ if actions_elem is not None:
+ elems = actions_elem
+ if elems.get( 'architecture' ) is not None:
+ is_binary_download = True
+ else:
+ is_binary_download = False
+ elif action_elem:
+ # We were provided with a single <action> element to perform certain actions after a platform-specific tarball was downloaded.
+ elems = [ action_elem ]
+ else:
+ elems = []
+ for action_elem in elems:
+ # Make sure to skip all comments, since they are now included in the XML tree.
+ if action_elem.tag != 'action':
+ continue
action_dict = {}
action_type = action_elem.get( 'type', 'shell_command' )
if action_type == 'download_binary':
@@ -420,6 +594,8 @@
raise Exception( "Unsupported template language '%s' in tool dependency definition." % str( language ) )
elif action_type == 'download_by_url':
# <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1…</action>
+ if is_binary_download:
+ action_dict[ 'is_binary' ] = True
if action_elem.text:
action_dict[ 'url' ] = action_elem.text
target_filename = action_elem.get( 'target_filename', None )
diff -r 4a4a84d35bc56b45152b6efbed7496de68424fe8 -r b9da077a79c3ce0323bd8f8e5aed3e311d1fd8ae lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -370,7 +370,7 @@
tool_dependency = install_package( app, elem, tool_shed_repository, tool_dependencies=tool_dependencies )
except Exception, e:
error_message = "Error installing tool dependency %s version %s: %s" % ( str( package_name ), str( package_version ), str( e ) )
- log.debug( error_message )
+ log.exception( error_message )
if tool_dependency:
tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
tool_dependency.error_message = error_message
diff -r 4a4a84d35bc56b45152b6efbed7496de68424fe8 -r b9da077a79c3ce0323bd8f8e5aed3e311d1fd8ae lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -160,10 +160,10 @@
def get_download_url_for_platform( url_templates, platform_info_dict ):
'''
- Compare the dict returned by get_platform_info() with the values specified in the base_url element. Return
+ Compare the dict returned by get_platform_info() with the values specified in the url_template element. Return
true if and only if all defined attributes match the corresponding dict entries. If an entry is not
- defined in the base_url element, it is assumed to be irrelevant at this stage. For example,
- <base_url os="darwin">http://hgdownload.cse.ucsc.edu/admin/exe/macOSX.${architecture}/faToTwoBit</base_url>
+ defined in the url_template element, it is assumed to be irrelevant at this stage. For example,
+ <url_template os="darwin">http://hgdownload.cse.ucsc.edu/admin/exe/macOSX.${architecture}/faToTwoBit</url_template>
where the OS must be 'darwin', but the architecture is filled in later using string.Template.
'''
os_ok = False
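To summarize the selection rules implemented above in install_util.py, here is a condensed, self-contained sketch rather than the actual Galaxy code: inside an <actions_group>, an <actions> child with both os and architecture attributes is used only on an exact platform match; a platform-independent <actions> child (no attributes) serves as the compile-from-source fallback; and trailing <action> children are after-install steps. The helper and example data below are illustrative.

    from xml.etree import ElementTree

    def select_actions(actions_group_elem, platform_info):
        # platform_info is a dict like {'os': 'linux', 'architecture': 'x86_64'},
        # analogous to the platform_info_dict built in the diff above.
        platform_specific = []
        fallback = None
        after_install = []
        for child in actions_group_elem:
            if child.tag == 'actions':
                system = child.get('os')
                architecture = child.get('architecture')
                if system and architecture:
                    # Platform-specific recipe: keep it only on an exact match.
                    if (system == platform_info['os'] and
                            architecture == platform_info['architecture']):
                        platform_specific.append(child)
                elif not system and not architecture and fallback is None:
                    # First platform-independent recipe is the source-install fallback.
                    fallback = child
            elif child.tag == 'action':
                # Trailing <action> tags (e.g. set_environment) run after install.
                after_install.append(child)
        return platform_specific, fallback, after_install

    group = ElementTree.fromstring(
        '<actions_group>'
        '<actions os="linux" architecture="x86_64">'
        '<action type="download_binary"/></actions>'
        '<actions><action type="shell_command">make</action></actions>'
        '<action type="set_environment"/>'
        '</actions_group>')
    matched, fallback, extra = select_actions(group, {'os': 'linux', 'architecture': 'x86_64'})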
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6e291fa55e10/
Changeset: 6e291fa55e10
User: greg
Date: 2013-09-03 17:10:50
Summary: Convert markupsafe object to unicode before rendering in the tool shed.
Affected #: 1 file
diff -r 535d8ed52369a8bf02ce5e382b488bf94a6f257b -r 6e291fa55e10aa235196ac1c9f4c4dcb0afeaf6d lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1468,7 +1468,7 @@
text = unicodify( text )
except UnicodeDecodeError, e:
return "Error decoding string: %s" % str( e )
- text = markupsafe.escape( text )
+ text = unicode( markupsafe.escape( text ) )
text = text.replace( '\n', '<br/>' )
text = text.replace( '    ', '&nbsp;&nbsp;&nbsp;&nbsp;' )
text = text.replace( ' ', '&nbsp;' )
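The unicode() cast above matters because markupsafe.escape() returns a Markup object, and Markup's string methods escape their arguments, so replacing '\n' with '<br/>' on the Markup object would insert the escaped text '&lt;br/&gt;' rather than a real tag. A minimal sketch (Python 2, matching the diff; requires the markupsafe package):

    import markupsafe

    text = 'line one\nline two <script>'
    escaped = markupsafe.escape(text)                        # Markup object, '<' already escaped
    kept_as_markup = escaped.replace('\n', '<br/>')          # argument gets escaped: '&lt;br/&gt;'
    cast_to_plain = unicode(escaped).replace('\n', '<br/>')  # literal '<br/>' preserved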
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/76d580b93d86/
Changeset: 76d580b93d86
User: james_taylor
Date: 2013-09-02 21:02:33
Summary: Adding a CITATION file, http://software-carpentry.org/blog/2013/09/introducing-citation-files.html
Affected #: 1 file
diff -r 0470feeb593f9797fa9fd19dfdac4751a7ca788b -r 76d580b93d86940350bb810cab0c8ef088a2197c CITATION
--- /dev/null
+++ b/CITATION
@@ -0,0 +1,54 @@
+If you use or extend Galaxy in your published work, please cite each of the
+following publications:
+
+- Goecks, J, Nekrutenko, A, Taylor, J and The Galaxy Team. "Galaxy: a
+ comprehensive approach for supporting accessible, reproducible, and
+ transparent computational research in the life sciences."
+ Genome Biol. 2010 Aug 25;11(8):R86.
+
+- Blankenberg D, Von Kuster G, Coraor N, Ananda G, Lazarus R, Mangan M,
+ Nekrutenko A, Taylor J. "Galaxy: a web-based genome analysis tool for
+ experimentalists". Current Protocols in Molecular Biology.
+ 2010 Jan; Chapter 19:Unit 19.10.1-21.
+
+- Giardine B, Riemer C, Hardison RC, Burhans R, Elnitski L, Shah P, Zhang Y,
+ Blankenberg D, Albert I, Taylor J, Miller W, Kent WJ, Nekrutenko A. "Galaxy:
+ a platform for interactive large-scale genome analysis."
+ Genome Research. 2005 Oct; 15(10):1451-5.
+
+See also: http://wiki.galaxyproject.org/CitingGalaxy
+
+
+
+BibTeX format:
+
+@article{goecks2010galaxy,
+ title={Galaxy: a comprehensive approach for supporting accessible, reproducible, and transparent computational research in the life sciences},
+ author={Goecks, Jeremy and Nekrutenko, Anton and Taylor, James and The Galaxy Team},
+ journal={Genome Biol},
+ volume={11},
+ number={8},
+ pages={R86},
+ year={2010}
+}
+
+@article{blankenberg2010galaxy,
+ title={Galaxy: A Web-Based Genome Analysis Tool for Experimentalists},
+ author={Blankenberg, Daniel and Kuster, Gregory Von and Coraor, Nathaniel and Ananda, Guruprasad and Lazarus, Ross and Mangan, Mary and Nekrutenko, Anton and Taylor, James},
+ journal={Current protocols in molecular biology},
+ pages={19--10},
+ year={2010},
+ publisher={John Wiley \& Sons, Inc.}
+}
+
+@article{giardine2005galaxy,
+ title={Galaxy: a platform for interactive large-scale genome analysis},
+ author={Giardine, Belinda and Riemer, Cathy and Hardison, Ross C and Burhans, Richard and Elnitski, Laura and Shah, Prachi and Zhang, Yi and Blankenberg, Daniel and Albert, Istvan and Taylor, James and Miller, Webb C and Kent, W James and Nekrutenko, Anton},
+ journal={Genome research},
+ volume={15},
+ number={10},
+ pages={1451--1455},
+ year={2005},
+ publisher={Cold Spring Harbor Lab}
+}
+