1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8704a185d43e/
Changeset: 8704a185d43e
User: dan
Date: 2014-01-17 21:45:04
Summary: Change log.error to log.debug.
Affected #: 1 file
diff -r 82b5aef7a1da9e4129ba2a3920a54048642cc991 -r 8704a185d43ef02b0dd763a89a9b003df0326d90 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -400,7 +400,7 @@
if fields not in self.data or allow_duplicates:
self.data.append( fields )
else:
- log.error( "Attempted to add fields (%s) to data table '%s', but this entry already exists and allow_duplicates is False.", fields, self.name )
+ log.debug( "Attempted to add fields (%s) to data table '%s', but this entry already exists and allow_duplicates is False.", fields, self.name )
is_error = True
else:
log.error( "Attempted to add fields (%s) to data table '%s', but there were not enough fields specified ( %i < %i ).", fields, self.name, len( fields ), self.largest_index + 1 )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/82b5aef7a1da/
Changeset: 82b5aef7a1da
User: greg
Date: 2014-01-17 19:49:10
Summary: Enhance setting tool shed repository metadata to support tool dependency recipes that define installation of a binary but do not also include a recipe for installing and compiling from source.
Affected #: 3 files
diff -r 79c1a31e269d4668d125c1a277549617d34dae13 -r 82b5aef7a1da9e4129ba2a3920a54048642cc991 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -303,7 +303,8 @@
with make_tmp_dir() as work_dir:
with lcd( work_dir ):
# The first action in the list of actions will be the one that defines the installation process. There
- # are currently only two supported processes; download_by_url and clone via a "shell_command" action type.
+ # are currently three supported processes; download_binary, download_by_url and clone via a "shell_command"
+ # action type.
action_type, action_dict = actions[ 0 ]
if action_type == 'download_binary':
url = action_dict[ 'url' ]
@@ -316,11 +317,13 @@
downloaded_filename = td_common_util.download_binary( url, work_dir )
# Filter out any actions that are not download_binary, chmod, or set_environment.
filtered_actions = filter_actions_after_binary_installation( actions[ 1: ] )
- # Set actions to the same, so that the current download_binary doesn't get re-run in the filtered actions below.
+ # Set actions to the same, so that the current download_binary doesn't get re-run in the
+ # filtered actions below.
actions = filtered_actions
except Exception, e:
log.exception( str( e ) )
- # No binary exists, or there was an error downloading the binary from the generated URL. Proceed with the remaining actions.
+ # No binary exists, or there was an error downloading the binary from the generated URL.
+ # Proceed with the remaining actions.
filtered_actions = actions[ 1: ]
action_type, action_dict = filtered_actions[ 0 ]
# If the downloaded file exists, move it to $INSTALL_DIR. Put this outside the try/catch above so that
@@ -345,8 +348,9 @@
is_binary = action_dict.get( 'is_binary', False )
log.debug( 'Attempting to download via url: %s', url )
if 'target_filename' in action_dict:
- # Sometimes compressed archives extract their content to a folder other than the default defined file name. Using this
- # attribute will ensure that the file name is set appropriately and can be located after download, decompression and extraction.
+ # Sometimes compressed archives extract their content to a folder other than the default
+ # defined file name. Using this attribute will ensure that the file name is set appropriately
+ # and can be located after download, decompression and extraction.
downloaded_filename = action_dict[ 'target_filename' ]
else:
downloaded_filename = os.path.split( url )[ -1 ]
@@ -374,8 +378,9 @@
filtered_actions = actions[ 1: ]
url = action_dict[ 'url' ]
if 'target_filename' in action_dict:
- # Sometimes compressed archives extracts their content to a folder other than the default defined file name. Using this
- # attribute will ensure that the file name is set appropriately and can be located after download, decompression and extraction.
+ # Sometimes compressed archives extract their content to a folder other than the default
+ # defined file name. Using this attribute will ensure that the file name is set appropriately
+ # and can be located after download, decompression and extraction.
filename = action_dict[ 'target_filename' ]
else:
filename = url.split( '/' )[ -1 ]
diff -r 79c1a31e269d4668d125c1a277549617d34dae13 -r 82b5aef7a1da9e4129ba2a3920a54048642cc991 lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
@@ -324,31 +324,37 @@
def parse_package_elem( package_elem, platform_info_dict=None, include_after_install_actions=True ):
"""
- Parse a <package> element within a tool dependency definition and return a list of action tuples. This method is called when setting
- metadata on a repository that includes a tool_dependencies.xml file or when installing a repository that includes a tool_dependencies.xml
- file. If installing, platform_info_dict must be a valid dictionary and include_after_install_actions must be True.
+ Parse a <package> element within a tool dependency definition and return a list of action tuples.
+ This method is called when setting metadata on a repository that includes a tool_dependencies.xml
+ file or when installing a repository that includes a tool_dependencies.xml file. If installing,
+ platform_info_dict must be a valid dictionary and include_after_install_actions must be True.
"""
- # The actions_elem_tuples list contains <actions> tag sets (possibly inside of an <actions_group> tag set) to be processed in the order
- # they are defined in the tool_dependencies.xml file.
+ # The actions_elem_tuples list contains <actions> tag sets (possibly inside of an <actions_group>
+ # tag set) to be processed in the order they are defined in the tool_dependencies.xml file.
actions_elem_tuples = []
- # The tag sets that will go into the actions_elem_list are those that install a compiled binary if the architecture and operating system
- # match it's defined attributes. If compiled binary is not installed, the first <actions> tag set [following those that have the os and
- # architecture attributes] that does not have os or architecture attributes will be processed. This tag set must contain the recipe for
- # downloading and compiling source.
+ # The tag sets that will go into the actions_elem_list are those that install a compiled binary if
+ # the architecture and operating system match its defined attributes. If compiled binary is not
+ # installed, the first <actions> tag set [following those that have the os and architecture attributes]
+ # that does not have os or architecture attributes will be processed. This tag set must contain the
+ # recipe for downloading and compiling source.
actions_elem_list = []
for elem in package_elem:
if elem.tag == 'actions':
- # We have an <actions> tag that should not be matched against a specific combination of architecture and operating system.
+ # We have an <actions> tag that should not be matched against a specific combination of
+ # architecture and operating system.
in_actions_group = False
actions_elem_tuples.append( ( in_actions_group, elem ) )
elif elem.tag == 'actions_group':
- # We have an actions_group element, and its child <actions> elements should therefore be compared with the current operating system
+ # We have an actions_group element, and its child <actions> elements should therefore be compared
+ # with the current operating system
# and processor architecture.
in_actions_group = True
- # Record the number of <actions> elements so we can filter out any <action> elements that precede <actions> elements.
+ # Record the number of <actions> elements so we can filter out any <action> elements that precede
+ # <actions> elements.
actions_elem_count = len( elem.findall( 'actions' ) )
- # Record the number of <actions> elements that have both architecture and os specified, in order to filter out any
- # platform-independent <actions> elements that come before platform-specific <actions> elements.
+ # Record the number of <actions> elements that have both architecture and os specified, in order
+ # to filter out any platform-independent <actions> elements that come before platform-specific
+ # <actions> elements.
platform_actions_elements = []
for actions_elem in elem.findall( 'actions' ):
if actions_elem.get( 'architecture' ) is not None and actions_elem.get( 'os' ) is not None:
@@ -356,8 +362,9 @@
platform_actions_element_count = len( platform_actions_elements )
platform_actions_elements_processed = 0
actions_elems_processed = 0
- # The tag sets that will go into the after_install_actions list are <action> tags instead of <actions> tags. These will be processed
- # only if they are at the very end of the <actions_group> tag set (after all <actions> tag sets). See below for details.
+ # The tag sets that will go into the after_install_actions list are <action> tags instead of <actions>
+ # tags. These will be processed only if they are at the very end of the <actions_group> tag set (after
+ # all <actions> tag sets). See below for details.
after_install_actions = []
# Inspect the <actions_group> element and build the actions_elem_list and the after_install_actions list.
for child_element in elem:
@@ -365,60 +372,71 @@
actions_elems_processed += 1
system = child_element.get( 'os' )
architecture = child_element.get( 'architecture' )
- # Skip <actions> tags that have only one of architecture or os specified, in order for the count in
- # platform_actions_elements_processed to remain accurate.
+ # Skip <actions> tags that have only one of architecture or os specified, in order for the
+ # count in platform_actions_elements_processed to remain accurate.
if ( system and not architecture ) or ( architecture and not system ):
log.debug( 'Error: Both architecture and os attributes must be specified in an <actions> tag.' )
continue
- # Since we are inside an <actions_group> tag set, compare it with our current platform information and filter the <actions>
- # tag sets that don't match. Require both the os and architecture attributes to be defined in order to find a match.
+ # Since we are inside an <actions_group> tag set, compare it with our current platform information
+ # and filter the <actions> tag sets that don't match. Require both the os and architecture attributes
+ # to be defined in order to find a match.
if system and architecture:
platform_actions_elements_processed += 1
- # If either the os or architecture do not match the platform, this <actions> tag will not be considered a match. Skip
- # it and proceed with checking the next one.
+ # If either the os or architecture do not match the platform, this <actions> tag will not be
+ # considered a match. Skip it and proceed with checking the next one.
if platform_info_dict:
if platform_info_dict[ 'os' ] != system or platform_info_dict[ 'architecture' ] != architecture:
continue
else:
- # We must not be installing a repository into Galaxy, so determining if we can install a binary is not necessary.
+ # We must not be installing a repository into Galaxy, so determining if we can install a
+ # binary is not necessary.
continue
else:
- # <actions> tags without both os and architecture attributes are only allowed to be specified after platform-specific
- # <actions> tags. If we find a platform-independent <actions> tag before all platform-specific <actions> tags have been
- # processed.
+ # <actions> tags without both os and architecture attributes are only allowed to be specified
+ # after platform-specific <actions> tags. If we find a platform-independent <actions> tag before
+ # all platform-specific <actions> tags have been processed.
if platform_actions_elements_processed < platform_actions_element_count:
- message = 'Error: <actions> tags without os and architecture attributes are only allowed after all <actions> tags with '
- message += 'os and architecture attributes have been defined. Skipping the <actions> tag set with no os or architecture '
- message += 'attributes that has been defined between two <actions> tag sets that have these attributes defined. '
- log.debug( message )
+ debug_msg = 'Error: <actions> tags without os and architecture attributes are only allowed '
+ debug_msg += 'after all <actions> tags with os and architecture attributes have been defined. '
+ debug_msg += 'Skipping the <actions> tag set with no os or architecture attributes that has '
+ debug_msg += 'been defined between two <actions> tag sets that have these attributes defined. '
+ log.debug( debug_msg )
continue
- # If we reach this point, it means one of two things: 1) The system and architecture attributes are not defined in this
- # <actions> tag, or 2) The system and architecture attributes are defined, and they are an exact match for the current
- # platform. Append the child element to the list of elements to process.
+ # If we reach this point, it means one of two things: 1) The system and architecture attributes are
+ # not defined in this <actions> tag, or 2) The system and architecture attributes are defined, and
+ # they are an exact match for the current platform. Append the child element to the list of elements
+ # to process.
actions_elem_list.append( child_element )
elif child_element.tag == 'action':
# Any <action> tags within an <actions_group> tag set must come after all <actions> tags.
if actions_elems_processed == actions_elem_count:
- # If all <actions> elements have been processed, then this <action> element can be appended to the list of actions to
- # execute within this group.
+ # If all <actions> elements have been processed, then this <action> element can be appended to the
+ # list of actions to execute within this group.
after_install_actions.append( child_element )
else:
- # If any <actions> elements remain to be processed, then log a message stating that <action> elements are not allowed
- # to precede any <actions> elements within an <actions_group> tag set.
- message = 'Error: <action> tags are only allowed at the end of an <actions_group> tag set after all <actions> tags. '
- message += 'Skipping <%s> element with type %s.' % ( child_element.tag, child_element.get( 'type' ) )
- log.debug( message )
+ # If any <actions> elements remain to be processed, then log a message stating that <action>
+ # elements are not allowed to precede any <actions> elements within an <actions_group> tag set.
+ debug_msg = 'Error: <action> tags are only allowed at the end of an <actions_group> tag set after '
+ debug_msg += 'all <actions> tags. Skipping <%s> element with type %s.' % \
+ ( child_element.tag, child_element.get( 'type', 'unknown' ) )
+ log.debug( debug_msg )
continue
if platform_info_dict is None and not include_after_install_actions:
# We must be setting metadata on a repository.
- actions_elem_tuples.append( ( in_actions_group, actions_elem_list[ 0 ] ) )
+ if len( actions_elem_list ) >= 1:
+ actions_elem_tuples.append( ( in_actions_group, actions_elem_list[ 0 ] ) )
+ else:
+ # We are processing a recipe that contains only an <actions_group> tag set for installing a binary,
+ # but does not include an additional recipe for installing and compiling from source.
+ actions_elem_tuples.append( ( in_actions_group, [] ) )
elif platform_info_dict is not None and include_after_install_actions:
# We must be installing a repository.
if after_install_actions:
actions_elem_list.extend( after_install_actions )
actions_elem_tuples.append( ( in_actions_group, actions_elem_list ) )
else:
- # Skip any element that is not <actions> or <actions_group> - this will skip comments, <repository> tags and <readme> tags.
+ # Skip any element that is not <actions> or <actions_group> - this will skip comments, <repository> tags
+ # and <readme> tags.
in_actions_group = False
continue
return actions_elem_tuples
diff -r 79c1a31e269d4668d125c1a277549617d34dae13 -r 82b5aef7a1da9e4129ba2a3920a54048642cc991 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -791,26 +791,30 @@
if sub_elem.tag == 'readme':
requirements_dict[ 'readme' ] = sub_elem.text
elif sub_elem.tag == 'repository':
- # We have a complex repository dependency. If the returned value of repository_dependency_is_valid is True, the tool
- # dependency definition will be set as invalid. This is currently the only case where a tool dependency definition is
- # considered invalid.
- repository_dependency_tup, repository_dependency_is_valid, error_message = handle_repository_elem( app=app,
- repository_elem=sub_elem,
- only_if_compiling_contained_td=False )
+ # We have a complex repository dependency. If the returned value of repository_dependency_is_valid
+ # is True, the tool dependency definition will be set as invalid. This is currently the only case
+ # where a tool dependency definition is considered invalid.
+ repository_dependency_tup, repository_dependency_is_valid, error_message = \
+ handle_repository_elem( app=app, repository_elem=sub_elem, only_if_compiling_contained_td=False )
elif sub_elem.tag == 'install':
package_install_version = sub_elem.get( 'version', '1.0' )
if package_install_version == '1.0':
- # Complex repository dependencies can be defined within the last <actions> tag set contained in an <actions_group> tag set.
- # Comments, <repository> tag sets and <readme> tag sets will be skipped in td_common_util.parse_package_elem().
- actions_elem_tuples = td_common_util.parse_package_elem( sub_elem, platform_info_dict=None, include_after_install_actions=False )
+ # Complex repository dependencies can be defined within the last <actions> tag set contained in an
+ # <actions_group> tag set. Comments, <repository> tag sets and <readme> tag sets will be skipped
+ # in td_common_util.parse_package_elem().
+ actions_elem_tuples = td_common_util.parse_package_elem( sub_elem,
+ platform_info_dict=None,
+ include_after_install_actions=False )
if actions_elem_tuples:
- # We now have a list of a single tuple that looks something like: [(True, <Element 'actions' at 0x104017850>)]
+ # We now have a list of a single tuple that looks something like:
+ # [(True, <Element 'actions' at 0x104017850>)]
actions_elem_tuple = actions_elem_tuples[ 0 ]
in_actions_group, actions_elem = actions_elem_tuple
if in_actions_group:
- # Since we're inside an <actions_group> tag set, inspect the actions_elem to see if a complex repository dependency
- # is defined. By definition, complex repository dependency definitions contained within the last <actions> tag set
- # within an <actions_group> tag set will have the value of "only_if_compiling_contained_td" set to True in
+ # Since we're inside an <actions_group> tag set, inspect the actions_elem to see if a complex
+ # repository dependency is defined. By definition, complex repository dependency definitions
+ # contained within the last <actions> tag set within an <actions_group> tag set will have the
+ # value of "only_if_compiling_contained_td" set to True in
for action_elem in actions_elem:
if action_elem.tag == 'package':
# <package name="libgtextutils" version="0.6">
@@ -834,7 +838,8 @@
# Append the error message to the requirements_dict.
requirements_dict[ 'error' ] = error_message
invalid_tool_dependencies_dict[ dependency_key ] = requirements_dict
- return valid_tool_dependencies_dict, invalid_tool_dependencies_dict, repository_dependency_tup, repository_dependency_is_valid, error_message
+ return valid_tool_dependencies_dict, invalid_tool_dependencies_dict, repository_dependency_tup, \
+ repository_dependency_is_valid, error_message
def generate_repository_dependency_metadata( app, repository_dependencies_config, metadata_dict ):
"""
@@ -1986,7 +1991,8 @@
content_alert_str=content_alert_str,
**kwd )
if error_message:
- # FIXME: This probably should not redirect since this method is called from the upload controller as well as the repository controller.
+ # FIXME: This probably should not redirect since this method is called from the upload controller as well
+ # as the repository controller.
# If there is an error, display it.
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/41cd069aa739/
Changeset: 41cd069aa739
User: dan
Date: 2014-01-16 22:52:39
Summary: Fix for api/tools/show/ when no id is provided.
Affected #: 1 file
diff -r 71186fb4b179fc051e6919dd0e31e94f9cedc3d6 -r 41cd069aa739f735add1b7c6c4bdb5881dc7ed3b lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -129,7 +129,7 @@
webapp.mapper.resource( 'role', 'roles', path_prefix='/api' )
webapp.mapper.resource( 'group', 'groups', path_prefix='/api' )
webapp.mapper.resource_with_deleted( 'quota', 'quotas', path_prefix='/api' )
- webapp.mapper.connect( '/api/tools/{id:.*?}', action='show', controller="tools" )
+ webapp.mapper.connect( '/api/tools/{id:.+?}', action='show', controller="tools" )
webapp.mapper.resource( 'tool', 'tools', path_prefix='/api' )
webapp.mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' )
webapp.mapper.resource( 'genome', 'genomes', path_prefix='/api' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/71186fb4b179/
Changeset: 71186fb4b179
User: dan
Date: 2014-01-16 22:21:41
Summary: Use toolbox.get_tool instead of toolbox.tools_by_id.get in tools api show().
Affected #: 1 file
diff -r 40a5f3139ecf714090347612ec69e0d964911d85 -r 71186fb4b179fc051e6919dd0e31e94f9cedc3d6 lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -53,7 +53,7 @@
link_details = util.string_as_bool( kwd.get( 'link_details', False ) )
try:
id = urllib.unquote_plus( id )
- tool = self.app.toolbox.tools_by_id.get( id, None )
+ tool = self.app.toolbox.get_tool( id )
if not tool:
trans.response.status = 404
return { 'error': 'tool not found', 'id': id }
https://bitbucket.org/galaxy/galaxy-central/commits/b2e473cd17cd/
Changeset: b2e473cd17cd
Branch: stable
User: dan
Date: 2014-01-16 22:21:41
Summary: Use toolbox.get_tool instead of toolbox.tools_by_id.get in tools api show().
Affected #: 1 file
diff -r 4e280310697b806aa0e08c80f342e2c93f1558f0 -r b2e473cd17cdd0e5758cee32e74f9be7723658bf lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -50,7 +50,7 @@
link_details = util.string_as_bool( kwd.get( 'link_details', False ) )
try:
id = urllib.unquote_plus( id )
- tool = self.app.toolbox.tools_by_id.get( id, None )
+ tool = self.app.toolbox.get_tool( id )
if not tool:
trans.response.status = 404
return { 'error': 'tool not found', 'id': id }
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/40a5f3139ecf/
Changeset: 40a5f3139ecf
User: greg
Date: 2014-01-16 22:13:47
Summary: Forgot None in my last commit
Affected #: 1 file
diff -r 56492a1d0d352db016e7ba785fcbc9822b604d61 -r 40a5f3139ecf714090347612ec69e0d964911d85 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1596,7 +1596,7 @@
return folder_contents
def pretty_print( dict=None ):
- if dict is not:
+ if dict is not None:
return json.to_json_string( dict, sort_keys=True, indent=4 )
def remove_dir( dir ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.