1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3ee0e5ee1b37/
changeset: 3ee0e5ee1b37
user: jgoecks
date: 2012-12-10 21:38:57
summary: Trackster: remove unused var.
affected #: 1 file
diff -r 2fdc0270baad3b020118d05cc3429993a3d80207 -r 3ee0e5ee1b375c0fd580c26a93850f2fa44f93f0 static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -3278,7 +3278,6 @@
this.tile_predraw_init();
var canvas = track.view.canvas_manager.new_canvas(),
- tile_bounds = track._get_tile_bounds(tile_index, resolution),
tile_low = region.get('start'),
tile_high = region.get('end'),
all_data_index = 0,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/bd020b5aa531/
changeset: bd020b5aa531
user: dan
date: 2012-12-10 18:48:58
summary: Fix for SelectToolParameter rerun,workflow when multiple="true", submitted by Jim Johnson.
affected #: 1 file
diff -r 0042b30216fc5dc4f92b314c93a3aa9a0d810d77 -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -714,7 +714,12 @@
else:
if value not in legal_values:
raise ValueError( "An invalid option was selected, please verify" )
- return value
+ return value
+ def to_html_value( self, value, app ):
+ if isinstance( value, list ):
+ return value
+ else:
+ return str( value )
def to_param_dict_string( self, value, other_values={} ):
if value is None:
return "None"
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0042b30216fc/
changeset: 0042b30216fc
user: clements
date: 2012-11-06 20:14:22
summary: Added boilerplate for Sphinx doc home page.
affected #: 1 file
diff -r c8f0ea550d51b2c203f5f60568817164c62220fd -r 0042b30216fc5dc4f92b314c93a3aa9a0d810d77 doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -1,12 +1,41 @@
Galaxy Code Documentation
*************************
-Galaxy is an open, web-based platform for accessible, reproducible, and
+Galaxy_ is an open, web-based platform for accessible, reproducible, and
transparent computational biomedical research.
-- Accessible: Users without programming experience can easily specify parameters and run tools and workflows.
-- Reproducible: Galaxy captures information so that any user can repeat and understand a complete computational analysis.
-- Transparent: Users share and publish analyses via the web and create Pages, interactive, web-based documents that describe a complete analysis.
+- *Accessible:* Users without programming experience can easily specify parameters and run tools and workflows.
+- *Reproducible:* Galaxy captures information so that any user can repeat and understand a complete computational analysis.
+- *Transparent:* Users share and publish analyses via the web and create Pages, interactive, web-based documents that describe a complete analysis.
+
+Two copies of the Galaxy code doumentation are published by the Galaxy Project
+
+- Galaxy-Dist_: This describes the code in the `most recent official release`_ of Galaxy.
+- Galaxy-Central_: Describes the `current code in the development branch`_ of Galaxy. This is the latest checkin, bleeding edge version of the code. The documentation should never be more than an hour behind the code.
+
+Both copies are hosted at ReadTheDocs_, a publicly supported web site for hosting project documentation.
+
+If you have your own copy of the Galaxy source code, you can also generate your own version of this documentation:
+
+::
+
+ $ cd doc
+ $ make html
+
+The generated documentation will be in ``doc/build/html/`` and can be viewed with a web browser. Note that you will need to install Sphinx and a fair number of module dependencies before this will produce output.
+
+.. _Galaxy: http://galaxyproject.org/
+.. _Galaxy-Dist: https://galaxy-dist.readthedocs.org/
+.. _most recent official release: https://bitbucket.org/galaxy/galaxy-dist
+.. _Galaxy-Central: https://galaxy-central.readthedocs.org/
+.. _current code in the development branch: https://bitbucket.org/galaxy/galaxy-central
+.. _ReadTheDocs: https://readthedocs.org/
+
+
+For more on the Galaxy Project, please visit the `project home page`_.
+
+.. _project home page: http://galaxyproject.org/
+
Contents
========
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6f3266a589e3/
changeset: 6f3266a589e3
user: clements
date: 2012-11-06 18:31:28
summary: Added numpy to mock modules list. Readthedocs of galaxy-central is broken, and it's complaining about numpy. So, try adding it to mock modules list and see if that helps.
affected #: 1 file
diff -r 6d86fa9ca5977060372704346a1e9a2b6308a292 -r 6f3266a589e397cbdbe8efe4f4d26b7dcdc8924c doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -274,6 +274,6 @@
return Mock()
# adding pbs_python, DRMAA_python, markupsafe, and drmaa here had no effect.
-MOCK_MODULES = ['tables', 'decorator']
+MOCK_MODULES = ['tables', 'decorator', 'numpy']
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = Mock()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6d86fa9ca597/
changeset: 6d86fa9ca597
user: greg
date: 2012-12-07 22:47:24
summary: Handle some cases of circular dependencies and other corner case situations with repository dependency definitions.
affected #: 3 files
diff -r facdd387b85e814df7428ca8cbb71828d0ec48a2 -r 6d86fa9ca5977060372704346a1e9a2b6308a292 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -2,12 +2,12 @@
from galaxy import util
from galaxy.tools import parameters
from galaxy.util import inflector
-from galaxy.util.json import *
+from galaxy.util import json
from galaxy.web import url_for
from galaxy.web.form_builder import SelectField
-from galaxy.webapps.community.util.container_util import *
-from galaxy.datatypes.checkers import *
-from galaxy.model.orm import *
+from galaxy.webapps.community.util import container_util
+from galaxy.datatypes import checkers
+from galaxy.model.orm import and_
from galaxy.tools.parameters import dynamic_options
from galaxy import eggs
@@ -124,45 +124,49 @@
# Datatypes container.
if metadata and 'datatypes' in metadata:
datatypes = metadata[ 'datatypes' ]
- folder_id, datatypes_root_folder = build_datatypes_folder( folder_id, datatypes )
+ folder_id, datatypes_root_folder = container_util.build_datatypes_folder( folder_id, datatypes )
containers_dict[ 'datatypes' ] = datatypes_root_folder
# Invalid tools container.
if metadata and 'invalid_tools' in metadata:
invalid_tool_configs = metadata[ 'invalid_tools' ]
- folder_id, invalid_tools_root_folder = build_invalid_tools_folder( folder_id,
- invalid_tool_configs,
- changeset_revision,
- repository=repository,
- label='Invalid tools' )
+ folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( folder_id,
+ invalid_tool_configs,
+ changeset_revision,
+ repository=repository,
+ label='Invalid tools' )
containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
# Readme files container.
readme_files_dict = build_readme_files_dict( repository_metadata )
- folder_id, readme_files_root_folder = build_readme_files_folder( folder_id, readme_files_dict )
+ folder_id, readme_files_root_folder = container_util.build_readme_files_folder( folder_id, readme_files_dict )
containers_dict[ 'readme_files' ] = readme_files_root_folder
# Repository dependencies container.
toolshed_base_url = str( url_for( '/', qualified=True ) ).rstrip( '/' )
- folder_id, repository_dependencies_root_folder = build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
- repository_name=repository.name,
- repository_owner=repository.user.username,
- changeset_revision=changeset_revision,
- folder_id=folder_id,
- repository_dependencies=repository_dependencies )
+ folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
+ repository_name=repository.name,
+ repository_owner=repository.user.username,
+ changeset_revision=changeset_revision,
+ folder_id=folder_id,
+ repository_dependencies=repository_dependencies )
if repository_dependencies_root_folder:
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
# Tool dependencies container.
if metadata and 'tool_dependencies' in metadata:
tool_dependencies = metadata[ 'tool_dependencies' ]
- folder_id, tool_dependencies_root_folder = build_tool_dependencies_folder( folder_id, tool_dependencies, for_galaxy=False )
+ folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id, tool_dependencies, for_galaxy=False )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Valid tools container.
if metadata and 'tools' in metadata:
valid_tools = metadata[ 'tools' ]
- folder_id, valid_tools_root_folder = build_tools_folder( folder_id, valid_tools, repository, changeset_revision, label='Valid tools' )
+ folder_id, valid_tools_root_folder = container_util.build_tools_folder( folder_id,
+ valid_tools,
+ repository,
+ changeset_revision,
+ label='Valid tools' )
containers_dict[ 'valid_tools' ] = valid_tools_root_folder
# Workflows container.
if metadata and 'workflows' in metadata:
workflows = metadata[ 'workflows' ]
- folder_id, workflows_root_folder = build_workflows_folder( folder_id, workflows, repository_metadata, label='Workflows' )
+ folder_id, workflows_root_folder = container_util.build_workflows_folder( folder_id, workflows, repository_metadata, label='Workflows' )
containers_dict[ 'workflows' ] = workflows_root_folder
except Exception, e:
log.debug( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) )
@@ -190,6 +194,29 @@
option_value = trans.security.encode_id( repository.id )
repositories_select_field.add_option( option_label, option_value )
return repositories_select_field
+def can_add_entry_to_all_repository_dependencies( current_repository_key, repository_dependency, all_repository_dependencies ):
+ """
+ Handle circular repository dependencies that could result in an infinite loop by determining if it is safe to add an entry to the
+ repository dependencies container.
+ """
+ # First check for an exact match - if this is true, the changeset revision was not updated.
+ repository_dependency_as_key = container_util.generate_repository_dependencies_key_for_repository( repository_dependency[ 0 ],
+ repository_dependency[ 1 ],
+ repository_dependency[ 2 ],
+ repository_dependency[ 3] )
+ current_repository_key_as_repository_dependency = current_repository_key.split( container_util.STRSEP )
+ if repository_dependency_as_key in all_repository_dependencies:
+ val = all_repository_dependencies[ repository_dependency_as_key ]
+ if current_repository_key_as_repository_dependency in val:
+ return False
+ # Now handle the case where an update to the changeset revision was done, so everything will match except the changeset_revision.
+ repository_dependency_as_partial_key = container_util.STRSEP.join( [ repository_dependency[ 0 ], repository_dependency[ 1 ], repository_dependency[ 2 ] ] )
+ for key in all_repository_dependencies:
+ if key.startswith( repository_dependency_as_partial_key ):
+ val = all_repository_dependencies[ key ]
+ if current_repository_key_as_repository_dependency in val:
+ return False
+ return True
def can_generate_tool_dependency_metadata( root, metadata_dict ):
"""
Make sure the combination of name, version and type (the type will be the value of elem.tag) of each root element tag in the tool_dependencies.xml
@@ -736,8 +763,8 @@
elif name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
full_path = str( os.path.abspath( os.path.join( root, name ) ) )
if os.path.getsize( full_path ) > 0:
- if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
- or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
+ if not ( checkers.check_binary( full_path ) or checkers.check_image( full_path ) or checkers.check_gzip( full_path )[ 0 ]
+ or checkers.check_bz2( full_path )[ 0 ] or checkers.check_zip( full_path ) ):
try:
# Make sure we're looking at a tool config and not a display application config or something else.
element_tree = util.parse_xml( full_path )
@@ -779,7 +806,7 @@
fp = open( relative_path, 'rb' )
workflow_text = fp.read()
fp.close()
- exported_workflow_dict = from_json_string( workflow_text )
+ exported_workflow_dict = json.from_json_string( workflow_text )
if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
if readme_files:
@@ -1189,106 +1216,112 @@
repository_dependencies_dict = metadata[ 'repository_dependencies' ]
# The repository_dependencies entry in the metadata is a dictionary that may have a value for a 'description' key. We want to
# store the value of this key only once, the first time through this recursive method.
- repository_dependencies_root_key = generate_repository_dependencies_key_for_repository( toolshed_base_url=toolshed_base_url,
- repository_name=repository.name,
- repository_owner=repository.user.username,
- changeset_revision=repository_metadata.changeset_revision )
+ current_repository_key = container_util.generate_repository_dependencies_key_for_repository( toolshed_base_url=toolshed_base_url,
+ repository_name=repository.name,
+ repository_owner=repository.user.username,
+ changeset_revision=repository_metadata.changeset_revision )
if not all_repository_dependencies:
- # Initialize the all_repository_dependencies dictionary.
- all_repository_dependencies[ 'root_key' ] = repository_dependencies_root_key
- all_repository_dependencies[ repository_dependencies_root_key ] = []
+ # Initialize the all_repository_dependencies dictionary. It's safe to assume that current_repository_key in this case will have a value.
+ all_repository_dependencies[ 'root_key' ] = current_repository_key
+ all_repository_dependencies[ current_repository_key ] = []
if 'description' not in all_repository_dependencies:
description = repository_dependencies_dict.get( 'description', None )
all_repository_dependencies[ 'description' ] = description
# The next key of interest in repository_dependencies_dict is 'repository_dependencies', which is a list of tuples.
repository_dependencies_tups = repository_dependencies_dict[ 'repository_dependencies' ]
+ if repository_dependencies_tups and current_repository_key:
+ # Remove all repository dependencies that point to a revision within its own repository.
+ repository_dependencies_tups = remove_ropository_dependency_reference_to_self( repository_dependencies_tups, current_repository_key )
for repository_dependency in repository_dependencies_tups:
- # Skip repository dependencies that point to the root repository.
- check_key = generate_repository_dependencies_key_for_repository( toolshed_base_url=repository_dependency[ 0 ],
- repository_name=repository_dependency[ 1 ],
- repository_owner=repository_dependency[ 2 ],
- changeset_revision=repository_dependency[ 3 ] )
- if check_key == repository_dependencies_root_key:
- handled.append( repository_dependency )
- elif repository_dependency not in handled and repository_dependency not in repository_dependencies:
+ if repository_dependency not in handled and repository_dependency not in repository_dependencies:
+ # The following if statement handles repositories dependencies that are circular in nature.
+ if current_repository_key:
+ if current_repository_key in all_repository_dependencies:
+ # Add all repository dependencies for the current repository into it's entry in all_repository_dependencies.
+ all_repository_dependencies_val = all_repository_dependencies[ current_repository_key ]
+ if repository_dependency not in all_repository_dependencies_val:
+ all_repository_dependencies_val.append( repository_dependency )
+ all_repository_dependencies[ current_repository_key ] = all_repository_dependencies_val
+ elif can_add_entry_to_all_repository_dependencies( current_repository_key, repository_dependency, all_repository_dependencies ):
+ # We don't have a circular dependency that could result in an infinite loop.
+ all_repository_dependencies[ current_repository_key ] = [ repository_dependency ]
repository_dependencies.append( repository_dependency )
else:
- repository_dependencies_root_key = None
- if repository_dependencies:
- repository_dependency = repository_dependencies.pop( 0 )
- # Cast unicode to string.
- repository_dependency = [ str( item ) for item in repository_dependency ]
- tool_shed, name, owner, changeset_revision = repository_dependency
- if repository_dependencies_root_key:
- if repository_dependencies_root_key in all_repository_dependencies:
- # See if this repository_dependency is contained in the list associated with the repository_dependencies_root_key.
- all_repository_dependencies_val = all_repository_dependencies[ repository_dependencies_root_key ]
- if repository_dependency not in all_repository_dependencies_val:
- all_repository_dependencies_val.append( repository_dependency )
- all_repository_dependencies[ repository_dependencies_root_key ] = all_repository_dependencies_val
- handled.append( repository_dependency )
- else:
- # Insert this repository_dependency.
- all_repository_dependencies[ repository_dependencies_root_key ] = [ repository_dependency ]
+ # The current repository does not have repository dependencies defined for it.
+ current_repository_key = None
+ # The following if statement handles repositories dependencies that are circular in nature.
+ if current_repository_key and current_repository_key in all_repository_dependencies:
+ repository_dependencies_tups = [ rd for rd in all_repository_dependencies[ current_repository_key ] ]
+ if repository_dependencies_tups:
+ repository_dependency = repository_dependencies_tups.pop( 0 )
+ if repository_dependency not in handled:
handled.append( repository_dependency )
- if tool_shed_is_this_tool_shed( tool_shed ):
- # The repository is in the current tool shed.
- required_repository = get_repository_by_name_and_owner( trans, name, owner )
- required_repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
- trans.security.encode_id( required_repository.id ),
- changeset_revision )
- if required_repository_metadata:
- required_repo_dir = required_repository.repo_path( trans.app )
- required_repo = hg.repository( get_configured_ui(), required_repo_dir )
- else:
- # The repository changeset_revision is no longer installable, so see if there's been an update.
- required_repo_dir = required_repository.repo_path( trans.app )
- required_repo = hg.repository( get_configured_ui(), required_repo_dir )
- required_changeset_revision = get_next_downloadable_changeset_revision( required_repository, required_repo, changeset_revision )
+ if repository_dependency in repository_dependencies:
+ repository_dependencies.remove( repository_dependency )
+ toolshed, name, owner, changeset_revision = repository_dependency
+ if tool_shed_is_this_tool_shed( toolshed ):
+ required_repository = get_repository_by_name_and_owner( trans, name, owner )
required_repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
trans.security.encode_id( required_repository.id ),
- required_changeset_revision )
+ changeset_revision )
if required_repository_metadata:
- # The changeset_revision defined in a repository_dependencies.xml file is outdated, so we need to fix appropriate
- # entries in our all_repository_dependencies dictionary.
- updated_repository_dependency = [ tool_shed, name, owner, required_changeset_revision ]
- for k, v in all_repository_dependencies.items():
- if k in [ 'root_key', 'description' ]:
- continue
- for i, current_repository_dependency in enumerate( v ):
- current_tool_shed, current_name, current_owner, current_changeset_revision = current_repository_dependency
- if tool_shed == current_tool_shed and name == current_name and owner == current_owner and changeset_revision == current_changeset_revision:
- if updated_repository_dependency in v:
- # We've already stored the updated repository_dependency, so remove the outdated one.
- v = v.remove( repository_dependency )
- else:
- # Store the updated repository_dependency.
- v[ i ] = updated_repository_dependency
- all_repository_dependencies[ k ] = v
- if required_repository_metadata:
- # The required_repository_metadata changeset_revision is installable.
- required_metadata = required_repository_metadata.metadata
- if required_metadata:
- return get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=required_repo,
- repository=required_repository,
- repository_metadata=required_repository_metadata,
- toolshed_base_url=tool_shed,
- repository_dependencies=repository_dependencies,
- all_repository_dependencies=all_repository_dependencies,
- handled=handled )
- else:
- # The repository is in a different tool shed, so build an url and send a request.
- raise Exception( "Repository dependencies that refer to repositories in other tool sheds is not yet supported." )
+ required_repo_dir = required_repository.repo_path( trans.app )
+ required_repo = hg.repository( get_configured_ui(), required_repo_dir )
+ else:
+ # The repository changeset_revision is no longer installable, so see if there's been an update.
+ required_repo_dir = required_repository.repo_path( trans.app )
+ required_repo = hg.repository( get_configured_ui(), required_repo_dir )
+ required_changeset_revision = get_next_downloadable_changeset_revision( required_repository, required_repo, changeset_revision )
+ required_repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( required_repository.id ),
+ required_changeset_revision )
+ if required_repository_metadata:
+ # The changeset_revision defined in a repository_dependencies.xml file is outdated, so we need to fix appropriate
+ # entries in our all_repository_dependencies dictionary.
+ updated_repository_dependency = [ toolshed, name, owner, required_changeset_revision ]
+ for k, v in all_repository_dependencies.items():
+ if k in [ 'root_key', 'description' ]:
+ continue
+ for i, current_repository_dependency in enumerate( v ):
+ cts, cn, co, ccr = current_repository_dependency
+ if toolshed == cts and name == cn and owner == co and changeset_revision == ccr:
+ if updated_repository_dependency in v:
+ # We've already stored the updated repository_dependency, so remove the outdated one.
+ v = v.remove( repository_dependency )
+ all_repository_dependencies[ k ] = v
+ else:
+ # Store the updated repository_dependency.
+ v[ i ] = updated_repository_dependency
+ all_repository_dependencies[ k ] = v
+ if required_repository_metadata:
+ # The required_repository_metadata changeset_revision is installable.
+ required_metadata = required_repository_metadata.metadata
+ if required_metadata:
+ for repository_dependency in repository_dependencies_tups:
+ if repository_dependency not in repository_dependencies:
+ repository_dependencies.append( repository_dependency )
+ return get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=required_repo,
+ repository=required_repository,
+ repository_metadata=required_repository_metadata,
+ toolshed_base_url=toolshed,
+ repository_dependencies=repository_dependencies,
+ all_repository_dependencies=all_repository_dependencies,
+ handled=handled )
+ else:
+ # The repository is in a different tool shed, so build an url and send a request.
+ error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring repository dependency definition "
+ error_message += "for tool shed %s, name %s, owner %s, changeset revision %s" % ( toolshed, name, owner, changeset_revision )
+ log.debug( error_message )
return all_repository_dependencies
def get_repository_file_contents( file_path ):
- if is_gzip( file_path ):
+ if checkers.is_gzip( file_path ):
safe_str = to_safe_string( '\ngzip compressed file\n' )
- elif is_bz2( file_path ):
+ elif checkers.is_bz2( file_path ):
safe_str = to_safe_string( '\nbz2 compressed file\n' )
- elif check_zip( file_path ):
+ elif checkers.check_zip( file_path ):
safe_str = to_safe_string( '\nzip compressed file\n' )
- elif check_binary( file_path ):
+ elif checkers.check_binary( file_path ):
safe_str = to_safe_string( '\nBinary file\n' )
else:
safe_str = ''
@@ -1526,6 +1559,18 @@
shutil.rmtree( dir )
except:
pass
+def remove_ropository_dependency_reference_to_self( repository_dependencies, repository_key ):
+ """Remove all repository dependencies that point to a revision within its own repository."""
+ clean_repository_dependencies = []
+ repository_tup = repository_key.split( container_util.STRSEP )
+ rd_toolshed, rd_name, rd_owner, rd_changeset_revision = repository_tup
+ for repository_dependency in repository_dependencies:
+ toolshed, name, owner, changeset_revision = repository_dependency
+ if rd_toolshed == toolshed and rd_name == name and rd_owner == owner:
+ log.debug( "Removing repository dependency for repository %s owned by %s since it refers to a revision within itself." % ( name, owner ) )
+ else:
+ clean_repository_dependencies.append( repository_dependency )
+ return clean_repository_dependencies
def remove_tool_dependency_installation_directory( dependency_install_dir ):
if os.path.exists( dependency_install_dir ):
try:
diff -r facdd387b85e814df7428ca8cbb71828d0ec48a2 -r 6d86fa9ca5977060372704346a1e9a2b6308a292 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -188,7 +188,7 @@
else:
metadata_dict = {}
if 'tool_dependencies' not in metadata_dict:
- message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml". '
+ message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml file". '
status = 'warning'
log.debug( 'Error in tool dependencies for repository %s: %s.' % ( repository.id, repository.name ) )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
diff -r facdd387b85e814df7428ca8cbb71828d0ec48a2 -r 6d86fa9ca5977060372704346a1e9a2b6308a292 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -212,7 +212,7 @@
repository_dependencies_folder.description = repository_dependencies.get( 'description', None )
repository_dependencies_root_folder.folders.append( repository_dependencies_folder )
del repository_dependencies[ 'description' ]
- # The remaining keys in repository_dependencies should all be folders.
+ # The current keys in repository_dependencies should all be folders.
folder_keys = repository_dependencies.keys()
# If repository_dependencies_folder_key is an entry in repository_dependencies, process it first.
if repository_dependencies_folder_key in repository_dependencies:
@@ -404,7 +404,7 @@
STRSEP,
str( changeset_revision ) )
def get_folder( folder, key ):
- if folder and folder.key == key:
+ if folder.key == key:
return folder
for sub_folder in folder.folders:
return get_folder( sub_folder, key )
@@ -421,25 +421,30 @@
folder_keys, folder_id, repository_dependency_id, repository_name, repository_owner, changeset_revision,
key, val ):
# Only create a new folder object if necessary.
- folder = get_folder( repository_dependencies_root_folder, key )
+ folder = get_folder( repository_dependencies_folder, key )
if not folder:
folder_id += 1
label = generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, changeset_revision, key )
folder = Folder( id=folder_id, key=key, label=label, parent=repository_dependencies_folder )
for repository_dependency_tup in val:
toolshed, name, owner, changeset_revision = repository_dependency_tup
- if is_root_repository( repository_dependencies_folder_key, toolshed, name, owner ):
- # Do not include repository dependencies that point to a revision within the same repository.
- continue
if is_or_should_be_folder( folder_keys, toolshed, name, owner, changeset_revision ):
check_folder_key = generate_repository_dependencies_key_for_repository( toolshed, name, owner, changeset_revision )
- if get_folder( repository_dependencies_root_folder, check_folder_key ):
- continue
+ check_folder = get_folder( repository_dependencies_folder, check_folder_key )
+ if check_folder:
+ repository_dependency_id += 1
+ repository_dependency = RepositoryDependency( id=repository_dependency_id,
+ toolshed=toolshed,
+ repository_name=name,
+ repository_owner=owner,
+ changeset_revision=changeset_revision )
+ if not check_folder.contains_repository_dependency( repository_dependency ):
+ check_folder.repository_dependencies.append( repository_dependency )
else:
# Create a new folder, which may be populated later.
folder_id += 1
label = generate_repository_dependencies_folder_label_from_key( name, owner, changeset_revision, key )
- sub_folder = Folder( id=folder_id, key=check_folder_key, label=label, parent=repository_dependencies_folder )
+ sub_folder = Folder( id=folder_id, key=check_folder_key, label=label, parent=folder )
folder.folders.append( sub_folder )
else:
repository_dependency_id += 1
@@ -458,11 +463,6 @@
def is_or_should_be_folder( folder_keys, toolshed, repository_name, repository_owner, changeset_revision ):
key = '%s%s%s%s%s%s%s' % ( toolshed, STRSEP, repository_name, STRSEP, repository_owner, STRSEP, changeset_revision )
return key in folder_keys
-def is_root_repository( repository_dependencies_folder_key, toolshed, repository_name, repository_owner ):
- # Return True if a repository dependency points to a revision within it's own repository.
- repository_dependencies_folder_tup = repository_dependencies_folder_key.split( STRSEP )
- rdf_toolshed, rdf_repository_name, rdf_repository_owner, rdf_changeset_revision = repository_dependencies_folder_tup
- return rdf_toolshed == toolshed and rdf_repository_name == repository_name and rdf_repository_owner == repository_owner
def key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, key ):
toolshed_base_url, key_name, key_owner, key_changeset_revision = get_components_from_key( key )
return repository_name == key_name and repository_owner == key_owner and changeset_revision == key_changeset_revision
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.