1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/932585f1dd8d/
changeset: 932585f1dd8d
user: jgoecks
date: 2012-11-07 19:16:07
summary: Circster: do not transition labels if there are no data bounds (and hence nothing to transition).
affected #: 1 file
diff -r 7bd34ce09e9fd795df2088be9c13c695b00424cd -r 932585f1dd8d67c28d2c22003af0d9ae318ef947 static/scripts/viz/circster.js
--- a/static/scripts/viz/circster.js
+++ b/static/scripts/viz/circster.js
@@ -803,6 +803,9 @@
// FIXME: (a) pull out function for getting labeled chroms? and (b) function used in transition below
// is copied from UseTicks mixin, so pull out and make generally available.
+ // If there are no data bounds, nothing to transition.
+ if (this.data_bounds.length === 0) { return; }
+
// Transition labels to new radius bounds.
var self = this,
visibleChroms = _.filter(this.chroms_layout, function(c) { return c.endAngle - c.startAngle > 0.08; }),
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7bd34ce09e9f/
changeset: 7bd34ce09e9f
user: dannon
date: 2012-11-07 17:02:43
summary: Move doc/build to an appropriate location in .hgignore
affected #: 1 file
diff -r 0a20563f9d46749d041c079907492bf9d4866628 -r 7bd34ce09e9fd795df2088be9c13c695b00424cd .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -41,8 +41,6 @@
static/welcome.html.*
static/welcome.html
-doc/build
-
# Tool data.
tool-data/annotation_profiler_options.xml
tool-data/annotation_profiler_valid_builds.txt
@@ -74,6 +72,9 @@
*/variables.less
static/june_2007_style/blue/base_sprites.less
+# Documentation build files.
+doc/build
+
# Misc
*.orig
.DS_Store
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
5 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1b7f0f272c1a/
changeset: 1b7f0f272c1a
user: dannon
date: 2012-11-07 16:51:50
summary: Security: Sanitize grid filters.
affected #: 1 file
diff -r 6971bbc8bd8418baea51345b891e1aa647d14a88 -r 1b7f0f272c1a3f6b8d55969fbf73122884d23070 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py
+++ b/lib/galaxy/web/framework/helpers/grids.py
@@ -2,6 +2,7 @@
from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import iff
from galaxy.web import url_for
+from galaxy.util import sanitize_text
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.odict import odict
from galaxy.web.framework.helpers import to_unicode
@@ -136,7 +137,8 @@
# Update query.
query = column.filter( trans, trans.user, query, column_filter )
# Upate current filter dict.
- cur_filter_dict[ column.key ] = column_filter
+ #Column filters are rendered in various places, sanitize them all here.
+ cur_filter_dict[ column.key ] = sanitize_text(column_filter)
# Carry filter along to newly generated urls; make sure filter is a string so
# that we can encode to UTF-8 and thus handle user input to filters.
if isinstance( column_filter, list ):
https://bitbucket.org/galaxy/galaxy-central/changeset/643931af6baf/
changeset: 643931af6baf
user: dannon
date: 2012-11-07 16:54:57
summary: Whitespace cleanup.
affected #: 1 file
diff -r 1b7f0f272c1a3f6b8d55969fbf73122884d23070 -r 643931af6baf5cfdd772dab1342f47149a23620c lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py
+++ b/lib/galaxy/web/framework/helpers/grids.py
@@ -36,7 +36,7 @@
num_page_links = 10
# Set preference names.
cur_filter_pref_name = ".filter"
- cur_sort_key_pref_name = ".sort_key"
+ cur_sort_key_pref_name = ".sort_key"
pass_through_operations = {}
def __init__( self ):
# Determine if any multiple row operations are defined
@@ -45,13 +45,13 @@
if operation.allow_multiple:
self.has_multiple_item_operations = True
break
-
- # If a column does not have a model class, set the column's model class
+
+ # If a column does not have a model class, set the column's model class
# to be the grid's model class.
for column in self.columns:
if not column.model_class:
column.model_class = self.model_class
-
+
def __call__( self, trans, **kwargs ):
# Get basics.
# FIXME: pretty sure this is only here to pass along, can likely be eliminated
@@ -88,7 +88,7 @@
cur_filter_dict = {}
for column in self.columns:
if column.key:
- # Get the filter criterion for the column. Precedence is (a) if using default filter, only look there; otherwise,
+ # Get the filter criterion for the column. Precedence is (a) if using default filter, only look there; otherwise,
# (b) look in kwargs; and (c) look in base filter.
column_filter = None
if use_default_filter:
@@ -98,7 +98,7 @@
# Queries that include table joins cannot guarantee unique column names. This problem is
# handled by setting the column_filter value to <TableName>.<ColumnName>.
column_filter = kwargs.get( "f-" + column.model_class.__name__ + ".%s" % column.key )
- elif "f-" + column.key in kwargs:
+ elif "f-" + column.key in kwargs:
column_filter = kwargs.get( "f-" + column.key )
elif column.key in base_filter:
column_filter = base_filter.get( column.key )
@@ -108,7 +108,7 @@
if isinstance( item, basestring):
try:
# Not clear what we're decoding, so recurse to ensure that we catch everything.
- decoded_item = from_json_string( item )
+ decoded_item = from_json_string( item )
if isinstance( decoded_item, list):
decoded_list = from_json_string_recurse( decoded_item )
else:
@@ -146,7 +146,7 @@
for filter in column_filter:
if not isinstance( filter, basestring ):
filter = unicode( filter ).encode("utf-8")
- extra_url_args[ "f-" + column.key ] = to_json_string( column_filter )
+ extra_url_args[ "f-" + column.key ] = to_json_string( column_filter )
else:
# Process singleton filter.
if not isinstance( column_filter, basestring ):
@@ -190,19 +190,19 @@
if self.use_paging:
if 'page' in kwargs:
if kwargs['page'] == 'all':
- page_num = 0
+ page_num = 0
else:
page_num = int( kwargs['page'] )
else:
page_num = 1
-
+
if page_num == 0:
# Show all rows in page.
total_num_rows = query.count()
page_num = 1
num_pages = 1
else:
- # Show a limited number of rows. Before modifying query, get the total number of rows that query
+ # Show a limited number of rows. Before modifying query, get the total number of rows that query
# returns so that the total number of pages can be computed.
total_num_rows = query.count()
query = query.limit( self.num_rows_per_page ).offset( ( page_num-1 ) * self.num_rows_per_page )
@@ -211,11 +211,11 @@
# Defaults.
page_num = 1
num_pages = 1
-
- # There are some places in grid templates where it's useful for a grid
+
+ # There are some places in grid templates where it's useful for a grid
# to have its current filter.
self.cur_filter_dict = cur_filter_dict
-
+
# Preserve grid state: save current filter and sort key.
if self.preserve_state:
pref_name = unicode( self.__class__.__name__ + self.cur_filter_pref_name )
@@ -253,10 +253,10 @@
return url_for( **new_kwargs )
self.use_panels = ( kwargs.get( 'use_panels', False ) in [ True, 'True', 'true' ] )
async_request = ( ( self.use_async ) and ( kwargs.get( 'async', False ) in [ True, 'True', 'true'] ) )
- # Currently, filling the template returns a str object; this requires decoding the string into a
- # unicode object within mako templates. What probably should be done is to return the template as
+ # Currently, filling the template returns a str object; this requires decoding the string into a
+ # unicode object within mako templates. What probably should be done is to return the template as
# utf-8 unicode; however, this would require encoding the object as utf-8 before returning the grid
- # results via a controller method, which is require substantial changes. Hence, for now, return grid
+ # results via a controller method, which is require substantial changes. Hence, for now, return grid
# as str.
return trans.fill_template( iff( async_request, self.async_template, self.template ),
grid=self,
@@ -303,7 +303,7 @@
# (gvk) Is this method necessary? Why not simply build the entire query,
# including applying filters in the build_initial_query() method?
return query
-
+
class GridColumn( object ):
def __init__( self, label, key=None, model_class=None, method=None, format=None, \
link=None, attach_popup=False, visible=True, ncells=1, nowrap=False, \
@@ -360,16 +360,16 @@
if column_name is None:
column_name = self.key
if ascending:
- query = query.order_by( self.model_class.table.c.get( column_name ).asc() )
+ query = query.order_by( self.model_class.table.c.get( column_name ).asc() )
else:
query = query.order_by( self.model_class.table.c.get( column_name ).desc() )
return query
-
+
class ReverseSortColumn( GridColumn ):
""" Column that reverses sorting; this is useful when the natural sort is descending. """
def sort( self, trans, query, ascending, column_name=None ):
return GridColumn.sort( self, trans, query, (not ascending), column_name=column_name )
-
+
class TextColumn( GridColumn ):
""" Generic column that employs freetext and, hence, supports freetext, case-independent filtering. """
def filter( self, trans, user, query, column_filter ):
@@ -380,7 +380,7 @@
query = query.filter( self.get_filter( trans, user, column_filter ) )
return query
def get_filter( self, trans, user, column_filter ):
- """ Returns a SQLAlchemy criterion derived from column_filter. """
+ """ Returns a SQLAlchemy criterion derived from column_filter. """
if isinstance( column_filter, basestring ):
return self.get_single_filter( user, column_filter )
elif isinstance( column_filter, list ):
@@ -406,7 +406,7 @@
if column_name is None:
column_name = self.key
if ascending:
- query = query.order_by( func.lower( self.model_class.table.c.get( column_name ) ).asc() )
+ query = query.order_by( func.lower( self.model_class.table.c.get( column_name ) ).asc() )
else:
query = query.order_by( func.lower( self.model_class.table.c.get( column_name ) ).desc() )
return query
@@ -430,9 +430,9 @@
class IntegerColumn( TextColumn ):
"""
- Integer column that employs freetext, but checks that the text is an integer,
+ Integer column that employs freetext, but checks that the text is an integer,
so support filtering on integer values.
-
+
IMPORTANT NOTE: grids that use this column type should not include the column
in the cols_to_filter list of MulticolFilterColumn ( i.e., searching on this
column type should not be performed in the grid's standard search - it won't
@@ -440,8 +440,8 @@
that search on this column should use 'filterable="advanced"' so that searching
is only performed in the advanced search component, restricting the search to
the specific column.
-
- This is useful for searching on object ids or other integer columns. See the
+
+ This is useful for searching on object ids or other integer columns. See the
JobIdColumn column in the SpecifiedDateListGrid class in the jobs controller of
the reports webapp for an example.
"""
@@ -452,14 +452,14 @@
def sort( self, trans, query, ascending, column_name=None ):
"""Sort query using this column."""
return GridColumn.sort( self, trans, query, ascending, column_name=column_name )
-
+
class CommunityRatingColumn( GridColumn, UsesItemRatings ):
""" Column that displays community ratings for an item. """
def get_value( self, trans, grid, item ):
ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, item, webapp_model=trans.model )
- return trans.fill_template( "community_rating.mako",
- ave_item_rating=ave_item_rating,
- num_ratings=num_ratings,
+ return trans.fill_template( "community_rating.mako",
+ ave_item_rating=ave_item_rating,
+ num_ratings=num_ratings,
item_id=trans.security.encode_id( item.id ) )
def sort( self, trans, query, ascending, column_name=None ):
def get_foreign_key( source_class, target_class ):
@@ -511,12 +511,12 @@
return ann_snippet
def get_single_filter( self, user, a_filter ):
""" Filter by annotation and annotation owner. """
- return self.model_class.annotations.any(
- and_( func.lower( self.model_annotation_association_class.annotation ).like( "%" + a_filter.lower() + "%" ),
- # TODO: not sure why, to filter by owner's annotations, we have to do this rather than
+ return self.model_class.annotations.any(
+ and_( func.lower( self.model_annotation_association_class.annotation ).like( "%" + a_filter.lower() + "%" ),
+ # TODO: not sure why, to filter by owner's annotations, we have to do this rather than
# 'self.model_class.user==self.model_annotation_association_class.user'
- self.model_annotation_association_class.table.c.user_id==self.model_class.table.c.user_id ) )
-
+ self.model_annotation_association_class.table.c.user_id==self.model_class.table.c.user_id ) )
+
class CommunityTagsColumn( TextColumn ):
""" Column that supports community tags. """
def __init__( self, col_name, key, model_class=None, model_tag_association_class=None, filterable=None, grid_name=None ):
@@ -549,7 +549,7 @@
# Filter by all values.
clause_list.append( self.model_class.tags.any( func.lower( self.model_tag_association_class.user_value ).like( "%" + value.lower() + "%" ) ) )
return and_( *clause_list )
-
+
class IndividualTagsColumn( CommunityTagsColumn ):
""" Column that supports individual tags. """
def get_value( self, trans, grid, item ):
@@ -577,7 +577,7 @@
# Filter by individual's tag values.
clause_list.append( self.model_class.tags.any( and_( func.lower( self.model_tag_association_class.user_value ).like( "%" + value.lower() + "%" ), self.model_tag_association_class.user == user ) ) )
return and_( *clause_list )
-
+
class MulticolFilterColumn( TextColumn ):
""" Column that performs multicolumn filtering. """
def __init__( self, col_name, cols_to_filter, key, visible, filterable="default" ):
@@ -601,7 +601,7 @@
clause_list.append( column.get_filter( trans, user, column_filter ) )
complete_filter = or_( *clause_list )
return query.filter( complete_filter )
-
+
class OwnerColumn( TextColumn ):
""" Column that lists item's owner. """
def get_value( self, trans, grid, item ):
@@ -609,7 +609,7 @@
def sort( self, trans, query, ascending, column_name=None ):
""" Sort column using case-insensitive alphabetical sorting on item's username. """
if ascending:
- query = query.order_by( func.lower ( self.model_class.username ).asc() )
+ query = query.order_by( func.lower ( self.model_class.username ).asc() )
else:
query = query.order_by( func.lower( self.model_class.username ).desc() )
return query
@@ -751,17 +751,17 @@
return self.condition( item )
else:
return True
-
+
class DisplayByUsernameAndSlugGridOperation( GridOperation ):
""" Operation to display an item by username and slug. """
def get_url_args( self, item ):
return { 'action' : 'display_by_username_and_slug', 'username' : item.user.username, 'slug' : item.slug }
-
+
class GridAction( object ):
def __init__( self, label=None, url_args=None ):
self.label = label
self.url_args = url_args
-
+
class GridColumnFilter( object ):
def __init__( self, label, args=None ):
self.label = label
https://bitbucket.org/galaxy/galaxy-central/changeset/36ecf36af10d/
changeset: 36ecf36af10d
user: dannon
date: 2012-11-07 16:56:26
summary: Fix incorrect new_args vs new_kwargs. Remove dead code.
affected #: 1 file
diff -r 643931af6baf5cfdd772dab1342f47149a23620c -r 36ecf36af10d88e6cc35666352a69f35c9313439 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py
+++ b/lib/galaxy/web/framework/helpers/grids.py
@@ -116,7 +116,6 @@
except ValueError:
decoded_list = [ unicode ( item ) ]
elif isinstance( item, list):
- return_val = []
for element in item:
a_list = from_json_string_recurse( element )
decoded_list = decoded_list + a_list
@@ -247,7 +246,7 @@
if 'id' in new_kwargs:
id = new_kwargs[ 'id' ]
if isinstance( id, list ):
- new_args[ 'id' ] = [ trans.security.encode_id( i ) for i in id ]
+ new_kwargs[ 'id' ] = [ trans.security.encode_id( i ) for i in id ]
else:
new_kwargs[ 'id' ] = trans.security.encode_id( id )
return url_for( **new_kwargs )
https://bitbucket.org/galaxy/galaxy-central/changeset/4379576e8199/
changeset: 4379576e8199
user: dannon
date: 2012-11-07 16:57:54
summary: Sphinx: Add doc/build to .hgignore.
affected #: 1 file
diff -r 36ecf36af10d88e6cc35666352a69f35c9313439 -r 4379576e8199c18b11ec448f090e8300bc0f372d .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -41,6 +41,8 @@
static/welcome.html.*
static/welcome.html
+doc/build
+
# Tool data.
tool-data/annotation_profiler_options.xml
tool-data/annotation_profiler_valid_builds.txt
https://bitbucket.org/galaxy/galaxy-central/changeset/0a20563f9d46/
changeset: 0a20563f9d46
user: dannon
date: 2012-11-07 17:00:23
summary: Merge.
affected #: 4 files
diff -r 4379576e8199c18b11ec448f090e8300bc0f372d -r 0a20563f9d46749d041c079907492bf9d4866628 lib/galaxy/tool_shed/common_util.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/common_util.py
@@ -0,0 +1,93 @@
+import os, urllib2
+from galaxy import util
+from galaxy.util.odict import odict
+from galaxy.tool_shed.encoding_util import *
+
+REPOSITORY_OWNER = 'devteam'
+
+def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ):
+ # Get the 000x_tools.xml file associated with the current migrate_tools version number.
+ tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
+ # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
+ migrated_tool_configs_dict = odict()
+ tree = util.parse_xml( tools_xml_file_path )
+ root = tree.getroot()
+ tool_shed = root.get( 'name' )
+ tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed )
+ # The default behavior is that the tool shed is down.
+ tool_shed_accessible = False
+ if tool_shed_url:
+ for elem in root:
+ if elem.tag == 'repository':
+ tool_dependencies = []
+ tool_dependencies_dict = {}
+ repository_name = elem.get( 'name' )
+ changeset_revision = elem.get( 'changeset_revision' )
+ url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \
+ ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
+ try:
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ tool_shed_accessible = True
+ except Exception, e:
+ # Tool shed may be unavailable - we have to set tool_shed_accessible since we're looping.
+ tool_shed_accessible = False
+ print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
+ if tool_shed_accessible:
+ if text:
+ tool_dependencies_dict = tool_shed_decode( text )
+ for dependency_key, requirements_dict in tool_dependencies_dict.items():
+ tool_dependency_name = requirements_dict[ 'name' ]
+ tool_dependency_version = requirements_dict[ 'version' ]
+ tool_dependency_type = requirements_dict[ 'type' ]
+ tool_dependency_readme = requirements_dict.get( 'readme', '' )
+ tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
+ for tool_elem in elem.findall( 'tool' ):
+ migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies
+ if tool_shed_accessible:
+ # Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
+ missing_tool_configs_dict = odict()
+ for tool_panel_config in tool_panel_configs:
+ tree = util.parse_xml( tool_panel_config )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'tool':
+ missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
+ elif elem.tag == 'section':
+ for section_elem in elem:
+ if section_elem.tag == 'tool':
+ missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
+ else:
+ exception_msg = '\n\nThe entry for the main Galaxy tool shed at %s is missing from the %s file. ' % ( tool_shed, app.config.tool_sheds_config )
+ exception_msg += 'The entry for this tool shed must always be available in this file, so re-add it before attempting to start your Galaxy server.\n'
+ raise Exception( exception_msg )
+ return tool_shed_accessible, missing_tool_configs_dict
+def check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ):
+ file_path = elem.get( 'file', None )
+ if file_path:
+ path, name = os.path.split( file_path )
+ if name in migrated_tool_configs_dict:
+ tool_dependencies = migrated_tool_configs_dict[ name ]
+ missing_tool_configs_dict[ name ] = tool_dependencies
+ return missing_tool_configs_dict
+def get_non_shed_tool_panel_configs( app ):
+ # Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml.
+ config_filenames = []
+ for config_filename in app.config.tool_configs:
+ # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
+ # <toolbox tool_path="../shed_tools">
+ tree = util.parse_xml( config_filename )
+ root = tree.getroot()
+ tool_path = root.get( 'tool_path', None )
+ if tool_path is None:
+ config_filenames.append( config_filename )
+ return config_filenames
+def get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ):
+ search_str = '://%s' % tool_shed
+ for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
+ if shed_url.find( search_str ) >= 0:
+ if shed_url.endswith( '/' ):
+ shed_url = shed_url.rstrip( '/' )
+ return shed_url
+ return None
diff -r 4379576e8199c18b11ec448f090e8300bc0f372d -r 0a20563f9d46749d041c079907492bf9d4866628 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -7,8 +7,7 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
from galaxy.util.odict import odict
-
-REPOSITORY_OWNER = 'devteam'
+from galaxy.tool_shed.common_util import *
class InstallManager( object ):
def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ):
@@ -19,10 +18,11 @@
self.app = app
self.toolbox = self.app.toolbox
self.migrated_tools_config = migrated_tools_config
- # If install_dependencies is True, but tool_dependency_dir is not set,
- # do not attempt to install, but print informative error message
+ # If install_dependencies is True but tool_dependency_dir is not set, do not attempt to install but print informative error message.
if install_dependencies and app.config.tool_dependency_dir is None:
- raise Exception( 'You are attempting to install tool dependencies, but do not have a value for "tool_dependency_dir" set in your ini file. Please set this to the path where you would like to install dependencies and rerun the migration script.' )
+ message = 'You are attempting to install tool dependencies but do not have a value for "tool_dependency_dir" set in your universe_wsgi.ini '
+ message += 'file. Set this location value to the path where you want tool dependencies installed and rerun the migration script.'
+ raise Exception( message )
# Get the local non-shed related tool panel configs (there can be more than one, and the default name is tool_conf.xml).
self.proprietary_tool_confs = self.non_shed_tool_panel_configs
self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems( latest_migration_script_number )
@@ -38,8 +38,39 @@
self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
self.repository_owner = REPOSITORY_OWNER
index, self.shed_config_dict = get_shed_tool_conf_dict( app, self.migrated_tools_config )
- for repository_elem in root:
- self.install_repository( repository_elem, install_dependencies )
+ # Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
+ # tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
+ # The default behavior is that the tool shed is down.
+ tool_shed_accessible = False
+ tool_panel_configs = get_non_shed_tool_panel_configs( app )
+ if tool_panel_configs:
+ # The missing_tool_configs_dict contents are something like:
+ # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
+ tool_shed_accessible, missing_tool_configs_dict = check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
+ else:
+ # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
+ # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
+ # the database.
+ tool_shed_accessible = True
+ missing_tool_configs_dict = odict()
+ if tool_shed_accessible:
+ if len( self.proprietary_tool_confs ) == 1:
+ plural = ''
+ file_names = self.proprietary_tool_confs[ 0 ]
+ else:
+ plural = 's'
+ file_names = ', '.join( self.proprietary_tool_confs )
+ if missing_tool_configs_dict:
+ for repository_elem in root:
+ self.install_repository( repository_elem, install_dependencies )
+ else:
+ message = "\nNo tools associated with migration stage %s are defined in your " % str( latest_migration_script_number )
+ message += "file%s named %s,\nso no repositories will be installed on disk.\n" % ( plural, file_names )
+ print message
+ else:
+ message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % str( latest_migration_script_number )
+ message += "Try again later.\n"
+ print message
def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
if self.shed_config_dict.get( 'tool_path' ):
relative_install_dir = os.path.join( self.shed_config_dict['tool_path'], relative_install_dir )
@@ -144,7 +175,8 @@
for k, v in tool_panel_dict_for_tool_config.items():
tool_panel_dict_for_display[ k ] = v
else:
- print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) )
+ print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' \
+ % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) )
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=self.app,
repository=tool_shed_repository,
repository_clone_url=repository_clone_url,
@@ -315,20 +347,9 @@
update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
@property
def non_shed_tool_panel_configs( self ):
- # Get the non-shed related tool panel config file names from the Galaxy config - the default is tool_conf.xml.
- config_filenames = []
- for config_filename in self.app.config.tool_configs:
- # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
- # <toolbox tool_path="../shed_tools">
- tree = util.parse_xml( config_filename )
- root = tree.getroot()
- tool_path = root.get( 'tool_path', None )
- if tool_path is None:
- config_filenames.append( config_filename )
- return config_filenames
+ return get_non_shed_tool_panel_configs( self.app )
def __get_url_from_tool_shed( self, tool_shed ):
- # The value of tool_shed is something like: toolshed.g2.bx.psu.edu
- # We need the URL to this tool shed, which is something like:
+ # The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like:
# http://toolshed.g2.bx.psu.edu/
for shed_name, shed_url in self.app.tool_shed_registry.tool_sheds.items():
if shed_url.find( tool_shed ) >= 0:
diff -r 4379576e8199c18b11ec448f090e8300bc0f372d -r 0a20563f9d46749d041c079907492bf9d4866628 lib/galaxy/tool_shed/migrate/common.py
--- a/lib/galaxy/tool_shed/migrate/common.py
+++ b/lib/galaxy/tool_shed/migrate/common.py
@@ -1,103 +1,13 @@
-import sys, os, ConfigParser, urllib2
+import sys, os, ConfigParser
import galaxy.config
import galaxy.datatypes.registry
-from galaxy import util, tools
+from galaxy import tools
import galaxy.model.mapping
import galaxy.tools.search
from galaxy.objectstore import build_object_store_from_config
+from galaxy.tool_shed.common_util import *
import galaxy.tool_shed.tool_shed_registry
from galaxy.tool_shed import install_manager
-from galaxy.tool_shed.encoding_util import *
-from galaxy.util.odict import odict
-
-REPOSITORY_OWNER = 'devteam'
-
-def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ):
- # Get the 000x_tools.xml file associated with the current migrate_tools version number.
- tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
- # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
- migrated_tool_configs_dict = odict()
- tree = util.parse_xml( tools_xml_file_path )
- root = tree.getroot()
- tool_shed = root.get( 'name' )
- tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed )
- # The default behavior is that the tool shed is down.
- tool_shed_accessible = False
- if tool_shed_url:
- for elem in root:
- if elem.tag == 'repository':
- tool_dependencies = []
- tool_dependencies_dict = {}
- repository_name = elem.get( 'name' )
- changeset_revision = elem.get( 'changeset_revision' )
- url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \
- ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
- try:
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- tool_shed_accessible = True
- except Exception, e:
- # Tool shed may be unavailable - we have to set tool_shed_accessible since we're looping.
- tool_shed_accessible = False
- print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
- if tool_shed_accessible:
- if text:
- tool_dependencies_dict = tool_shed_decode( text )
- for dependency_key, requirements_dict in tool_dependencies_dict.items():
- tool_dependency_name = requirements_dict[ 'name' ]
- tool_dependency_version = requirements_dict[ 'version' ]
- tool_dependency_type = requirements_dict[ 'type' ]
- tool_dependency_readme = requirements_dict.get( 'readme', '' )
- tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
- for tool_elem in elem.findall( 'tool' ):
- migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies
- if tool_shed_accessible:
- # Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
- missing_tool_configs_dict = odict()
- for tool_panel_config in tool_panel_configs:
- tree = util.parse_xml( tool_panel_config )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'tool':
- missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
- elif elem.tag == 'section':
- for section_elem in elem:
- if section_elem.tag == 'tool':
- missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
- else:
- exception_msg = '\n\nThe entry for the main Galaxy tool shed at %s is missing from the %s file. ' % ( tool_shed, app.config.tool_sheds_config )
- exception_msg += 'The entry for this tool shed must always be available in this file, so re-add it before attempting to start your Galaxy server.\n'
- raise Exception( exception_msg )
- return tool_shed_accessible, missing_tool_configs_dict
-def check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ):
- file_path = elem.get( 'file', None )
- if file_path:
- path, name = os.path.split( file_path )
- if name in migrated_tool_configs_dict:
- tool_dependencies = migrated_tool_configs_dict[ name ]
- missing_tool_configs_dict[ name ] = tool_dependencies
- return missing_tool_configs_dict
-def get_non_shed_tool_panel_configs( app ):
- # Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml.
- config_filenames = []
- for config_filename in app.config.tool_configs:
- # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
- # <toolbox tool_path="../shed_tools">
- tree = util.parse_xml( config_filename )
- root = tree.getroot()
- tool_path = root.get( 'tool_path', None )
- if tool_path is None:
- config_filenames.append( config_filename )
- return config_filenames
-def get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ):
- search_str = '://%s' % tool_shed
- for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
- if shed_url.find( search_str ) >= 0:
- if shed_url.endswith( '/' ):
- shed_url = shed_url.rstrip( '/' )
- return shed_url
- return None
class MigrateToolsApplication( object ):
"""Encapsulates the state of a basic Galaxy Universe application in order to initiate the Install Manager"""
diff -r 4379576e8199c18b11ec448f090e8300bc0f372d -r 0a20563f9d46749d041c079907492bf9d4866628 scripts/migrate_tools/migrate_tools.py
--- a/scripts/migrate_tools/migrate_tools.py
+++ b/scripts/migrate_tools/migrate_tools.py
@@ -26,8 +26,8 @@
else:
plural = 's'
file_names = ', '.join( non_shed_tool_confs )
-msg = "\nThe installation process is finished. You should now remove entries for the installed tools from your file%s named\n" % plural
-msg += "%s and start your Galaxy server.\n" % file_names
+msg = "\nThe installation process is finished. If any tools associated with this migration were defined in your file%s named\n" % plural
+msg += "%s, then you should remove entries for them and start your Galaxy server.\n" % file_names
print msg
app.shutdown()
sys.exit( 0 )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d10de4954af2/
changeset: d10de4954af2
user: greg
date: 2012-11-07 16:51:35
summary: Since tool migration scripts can be executed any number of times, make sure that no repositories are installed if no tools associated with the migration are defined in the tool_conf.xml file (or equivalent). This fix is associated only with the recently introduced Galaxy admin UI feature displaying the list of migration stages currently available. This is the way the migration process at server startup has always worked, so no changes are needed in that scenario.
affected #: 4 files
diff -r 6971bbc8bd8418baea51345b891e1aa647d14a88 -r d10de4954af2e94cb7c517ae6398d0247843168e lib/galaxy/tool_shed/common_util.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/common_util.py
@@ -0,0 +1,93 @@
+import os, urllib2
+from galaxy import util
+from galaxy.util.odict import odict
+from galaxy.tool_shed.encoding_util import *
+
+REPOSITORY_OWNER = 'devteam'
+
+def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ):
+ # Get the 000x_tools.xml file associated with the current migrate_tools version number.
+ tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
+ # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
+ migrated_tool_configs_dict = odict()
+ tree = util.parse_xml( tools_xml_file_path )
+ root = tree.getroot()
+ tool_shed = root.get( 'name' )
+ tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed )
+ # The default behavior is that the tool shed is down.
+ tool_shed_accessible = False
+ if tool_shed_url:
+ for elem in root:
+ if elem.tag == 'repository':
+ tool_dependencies = []
+ tool_dependencies_dict = {}
+ repository_name = elem.get( 'name' )
+ changeset_revision = elem.get( 'changeset_revision' )
+ url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \
+ ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
+ try:
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ tool_shed_accessible = True
+ except Exception, e:
+ # Tool shed may be unavailable - we have to set tool_shed_accessible since we're looping.
+ tool_shed_accessible = False
+ print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
+ if tool_shed_accessible:
+ if text:
+ tool_dependencies_dict = tool_shed_decode( text )
+ for dependency_key, requirements_dict in tool_dependencies_dict.items():
+ tool_dependency_name = requirements_dict[ 'name' ]
+ tool_dependency_version = requirements_dict[ 'version' ]
+ tool_dependency_type = requirements_dict[ 'type' ]
+ tool_dependency_readme = requirements_dict.get( 'readme', '' )
+ tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
+ for tool_elem in elem.findall( 'tool' ):
+ migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies
+ if tool_shed_accessible:
+ # Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
+ missing_tool_configs_dict = odict()
+ for tool_panel_config in tool_panel_configs:
+ tree = util.parse_xml( tool_panel_config )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'tool':
+ missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
+ elif elem.tag == 'section':
+ for section_elem in elem:
+ if section_elem.tag == 'tool':
+ missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
+ else:
+ exception_msg = '\n\nThe entry for the main Galaxy tool shed at %s is missing from the %s file. ' % ( tool_shed, app.config.tool_sheds_config )
+ exception_msg += 'The entry for this tool shed must always be available in this file, so re-add it before attempting to start your Galaxy server.\n'
+ raise Exception( exception_msg )
+ return tool_shed_accessible, missing_tool_configs_dict
+def check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ):
+ file_path = elem.get( 'file', None )
+ if file_path:
+ path, name = os.path.split( file_path )
+ if name in migrated_tool_configs_dict:
+ tool_dependencies = migrated_tool_configs_dict[ name ]
+ missing_tool_configs_dict[ name ] = tool_dependencies
+ return missing_tool_configs_dict
+def get_non_shed_tool_panel_configs( app ):
+ # Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml.
+ config_filenames = []
+ for config_filename in app.config.tool_configs:
+ # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
+ # <toolbox tool_path="../shed_tools">
+ tree = util.parse_xml( config_filename )
+ root = tree.getroot()
+ tool_path = root.get( 'tool_path', None )
+ if tool_path is None:
+ config_filenames.append( config_filename )
+ return config_filenames
+def get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ):
+ search_str = '://%s' % tool_shed
+ for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
+ if shed_url.find( search_str ) >= 0:
+ if shed_url.endswith( '/' ):
+ shed_url = shed_url.rstrip( '/' )
+ return shed_url
+ return None
diff -r 6971bbc8bd8418baea51345b891e1aa647d14a88 -r d10de4954af2e94cb7c517ae6398d0247843168e lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -7,8 +7,7 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
from galaxy.util.odict import odict
-
-REPOSITORY_OWNER = 'devteam'
+from galaxy.tool_shed.common_util import *
class InstallManager( object ):
def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ):
@@ -19,10 +18,11 @@
self.app = app
self.toolbox = self.app.toolbox
self.migrated_tools_config = migrated_tools_config
- # If install_dependencies is True, but tool_dependency_dir is not set,
- # do not attempt to install, but print informative error message
+ # If install_dependencies is True but tool_dependency_dir is not set, do not attempt to install but print informative error message.
if install_dependencies and app.config.tool_dependency_dir is None:
- raise Exception( 'You are attempting to install tool dependencies, but do not have a value for "tool_dependency_dir" set in your ini file. Please set this to the path where you would like to install dependencies and rerun the migration script.' )
+ message = 'You are attempting to install tool dependencies but do not have a value for "tool_dependency_dir" set in your universe_wsgi.ini '
+ message += 'file. Set this location value to the path where you want tool dependencies installed and rerun the migration script.'
+ raise Exception( message )
# Get the local non-shed related tool panel configs (there can be more than one, and the default name is tool_conf.xml).
self.proprietary_tool_confs = self.non_shed_tool_panel_configs
self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems( latest_migration_script_number )
@@ -38,8 +38,39 @@
self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
self.repository_owner = REPOSITORY_OWNER
index, self.shed_config_dict = get_shed_tool_conf_dict( app, self.migrated_tools_config )
- for repository_elem in root:
- self.install_repository( repository_elem, install_dependencies )
+ # Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
+ # tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
+ # The default behavior is that the tool shed is down.
+ tool_shed_accessible = False
+ tool_panel_configs = get_non_shed_tool_panel_configs( app )
+ if tool_panel_configs:
+ # The missing_tool_configs_dict contents are something like:
+ # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
+ tool_shed_accessible, missing_tool_configs_dict = check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
+ else:
+ # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
+ # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
+ # the database.
+ tool_shed_accessible = True
+ missing_tool_configs_dict = odict()
+ if tool_shed_accessible:
+ if len( self.proprietary_tool_confs ) == 1:
+ plural = ''
+ file_names = self.proprietary_tool_confs[ 0 ]
+ else:
+ plural = 's'
+ file_names = ', '.join( self.proprietary_tool_confs )
+ if missing_tool_configs_dict:
+ for repository_elem in root:
+ self.install_repository( repository_elem, install_dependencies )
+ else:
+ message = "\nNo tools associated with migration stage %s are defined in your " % str( latest_migration_script_number )
+ message += "file%s named %s,\nso no repositories will be installed on disk.\n" % ( plural, file_names )
+ print message
+ else:
+ message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % str( latest_migration_script_number )
+ message += "Try again later.\n"
+ print message
def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
if self.shed_config_dict.get( 'tool_path' ):
relative_install_dir = os.path.join( self.shed_config_dict['tool_path'], relative_install_dir )
@@ -144,7 +175,8 @@
for k, v in tool_panel_dict_for_tool_config.items():
tool_panel_dict_for_display[ k ] = v
else:
- print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) )
+ print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' \
+ % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) )
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=self.app,
repository=tool_shed_repository,
repository_clone_url=repository_clone_url,
@@ -315,20 +347,9 @@
update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
@property
def non_shed_tool_panel_configs( self ):
- # Get the non-shed related tool panel config file names from the Galaxy config - the default is tool_conf.xml.
- config_filenames = []
- for config_filename in self.app.config.tool_configs:
- # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
- # <toolbox tool_path="../shed_tools">
- tree = util.parse_xml( config_filename )
- root = tree.getroot()
- tool_path = root.get( 'tool_path', None )
- if tool_path is None:
- config_filenames.append( config_filename )
- return config_filenames
+ return get_non_shed_tool_panel_configs( self.app )
def __get_url_from_tool_shed( self, tool_shed ):
- # The value of tool_shed is something like: toolshed.g2.bx.psu.edu
- # We need the URL to this tool shed, which is something like:
+ # The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like:
# http://toolshed.g2.bx.psu.edu/
for shed_name, shed_url in self.app.tool_shed_registry.tool_sheds.items():
if shed_url.find( tool_shed ) >= 0:
diff -r 6971bbc8bd8418baea51345b891e1aa647d14a88 -r d10de4954af2e94cb7c517ae6398d0247843168e lib/galaxy/tool_shed/migrate/common.py
--- a/lib/galaxy/tool_shed/migrate/common.py
+++ b/lib/galaxy/tool_shed/migrate/common.py
@@ -1,103 +1,13 @@
-import sys, os, ConfigParser, urllib2
+import sys, os, ConfigParser
import galaxy.config
import galaxy.datatypes.registry
-from galaxy import util, tools
+from galaxy import tools
import galaxy.model.mapping
import galaxy.tools.search
from galaxy.objectstore import build_object_store_from_config
+from galaxy.tool_shed.common_util import *
import galaxy.tool_shed.tool_shed_registry
from galaxy.tool_shed import install_manager
-from galaxy.tool_shed.encoding_util import *
-from galaxy.util.odict import odict
-
-REPOSITORY_OWNER = 'devteam'
-
-def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ):
- # Get the 000x_tools.xml file associated with the current migrate_tools version number.
- tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
- # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
- migrated_tool_configs_dict = odict()
- tree = util.parse_xml( tools_xml_file_path )
- root = tree.getroot()
- tool_shed = root.get( 'name' )
- tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed )
- # The default behavior is that the tool shed is down.
- tool_shed_accessible = False
- if tool_shed_url:
- for elem in root:
- if elem.tag == 'repository':
- tool_dependencies = []
- tool_dependencies_dict = {}
- repository_name = elem.get( 'name' )
- changeset_revision = elem.get( 'changeset_revision' )
- url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \
- ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
- try:
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- tool_shed_accessible = True
- except Exception, e:
- # Tool shed may be unavailable - we have to set tool_shed_accessible since we're looping.
- tool_shed_accessible = False
- print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
- if tool_shed_accessible:
- if text:
- tool_dependencies_dict = tool_shed_decode( text )
- for dependency_key, requirements_dict in tool_dependencies_dict.items():
- tool_dependency_name = requirements_dict[ 'name' ]
- tool_dependency_version = requirements_dict[ 'version' ]
- tool_dependency_type = requirements_dict[ 'type' ]
- tool_dependency_readme = requirements_dict.get( 'readme', '' )
- tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
- for tool_elem in elem.findall( 'tool' ):
- migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies
- if tool_shed_accessible:
- # Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
- missing_tool_configs_dict = odict()
- for tool_panel_config in tool_panel_configs:
- tree = util.parse_xml( tool_panel_config )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'tool':
- missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
- elif elem.tag == 'section':
- for section_elem in elem:
- if section_elem.tag == 'tool':
- missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
- else:
- exception_msg = '\n\nThe entry for the main Galaxy tool shed at %s is missing from the %s file. ' % ( tool_shed, app.config.tool_sheds_config )
- exception_msg += 'The entry for this tool shed must always be available in this file, so re-add it before attempting to start your Galaxy server.\n'
- raise Exception( exception_msg )
- return tool_shed_accessible, missing_tool_configs_dict
-def check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ):
- file_path = elem.get( 'file', None )
- if file_path:
- path, name = os.path.split( file_path )
- if name in migrated_tool_configs_dict:
- tool_dependencies = migrated_tool_configs_dict[ name ]
- missing_tool_configs_dict[ name ] = tool_dependencies
- return missing_tool_configs_dict
-def get_non_shed_tool_panel_configs( app ):
- # Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml.
- config_filenames = []
- for config_filename in app.config.tool_configs:
- # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
- # <toolbox tool_path="../shed_tools">
- tree = util.parse_xml( config_filename )
- root = tree.getroot()
- tool_path = root.get( 'tool_path', None )
- if tool_path is None:
- config_filenames.append( config_filename )
- return config_filenames
-def get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ):
- search_str = '://%s' % tool_shed
- for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
- if shed_url.find( search_str ) >= 0:
- if shed_url.endswith( '/' ):
- shed_url = shed_url.rstrip( '/' )
- return shed_url
- return None
class MigrateToolsApplication( object ):
"""Encapsulates the state of a basic Galaxy Universe application in order to initiate the Install Manager"""
diff -r 6971bbc8bd8418baea51345b891e1aa647d14a88 -r d10de4954af2e94cb7c517ae6398d0247843168e scripts/migrate_tools/migrate_tools.py
--- a/scripts/migrate_tools/migrate_tools.py
+++ b/scripts/migrate_tools/migrate_tools.py
@@ -26,8 +26,8 @@
else:
plural = 's'
file_names = ', '.join( non_shed_tool_confs )
-msg = "\nThe installation process is finished. You should now remove entries for the installed tools from your file%s named\n" % plural
-msg += "%s and start your Galaxy server.\n" % file_names
+msg = "\nThe installation process is finished. If any tools associated with this migration were defined in your file%s named\n" % plural
+msg += "%s, then you should remove entries for them and start your Galaxy server.\n" % file_names
print msg
app.shutdown()
sys.exit( 0 )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6971bbc8bd84/
changeset: 6971bbc8bd84
user: dan
date: 2012-11-07 16:45:10
summary: Change indent from 3 spaces to 4 for 8171:4b62ed46e5b6.
affected #: 1 file
diff -r 4b62ed46e5b6f5448ec262e445eefd0b17ef4898 -r 6971bbc8bd8418baea51345b891e1aa647d14a88 scripts/set_metadata.py
--- a/scripts/set_metadata.py
+++ b/scripts/set_metadata.py
@@ -14,9 +14,9 @@
# ensure supported version
from check_python import check_python
try:
- check_python()
+ check_python()
except:
- sys.exit(1)
+ sys.exit(1)
new_path = [ os.path.join( os.getcwd(), "lib" ) ]
new_path.extend( sys.path[1:] ) # remove scripts/ from the path
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4b62ed46e5b6/
changeset: 4b62ed46e5b6
user: dan
date: 2012-11-07 16:35:56
summary: Use check_python() to verify python version in set_metadata.py instead of hardcoded minimum version check.
affected #: 1 file
diff -r 4ce18911037cc4297a84dfcfe81f36ccac98d425 -r 4b62ed46e5b6f5448ec262e445eefd0b17ef4898 scripts/set_metadata.py
--- a/scripts/set_metadata.py
+++ b/scripts/set_metadata.py
@@ -11,7 +11,12 @@
log = logging.getLogger( __name__ )
import os, sys, cPickle
-assert sys.version_info[:2] >= ( 2, 4 )
+# ensure supported version
+from check_python import check_python
+try:
+ check_python()
+except:
+ sys.exit(1)
new_path = [ os.path.join( os.getcwd(), "lib" ) ]
new_path.extend( sys.path[1:] ) # remove scripts/ from the path
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4ce18911037c/
changeset: 4ce18911037c
user: dan
date: 2012-11-07 15:52:43
summary: When extracting a workflow from a history, provide a warning message if the tool version for the job does not match the tool version of the currently loaded tool.
affected #: 1 file
diff -r 6ff28399645ae158a03cec4b32c15a78da9b8018 -r 4ce18911037cc4297a84dfcfe81f36ccac98d425 templates/workflow/build_from_current_history.mako
--- a/templates/workflow/build_from_current_history.mako
+++ b/templates/workflow/build_from_current_history.mako
@@ -1,4 +1,5 @@
<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" /><% _=n_ %>
@@ -111,6 +112,10 @@
disabled = True
else:
disabled = False
+ if tool and tool.version != job.tool_version:
+ tool_version_warning = 'Dataset was created with tool version "%s", but workflow extraction will use version "%s".' % ( job.tool_version, tool.version )
+ else:
+ tool_version_warning = ''
%><tr>
@@ -123,6 +128,9 @@
<div style="font-style: italic; color: gray">This tool cannot be used in workflows</div>
%else:
<div><input type="checkbox" name="job_ids" value="${job.id}" checked="true" />Include "${tool_name}" in workflow</div>
+ %if tool_version_warning:
+ ${ render_msg( tool_version_warning, status="warning" ) }
+ %endif
%endif
</div></div>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6ff28399645a/
changeset: 6ff28399645a
user: dan
date: 2012-11-07 15:30:46
summary: When getting job parameters for extracting a workflow from a history, set ignore_errors to True. Prevents a traceback when e.g. a tool was updated and had a text value changed to an integer.
affected #: 1 file
diff -r af1d4e5a8b6325ea311513ffe9d671fe2b87764e -r 6ff28399645ae158a03cec4b32c15a78da9b8018 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -1260,7 +1260,7 @@
assert job_id in jobs_by_id, "Attempt to create workflow with job not connected to current history"
job = jobs_by_id[ job_id ]
tool = trans.app.toolbox.get_tool( job.tool_id )
- param_values = job.get_param_values( trans.app )
+ param_values = job.get_param_values( trans.app, ignore_errors=True ) #If a tool was updated and e.g. had a text value changed to an integer, we don't want a traceback here
associations = cleanup_param_values( tool.inputs, param_values )
step = model.WorkflowStep()
step.type = 'tool'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/af1d4e5a8b63/
changeset: af1d4e5a8b63
user: clements
date: 2012-11-01 09:23:56
summary: More work on docstrings to get them to be Sphinx compatible. Added dependency to Sphinx Makefile to get rid of errors when build files weren't there.
affected #: 5 files
diff -r b0c9163490705bd872c2877070196056bc4e872e -r af1d4e5a8b6325ea311513ffe9d671fe2b87764e doc/Makefile
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -14,8 +14,22 @@
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+# Galaxy Local variables
+
+TOOLDATASHAREDDIR = ../tool-data/shared
+TOOLDATABUILDFILES = $(TOOLDATASHAREDDIR)/ensembl/builds.txt \
+ $(TOOLDATASHAREDDIR)/ncbi/builds.txt \
+ $(TOOLDATASHAREDDIR)/ucsc/publicbuilds.txt
+
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+# Sphinx wants the build files to be there; Copy the sample files into
+# place if we don't already have the build files.
+$(TOOLDATABUILDFILES) :
+ /bin/cp $@.sample $@
+
+
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@@ -41,7 +55,7 @@
clean:
-rm -rf $(BUILDDIR)/*
-html:
+html: $(TOOLDATABUILDFILES)
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
diff -r b0c9163490705bd872c2877070196056bc4e872e -r af1d4e5a8b6325ea311513ffe9d671fe2b87764e lib/galaxy/datatypes/assembly.py
--- a/lib/galaxy/datatypes/assembly.py
+++ b/lib/galaxy/datatypes/assembly.py
@@ -25,25 +25,26 @@
# It should call get_headers() like other sniff methods.
"""
Determines whether the file is an amos assembly file format
- Example:
- {CTG
- iid:1
- eid:1
- seq:
- CCTCTCCTGTAGAGTTCAACCGA-GCCGGTAGAGTTTTATCA
- .
- qlt:
- DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD
- .
- {TLE
- src:1027
- off:0
- clr:618,0
- gap:
- 250 612
- .
- }
- }
+ Example::
+
+ {CTG
+ iid:1
+ eid:1
+ seq:
+ CCTCTCCTGTAGAGTTCAACCGA-GCCGGTAGAGTTTTATCA
+ .
+ qlt:
+ DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD
+ .
+ {TLE
+ src:1027
+ off:0
+ clr:618,0
+ gap:
+ 250 612
+ .
+ }
+ }
"""
isAmos = False
try:
@@ -68,7 +69,8 @@
def sniff( self, filename ):
"""
Determines whether the file is a velveth produced fasta format
- The id line has 3 fields separated by tabs: sequence_name sequence_index cataegory
+ The id line has 3 fields separated by tabs: sequence_name sequence_index cataegory::
+
>SEQUENCE_0_length_35 1 1
GGATATAGGGCCAACCCAACTCAACGGCCTGTCTT
>SEQUENCE_1_length_35 2 1
@@ -103,7 +105,7 @@
def sniff( self, filename ):
"""
- Determines whether the file is a velveth produced RoadMap
+ Determines whether the file is a velveth produced RoadMap::
142858 21 1
ROADMAP 1
ROADMAP 2
diff -r b0c9163490705bd872c2877070196056bc4e872e -r af1d4e5a8b6325ea311513ffe9d671fe2b87764e lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -797,13 +797,14 @@
path, name = os.path.split(__file__)
full_path = os.path.join( path, 'test', fname )
return full_path
+
def get_file_peek( file_name, is_multi_byte=False, WIDTH=256, LINE_COUNT=5, skipchars=[] ):
"""
- Returns the first LINE_COUNT lines wrapped to WIDTH
+ Returns the first LINE_COUNT lines wrapped to WIDTH::
- ## >>> fname = get_test_fname('4.bed')
- ## >>> get_file_peek(fname)
- ## 'chr22 30128507 31828507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +\n'
+ ## >>> fname = get_test_fname('4.bed')
+ ## >>> get_file_peek(fname)
+ ## 'chr22 30128507 31828507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +\n'
"""
# Set size for file.readline() to a negative number to force it to
# read until either a newline or EOF. Needed for datasets with very
diff -r b0c9163490705bd872c2877070196056bc4e872e -r af1d4e5a8b6325ea311513ffe9d671fe2b87764e lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py
+++ b/lib/galaxy/datatypes/genetics.py
@@ -67,14 +67,18 @@
"""
from the ever-helpful angie hinrichs angie(a)soe.ucsc.edu
a genome graphs call looks like this
+
http://genome.ucsc.edu/cgi-bin/hgGenome?clade=mammal&org=Human&db=hg18&hgGe…
- &hgGenome_dataSetDescription=test&hgGenome_formatType=best%20guess&hgGenome_markerType=best%20guess
+ &hgGenome_dataSetDescription=test&hgGenome_formatType=best%20guess&hgGenome_markerType=best%20guess
&hgGenome_columnLabels=best%20guess&hgGenome_maxVal=&hgGenome_labelVals=
&hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=http://galaxy.esphealth.org/datasets/333/display/index
&hgGenome_doSubmitUpload=submit
- Galaxy gives this for an interval file
+
+ Galaxy gives this for an interval file
+
http://genome.ucsc.edu/cgi-bin/hgTracks?db=hg18&position=chr1:1-1000&hgt.cu…
http%3A%2F%2Fgalaxy.esphealth.org%2Fdisplay_as%3Fid%3D339%26display_app%3Ducsc
+
"""
ret_val = []
ggtail = 'hgGenome_doSubmitUpload=submit'
diff -r b0c9163490705bd872c2877070196056bc4e872e -r af1d4e5a8b6325ea311513ffe9d671fe2b87764e lib/galaxy/datatypes/sequence.py
--- a/lib/galaxy/datatypes/sequence.py
+++ b/lib/galaxy/datatypes/sequence.py
@@ -25,11 +25,14 @@
"""
Class storing information about a sequence file composed of multiple gzip files concatenated as
one OR an uncompressed file. In the GZIP case, each sub-file's location is stored in start and end.
- The format of the file is JSON:
- { "sections" : [
- { "start" : "x", "end" : "y", "sequences" : "z" },
- ...
- ]}
+
+ The format of the file is JSON::
+
+ { "sections" : [
+ { "start" : "x", "end" : "y", "sequences" : "z" },
+ ...
+ ]}
+
"""
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
@@ -224,12 +227,19 @@
For complete details see http://www.ncbi.nlm.nih.gov/blast/fasta.shtml
Rules for sniffing as True:
+
We don't care about line length (other than empty lines).
+
The first non-empty line must start with '>' and the Very Next line.strip() must have sequence data and not be a header.
+
'sequence data' here is loosely defined as non-empty lines which do not start with '>'
+
This will cause Color Space FASTA (csfasta) to be detected as True (they are, after all, still FASTA files - they have a header line followed by sequence data)
+
Previously this method did some checking to determine if the sequence data had integers (presumably to differentiate between fasta and csfasta)
+
This should be done through sniff order, where csfasta (currently has a null sniff function) is detected for first (stricter definition) followed sometime after by fasta
+
We will only check that the first purported sequence is correctly formatted.
>>> fname = get_test_fname( 'sequence.maf' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b0c916349070/
changeset: b0c916349070
user: clements
date: 2012-11-01 07:08:09
summary: Update docstrings so they no longer generate warnings in Sphinx.
affected #: 2 files
diff -r f2abe519d05e6f4e01fcf9983ab3fb72f5009272 -r b0c9163490705bd872c2877070196056bc4e872e lib/galaxy/datatypes/util/gff_util.py
--- a/lib/galaxy/datatypes/util/gff_util.py
+++ b/lib/galaxy/datatypes/util/gff_util.py
@@ -119,12 +119,13 @@
Reader wrapper for GFF files.
Wrapper has two major functions:
- (1) group entries for GFF file (via group column), GFF3 (via id attribute),
- or GTF (via gene_id/transcript id);
- (2) convert coordinates from GFF format--starting and ending coordinates
- are 1-based, closed--to the 'traditional'/BED interval format--0 based,
- half-open. This is useful when using GFF files as inputs to tools that
- expect traditional interval format.
+
+ 1. group entries for GFF file (via group column), GFF3 (via id attribute),
+ or GTF (via gene_id/transcript id);
+ 2. convert coordinates from GFF format--starting and ending coordinates
+ are 1-based, closed--to the 'traditional'/BED interval format--0 based,
+ half-open. This is useful when using GFF files as inputs to tools that
+ expect traditional interval format.
"""
def __init__( self, reader, chrom_col=0, feature_col=2, start_col=3, \
@@ -303,9 +304,13 @@
"""
Parses a GFF/GTF attribute string and returns a dictionary of name-value
pairs. The general format for a GFF3 attributes string is
+
name1=value1;name2=value2
+
The general format for a GTF attribute string is
+
name1 "value1" ; name2 "value2"
+
The general format for a GFF attribute string is a single string that
denotes the interval's group; in this case, method returns a dictionary
with a single key-value pair, and key name is 'group'
@@ -412,4 +417,4 @@
for chrom_features in chroms_features_sorted:
for feature in chrom_features:
yield feature
-
\ No newline at end of file
+
diff -r f2abe519d05e6f4e01fcf9983ab3fb72f5009272 -r b0c9163490705bd872c2877070196056bc4e872e lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -7,6 +7,7 @@
"""
Compose the sequence of commands necessary to execute a job. This will
currently include:
+
- environment settings corresponding to any requirement tags
- preparing input files
- command line taken from job wrapper
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.