1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/762d4010a9a5/
changeset: 762d4010a9a5
user: dan
date: 2012-11-08 16:52:45
summary: Provide a warning message when uploading files to a toolshed repository and a tool_dependencies.xml has been provided, but tool_dependencies metadata has not been generated.
affected #: 1 file
diff -r d3054b066218eacd366e618cc97535d059d8bf6a -r 762d4010a9a534ce5237d7a0bb6d317a9d9501ac lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -160,6 +160,7 @@
# Get the new repository tip.
if tip == repository.tip:
message = 'No changes to repository. '
+ status = 'warning'
else:
if ( isgzip or isbz2 ) and uncompress_file:
uncompress_str = ' uncompressed and '
@@ -182,6 +183,16 @@
message += " %d files were removed from the repository root. " % len( files_to_remove )
kwd[ 'message' ] = message
set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
+ #provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to e.g. a requirement tag mismatch
+ if get_config_from_disk( 'tool_dependencies.xml', repo_dir ):
+ if repository.metadata_revisions:
+ metadata_dict = repository.metadata_revisions[0].metadata
+ else:
+ metadata_dict = {}
+ if 'tool_dependencies' not in metadata_dict:
+ message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml". '
+ status = 'warning'
+ log.debug( 'Error in tool dependencies for repository %s: %s.' % ( repository.id, repository.name ) )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( trans.app )
trans.response.send_redirect( web.url_for( controller='repository',
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d3054b066218/
changeset: d3054b066218
user: greg
date: 2012-11-08 16:32:30
summary: Add the new shed_tool_data_table_config setting to the tool shed's config object.
affected #: 1 file
diff -r b5ce9451c5d190f0507f3b2c484afe937f12a045 -r d3054b066218eacd366e618cc97535d059d8bf6a lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -48,6 +48,7 @@
self.tool_secret = kwargs.get( "tool_secret", "" )
self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "shed-tool-data" ), os.getcwd() )
self.tool_data_table_config_path = resolve_path( kwargs.get( 'tool_data_table_config_path', 'tool_data_table_conf.xml' ), self.root )
+ self.shed_tool_data_table_config = resolve_path( kwargs.get( 'shed_tool_data_table_config', 'shed_tool_data_table_conf.xml' ), self.root )
self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
# Location for dependencies
if 'tool_dependency_dir' in kwargs:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b5ce9451c5d1/
changeset: b5ce9451c5d1
user: greg
date: 2012-11-08 16:01:08
summary: Attempt to make sure .sample files included in an installed tool shed repository are only copied to ~/tool-data if they are sample data index files.
affected #: 1 file
diff -r 5ff899b77dffce59580991a4b1d4dfbcdc091090 -r b5ce9451c5d190f0507f3b2c484afe937f12a045 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -7,6 +7,7 @@
from galaxy.web.form_builder import SelectField
from galaxy.tools import parameters
from galaxy.datatypes.checkers import *
+from galaxy.datatypes.sniff import is_column_based
from galaxy.util.json import *
from galaxy.util import inflector
from galaxy.tools.search import ToolBoxSearch
@@ -546,15 +547,20 @@
shutil.copy( full_source_path, os.path.join( dest_path, copied_file ) )
def copy_sample_files( app, sample_files, tool_path=None, sample_files_copied=None, dest_path=None ):
"""
- Copy all files to dest_path in the local Galaxy environment that have not already been copied. Those that have been copied
- are contained in sample_files_copied. The default value for dest_path is ~/tool-data.
+ Copy all appropriate files to dest_path in the local Galaxy environment that have not already been copied. Those that have been copied
+ are contained in sample_files_copied. The default value for dest_path is ~/tool-data. We need to be careful to copy only appropriate
+ files here because tool shed repositories can contain files ending in .sample that should not be copied to the ~/tool-data directory.
"""
+ filenames_not_to_copy = [ 'tool_data_table_conf.xml.sample' ]
sample_files_copied = util.listify( sample_files_copied )
for filename in sample_files:
- if filename not in sample_files_copied:
+ filename_sans_path = os.path.split( filename )[ 1 ]
+ if filename_sans_path not in filenames_not_to_copy and filename not in sample_files_copied:
if tool_path:
filename=os.path.join( tool_path, filename )
- copy_sample_file( app, filename, dest_path=dest_path )
+ # Attempt to ensure we're copying an appropriate file.
+ if is_data_index_sample_file( filename ):
+ copy_sample_file( app, filename, dest_path=dest_path )
def create_repo_info_dict( repository, owner, repository_clone_url, changeset_revision, ctx_rev, metadata ):
repo_info_dict = {}
repo_info_dict[ repository.name ] = ( repository.description,
@@ -1539,9 +1545,11 @@
if shed_tool_conf == file_name:
return index, shed_tool_conf_dict
def get_tool_index_sample_files( sample_files ):
+ """Try to return the list of all appropriate tool data sample files included in the repository."""
tool_index_sample_files = []
for s in sample_files:
- if s.endswith( '.loc.sample' ):
+ # The problem with this is that Galaxy does not follow a standard naming convention for file names.
+ if s.endswith( '.loc.sample' ) or s.endswith( '.xml.sample' ) or s.endswith( '.txt.sample' ):
tool_index_sample_files.append( s )
return tool_index_sample_files
def get_tool_dependency( trans, id ):
@@ -1846,6 +1854,29 @@
parent_id=tool_version_using_parent_id.id )
sa_session.add( tool_version_association )
sa_session.flush()
+def is_data_index_sample_file( file_path ):
+ """
+ Attempt to determine if a .sample file is appropriate for copying to ~/tool-data when a tool shed repository is being installed
+ into a Galaxy instance.
+ """
+ # Currently most data index files are tabular, so check that first. We'll assume that if the file is tabular, it's ok to copy.
+ if is_column_based( file_path ):
+ return True
+ # If the file is any of the following, don't copy it.
+ if check_html( file_path ):
+ return False
+ if check_image( file_path ):
+ return False
+ if check_binary( name=file_path ):
+ return False
+ if is_bz2( file_path ):
+ return False
+ if is_gzip( file_path ):
+ return False
+ if check_zip( file_path ):
+ return False
+ # Default to copying the file if none of the above are true.
+ return True
def is_downloadable( metadata_dict ):
return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
def load_installed_datatype_converters( app, installed_repository_dict, deactivate=False ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5ff899b77dff/
changeset: 5ff899b77dff
user: dan
date: 2012-11-08 03:16:17
summary: Add shed_tool_data_table_conf.xml.sample to buildbot_setup.sh.
affected #: 1 file
diff -r 982b9522efca2abf46637870e06c7abf1204be6d -r 5ff899b77dffce59580991a4b1d4dfbcdc091090 buildbot_setup.sh
--- a/buildbot_setup.sh
+++ b/buildbot_setup.sh
@@ -68,6 +68,7 @@
datatypes_conf.xml.sample
universe_wsgi.ini.sample
tool_data_table_conf.xml.sample
+shed_tool_data_table_conf.xml.sample
migrated_tools_conf.xml.sample
tool-data/shared/ensembl/builds.txt.sample
tool-data/shared/igv/igv_build_sites.txt.sample
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ec51a727a497/
changeset: ec51a727a497
user: clements
date: 2012-11-02 07:25:39
summary: Modified docstrings so that Sphinx would not complain about them. However, I couldn't get Sphinx to be happy with all docstrings, so we are still getting 10 warnings (down from over 70 though).
affected #: 25 files
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/datatypes/converters/fastq_to_fqtoc.py
--- a/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py
+++ b/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py
@@ -6,11 +6,13 @@
def main():
"""
- The format of the file is JSON:
- { "sections" : [
- { "start" : "x", "end" : "y", "sequences" : "z" },
- ...
- ]}
+ The format of the file is JSON::
+
+ { "sections" : [
+ { "start" : "x", "end" : "y", "sequences" : "z" },
+ ...
+ ]}
+
This works only for UNCOMPRESSED fastq files. The Python GzipFile does not provide seekable
offsets via tell(), so clients just have to split the slow way
"""
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/datatypes/converters/interval_to_fli.py
--- a/lib/galaxy/datatypes/converters/interval_to_fli.py
+++ b/lib/galaxy/datatypes/converters/interval_to_fli.py
@@ -1,12 +1,16 @@
'''
Creates a feature location index (FLI) for a given BED/GFF file.
-FLI index has the form:
+FLI index has the form::
+
[line_length]
<symbol1_in_lowercase><tab><symbol1><tab><location><symbol2_in_lowercase><tab><symbol2><tab><location>
...
+
where location is formatted as:
+
contig:start-end
+
and symbols are sorted in lexigraphical order.
'''
@@ -94,4 +98,4 @@
out.close()
if __name__ == '__main__':
- main()
\ No newline at end of file
+ main()
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py
--- a/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py
+++ b/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py
@@ -78,10 +78,14 @@
"""
need to work with rgenetics composite datatypes
so in and out are html files with data in extrafiles path
- <command interpreter="python">
- pbed_ldreduced_converter.py '$input1.extra_files_path/$input1.metadata.base_name' '$winsize' '$winmove' '$r2thresh'
- '$output1' '$output1.files_path' 'plink'
- </command>
+
+ .. raw:: xml
+
+ <command interpreter="python">
+ pbed_ldreduced_converter.py '$input1.extra_files_path/$input1.metadata.base_name' '$winsize' '$winmove' '$r2thresh'
+ '$output1' '$output1.files_path' 'plink'
+ </command>
+
"""
nparm = 7
if len(sys.argv) < nparm:
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -800,11 +800,12 @@
def get_file_peek( file_name, is_multi_byte=False, WIDTH=256, LINE_COUNT=5, skipchars=[] ):
"""
- Returns the first LINE_COUNT lines wrapped to WIDTH::
+ Returns the first LINE_COUNT lines wrapped to WIDTH
- ## >>> fname = get_test_fname('4.bed')
- ## >>> get_file_peek(fname)
- ## 'chr22 30128507 31828507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +\n'
+ ## >>> fname = get_test_fname('4.bed')
+ ## >>> get_file_peek(fname)
+ ## 'chr22 30128507 31828507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +\n'
+
"""
# Set size for file.readline() to a negative number to force it to
# read until either a newline or EOF. Needed for datasets with very
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -46,6 +46,7 @@
process all data lines.
Items of interest:
+
1. We treat 'overwrite' as always True (we always want to set tabular metadata when called).
2. If a tabular file has no data, it will have one column of type 'str'.
3. We used to check only the first 100 lines when setting metadata and this class's
@@ -356,15 +357,18 @@
Determines whether the file is in SAM format
A file in SAM format consists of lines of tab-separated data.
- The following header line may be the first line:
- @QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL
- or
- @QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL OPT
+ The following header line may be the first line::
+
+ @QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL
+ or
+ @QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL OPT
+
Data in the OPT column is optional and can consist of tab-separated data
For complete details see http://samtools.sourceforge.net/SAM1.pdf
- Rules for sniffing as True:
+ Rules for sniffing as True::
+
There must be 11 or more columns of data on each line
Columns 2 (FLAG), 4(POS), 5 (MAPQ), 8 (MPOS), and 9 (ISIZE) must be numbers (9 can be negative)
We will only check that up to the first 5 alignments are correctly formatted.
@@ -579,10 +583,11 @@
A file in ELAND export format consists of lines of tab-separated data.
There is no header.
- Rules for sniffing as True:
- There must be 22 columns on each line
- LANE, TILEm X, Y, INDEX, READ_NO, SEQ, QUAL, POSITION, *STRAND, FILT must be correct
- We will only check that up to the first 5 alignments are correctly formatted.
+ Rules for sniffing as True::
+
+ - There must be 22 columns on each line
+ - LANE, TILEm X, Y, INDEX, READ_NO, SEQ, QUAL, POSITION, *STRAND, FILT must be correct
+ - We will only check that up to the first 5 alignments are correctly formatted.
"""
import gzip
try:
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -51,6 +51,7 @@
store, False otherwise.
FIELD DESCRIPTIONS (these apply to all the methods in this class):
+
:type obj: object
:param obj: A Galaxy object with an assigned database ID accessible via
the .id attribute.
@@ -118,6 +119,7 @@
"""
Deletes the object identified by `obj`.
See `exists` method for the description of other fields.
+
:type entire_dir: bool
:param entire_dir: If True, delete the entire directory pointed to by
extra_dir. For safety reasons, this option applies
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/quota/__init__.py
--- a/lib/galaxy/quota/__init__.py
+++ b/lib/galaxy/quota/__init__.py
@@ -41,6 +41,7 @@
def get_quota( self, user, nice_size=False ):
"""
Calculated like so:
+
1. Anonymous users get the default quota.
2. Logged in users start with the highest of their associated '='
quotas or the default quota, if there are no associated '='
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py
+++ b/lib/galaxy/security/__init__.py
@@ -173,17 +173,21 @@
which the request is sent. We cannot use trans.user_is_admin() because the controller is
what is important since admin users do not necessarily have permission to do things
on items outside of the admin view.
+
If cntrller is from the admin side ( e.g., library_admin ):
- -if item is public, all roles, including private roles, are legitimate.
- -if item is restricted, legitimate roles are derived from the users and groups associated
- with each role that is associated with the access permission ( i.e., DATASET_MANAGE_PERMISSIONS or
- LIBRARY_MANAGE ) on item. Legitimate roles will include private roles.
+
+ - if item is public, all roles, including private roles, are legitimate.
+ - if item is restricted, legitimate roles are derived from the users and groups associated
+ with each role that is associated with the access permission ( i.e., DATASET_MANAGE_PERMISSIONS or
+ LIBRARY_MANAGE ) on item. Legitimate roles will include private roles.
+
If cntrller is not from the admin side ( e.g., root, library ):
- -if item is public, all non-private roles, except for the current user's private role,
- are legitimate.
- -if item is restricted, legitimate roles are derived from the users and groups associated
- with each role that is associated with the access permission on item. Private roles, except
- for the current user's private role, will be excluded.
+
+ - if item is public, all non-private roles, except for the current user's private role,
+ are legitimate.
+ - if item is restricted, legitimate roles are derived from the users and groups associated
+ with each role that is associated with the access permission on item. Private roles, except
+ for the current user's private role, will be excluded.
"""
admin_controller = cntrller in [ 'library_admin' ]
roles = set()
@@ -1063,9 +1067,10 @@
comma-separated string of folder ids. This method works with the show_library_item()
method below, and it returns libraries for which the received user has permission to
perform the received actions. Here is an example call to this method to return all
- libraries for which the received user has LIBRARY_ADD permission:
- libraries = trans.app.security_agent.get_permitted_libraries( trans, user,
- [ trans.app.security_agent.permitted_actions.LIBRARY_ADD ] )
+ libraries for which the received user has LIBRARY_ADD permission::
+
+ libraries = trans.app.security_agent.get_permitted_libraries( trans, user,
+ [ trans.app.security_agent.permitted_actions.LIBRARY_ADD ] )
"""
all_libraries = trans.sa_session.query( trans.app.model.Library ) \
.filter( trans.app.model.Library.table.c.deleted == False ) \
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -250,10 +250,11 @@
def set_environment( app, elem, tool_shed_repository ):
"""
Create a ToolDependency to set an environment variable. This is different from the process used to set an environment variable that is associated
- with a package. An example entry in a tool_dependencies.xml file is:
- <set_environment version="1.0">
- <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
- </set_environment>
+ with a package. An example entry in a tool_dependencies.xml file is::
+
+ <set_environment version="1.0">
+ <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
+ </set_environment>
"""
sa_session = app.model.context.current
tool_dependency = None
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -112,16 +112,20 @@
def init_tools( self, config_filename ):
"""
Read the configuration file and load each tool. The following tags are currently supported:
- <toolbox>
- <tool file="data_source/upload.xml"/> # tools outside sections
- <label text="Basic Tools" id="basic_tools" /> # labels outside sections
- <workflow id="529fd61ab1c6cc36" /> # workflows outside sections
- <section name="Get Data" id="getext"> # sections
- <tool file="data_source/biomart.xml" /> # tools inside sections
- <label text="In Section" id="in_section" /> # labels inside sections
- <workflow id="adb5f5c93f827949" /> # workflows inside sections
- </section>
- </toolbox>
+
+ .. raw:: xml
+
+ <toolbox>
+ <tool file="data_source/upload.xml"/> # tools outside sections
+ <label text="Basic Tools" id="basic_tools" /> # labels outside sections
+ <workflow id="529fd61ab1c6cc36" /> # workflows outside sections
+ <section name="Get Data" id="getext"> # sections
+ <tool file="data_source/biomart.xml" /> # tools inside sections
+ <label text="In Section" id="in_section" /> # labels inside sections
+ <workflow id="adb5f5c93f827949" /> # workflows inside sections
+ </section>
+ </toolbox>
+
"""
if self.app.config.get_bool( 'enable_tool_tags', False ):
log.info("removing all tool tag associations (" + str( self.sa_session.query( self.app.model.ToolTagAssociation ).count() ) + ")" )
@@ -740,7 +744,8 @@
class ToolOutput( object ):
"""
Represents an output datasets produced by a tool. For backward
- compatibility this behaves as if it were the tuple:
+ compatibility this behaves as if it were the tuple::
+
(format, metadata_source, parent)
"""
@@ -1079,7 +1084,7 @@
else:
self.trackster_conf = None
def parse_inputs( self, root ):
- """
+ r"""
Parse the "<inputs>" element and create appropriate `ToolParameter`s.
This implementation supports multiple pages and grouping constructs.
"""
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -31,10 +31,11 @@
def load_from_config_file( self, config_filename, tool_data_path, from_shed_config=False ):
"""
This method is called under 3 conditions:
- 1) When the ToolDataTableManager is initialized (see __init__ above).
- 2) Just after the ToolDataTableManager is initialized and the additional entries defined by shed_tool_data_table_conf.xml
+
+ 1. When the ToolDataTableManager is initialized (see __init__ above).
+ 2. Just after the ToolDataTableManager is initialized and the additional entries defined by shed_tool_data_table_conf.xml
are being loaded into the ToolDataTableManager.data_tables.
- 3) When a tool shed repository that includes a tool_data_table_conf.xml.sample file is being installed into a local
+ 3. When a tool shed repository that includes a tool_data_table_conf.xml.sample file is being installed into a local
Galaxy instance. In this case, we have 2 entry types to handle, files whose root tag is <tables>, for example:
"""
tree = util.parse_xml( config_filename )
@@ -57,20 +58,24 @@
def add_new_entries_from_config_file( self, config_filename, tool_data_path, shed_tool_data_table_config, persist=False ):
"""
This method is called when a tool shed repository that includes a tool_data_table_conf.xml.sample file is being
- installed into a local galaxy instance. We have 2 cases to handle, files whose root tag is <tables>, for example:
- <tables>
+ installed into a local galaxy instance. We have 2 cases to handle, files whose root tag is <tables>, for example::
+
+ <tables>
+ <!-- Location of Tmap files -->
+ <table name="tmap_indexes" comment_char="#">
+ <columns>value, dbkey, name, path</columns>
+ <file path="tool-data/tmap_index.loc" />
+ </table>
+ </tables>
+
+ and files whose root tag is <table>, for example::
+
<!-- Location of Tmap files --><table name="tmap_indexes" comment_char="#"><columns>value, dbkey, name, path</columns><file path="tool-data/tmap_index.loc" /></table>
- </tables>
- and files whose root tag is <table>, for example:
- <!-- Location of Tmap files -->
- <table name="tmap_indexes" comment_char="#">
- <columns>value, dbkey, name, path</columns>
- <file path="tool-data/tmap_index.loc" />
- </table>
+
"""
tree = util.parse_xml( config_filename )
root = tree.getroot()
@@ -119,13 +124,14 @@
class TabularToolDataTable( ToolDataTable ):
"""
Data stored in a tabular / separated value format on disk, allows multiple
- files to be merged but all must have the same column definitions.
+ files to be merged but all must have the same column definitions::
- <table type="tabular" name="test">
- <column name='...' index = '...' />
- <file path="..." />
- <file path="..." />
- </table>
+ <table type="tabular" name="test">
+ <column name='...' index = '...' />
+ <file path="..." />
+ <file path="..." />
+ </table>
+
"""
type_key = 'tabular'
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/tools/parameters/dynamic_options.py
--- a/lib/galaxy/tools/parameters/dynamic_options.py
+++ b/lib/galaxy/tools/parameters/dynamic_options.py
@@ -64,16 +64,20 @@
Type: data_meta
- When no 'from_' source has been specified in the <options> tag, this will populate the options list with (meta_value, meta_value, False).
+ When no 'from' source has been specified in the <options> tag, this will populate the options list with (meta_value, meta_value, False).
Otherwise, options which do not match the metadata value in the column are discarded.
Required Attributes:
- ref: Name of input dataset
- key: Metadata key to use for comparison
- column: column in options to compare with (not required when not associated with input options)
+
+ - ref: Name of input dataset
+ - key: Metadata key to use for comparison
+ - column: column in options to compare with (not required when not associated with input options)
+
Optional Attributes:
- multiple: Option values are multiple, split column by separator (True)
- separator: When multiple split by this (,)
+
+ - multiple: Option values are multiple, split column by separator (True)
+ - separator: When multiple split by this (,)
+
"""
def __init__( self, d_option, elem ):
Filter.__init__( self, d_option, elem )
@@ -132,12 +136,16 @@
Type: param_value
Required Attributes:
- ref: Name of input value
- column: column in options to compare with
+
+ - ref: Name of input value
+ - column: column in options to compare with
+
Optional Attributes:
- keep: Keep columns matching value (True)
- Discard columns matching value (False)
- ref_attribute: Period (.) separated attribute chain of input (ref) to use as value for filter
+
+ - keep: Keep columns matching value (True)
+ Discard columns matching value (False)
+ - ref_attribute: Period (.) separated attribute chain of input (ref) to use as value for filter
+
"""
def __init__( self, d_option, elem ):
Filter.__init__( self, d_option, elem )
@@ -294,13 +302,15 @@
Type: remove_value
- Required Attributes:
+ Required Attributes::
+
value: value to remove from select list
or
ref: param to refer to
or
meta_ref: dataset to refer to
key: metadata key to compare to
+
"""
def __init__( self, d_option, elem ):
Filter.__init__( self, d_option, elem )
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/util/heartbeat.py
--- a/lib/galaxy/util/heartbeat.py
+++ b/lib/galaxy/util/heartbeat.py
@@ -134,7 +134,9 @@
Scans a given backtrace stack frames, returns a single
quadraple of [filename, line, function-name, text] of
the single, deepest, most interesting frame.
- Interesting being:
+
+ Interesting being::
+
inside the galaxy source code ("/lib/galaxy"),
prefreably not an egg.
"""
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -2397,11 +2397,13 @@
def update_existing_tool_dependency( app, repository, original_dependency_dict, new_dependencies_dict ):
"""
Update an exsiting tool dependency whose definition was updated in a change set pulled by a Galaxy administrator when getting updates
- to an installed tool shed repository. The original_dependency_dict is a single tool dependency definition, an example of which is:
- {"name": "bwa",
- "readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n ",
- "type": "package",
- "version": "0.6.2"}
+ to an installed tool shed repository. The original_dependency_dict is a single tool dependency definition, an example of which is::
+
+ {"name": "bwa",
+ "readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n ",
+ "type": "package",
+ "version": "0.6.2"}
+
The new_dependencies_dict is the dictionary generated by the generate_tool_dependency_metadata method.
"""
new_tool_dependency = None
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/util/topsort.py
--- a/lib/galaxy/util/topsort.py
+++ b/lib/galaxy/util/topsort.py
@@ -9,18 +9,24 @@
value is a list, representing a total ordering that respects all
the input constraints.
E.g.,
+
topsort( [(1,2), (3,3)] )
+
may return any of (but nothing other than)
+
[3, 1, 2]
[1, 3, 2]
[1, 2, 3]
+
because those are the permutations of the input elements that
respect the "1 precedes 2" and "3 precedes 3" input constraints.
Note that a constraint of the form (x, x) is really just a trick
to make sure x appears *somewhere* in the output list.
If there's a cycle in the constraints, say
+
topsort( [(1,2), (2,1)] )
+
then CycleError is raised, and the exception object supports
many methods to help analyze and break the cycles. This requires
a good deal more code than topsort itself!
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/visualization/data_providers/genome.py
--- a/lib/galaxy/visualization/data_providers/genome.py
+++ b/lib/galaxy/visualization/data_providers/genome.py
@@ -610,11 +610,16 @@
def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
"""
- Returns a dict with the following attributes:
+ Returns a dict with the following attributes::
+
data - a list of variants with the format
+
+ .. raw:: text
+
[<guid>, <start>, <end>, <name>, cigar, seq]
message - error/informative message
+
"""
rval = []
message = None
@@ -893,13 +898,17 @@
def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
"""
- Returns a dict with the following attributes:
+ Returns a dict with the following attributes::
+
data - a list of reads with the format
- [<guid>, <start>, <end>, <name>, <read_1>, <read_2>, [empty], <mapq_scores>]
+ [<guid>, <start>, <end>, <name>, <read_1>, <read_2>, [empty], <mapq_scores>]
+
where <read_1> has the format
[<start>, <end>, <cigar>, <strand>, <read_seq>]
+
and <read_2> has the format
[<start>, <end>, <cigar>, <strand>, <read_seq>]
+
Field 7 is empty so that mapq scores' location matches that in single-end reads.
For single-end reads, read has format:
[<guid>, <start>, <end>, <name>, <cigar>, <strand>, <seq>, <mapq_score>]
@@ -910,6 +919,7 @@
max_low - lowest coordinate for the returned reads
max_high - highest coordinate for the returned reads
message - error/informative message
+
"""
# No iterator indicates no reads.
if iterator is None:
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/visualization/data_providers/phyloviz/newickparser.py
--- a/lib/galaxy/visualization/data_providers/phyloviz/newickparser.py
+++ b/lib/galaxy/visualization/data_providers/phyloviz/newickparser.py
@@ -112,9 +112,12 @@
def parseNode(self, string, depth):
""" Recursive method for parsing newick string, works by stripping down the string into substring
of newick contained with brackers, which is used to call itself.
- Eg ... ( A, B, (D, E)C, F, G ) ...
+
+ Eg ... ( A, B, (D, E)C, F, G ) ...
+
We will make the preceeding nodes first A, B, then the internal node C, its children D, E,
- and finally the succeeding nodes F, G"""
+ and finally the succeeding nodes F, G
+ """
# Base case where there is only an empty string
if string == "":
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -708,9 +708,12 @@
selected_value='none', refresh_on_change=False, multiple=False, display=None, size=None ):
"""
Build a SelectField given a set of objects. The received params are:
+
- objs: the set of objects used to populate the option list
- label_attr: the attribute of each obj (e.g., name, email, etc ) whose value is used to populate each option label.
+
- If the string 'self' is passed as label_attr, each obj in objs is assumed to be a string, so the obj itself is used
+
- select_field_name: the name of the SelectField
- initial_value: the value of the first option in the SelectField - allows for an option telling the user to select something
- selected_value: the value of the currently selected option
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -624,6 +624,7 @@
def handle_user_login( self, user ):
"""
Login a new user (possibly newly created)
+
- create a new session
- associate new session with user
- if old session had a history and it was not associated with a user, associate it with the new session,
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -140,7 +140,7 @@
def get_database_engine_options( kwargs ):
"""
Allow options for the SQLAlchemy database engine to be passed by using
- the prefix "database_engine_option_".
+ the prefix "database_engine_option".
"""
conversions = {
'convert_unicode': string_as_bool,
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/webapps/galaxy/api/genomes.py
--- a/lib/galaxy/webapps/galaxy/api/genomes.py
+++ b/lib/galaxy/webapps/galaxy/api/genomes.py
@@ -48,25 +48,25 @@
POST /api/genomes
Download and/or index a genome.
- Parameters:
+ Parameters::
- dbkey DB key of the build to download, ignored unless 'UCSC' is specified as the source
- ncbi_name NCBI's genome identifier, ignored unless NCBI is specified as the source
- ensembl_dbkey Ensembl's genome identifier, ignored unless Ensembl is specified as the source
- url_dbkey DB key to use for this build, ignored unless URL is specified as the source
- source Data source for this build. Can be: UCSC, Ensembl, NCBI, URL
- indexers POST array of indexers to run after downloading (indexers[] = first, indexers[] = second, ...)
- func Allowed values:
- 'download' Download and index
- 'index' Index only
-
- Returns:
+ dbkey DB key of the build to download, ignored unless 'UCSC' is specified as the source
+ ncbi_name NCBI's genome identifier, ignored unless NCBI is specified as the source
+ ensembl_dbkey Ensembl's genome identifier, ignored unless Ensembl is specified as the source
+ url_dbkey DB key to use for this build, ignored unless URL is specified as the source
+ source Data source for this build. Can be: UCSC, Ensembl, NCBI, URL
+ indexers POST array of indexers to run after downloading (indexers[] = first, indexers[] = second, ...)
+ func Allowed values:
+ 'download' Download and index
+ 'index' Index only
+
+ Returns::
- If no error:
- dict( status: 'ok', job: <job ID> )
+ If no error:
+ dict( status: 'ok', job: <job ID> )
- If error:
- dict( status: 'error', error: <error message> )
+ If error:
+ dict( status: 'error', error: <error message> )
"""
params = util.Params( payload )
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -13,12 +13,15 @@
@web.expose_api
def index( self, trans, **kwds ):
"""
- GET /api/tools: returns a list of tools defined by parameters
+ GET /api/tools: returns a list of tools defined by parameters::
+
parameters:
+
in_panel - if true, tools are returned in panel structure,
including sections and labels
trackster - if true, only tools that are compatible with
Trackster are returned
+
"""
# Read params.
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -1980,8 +1980,8 @@
def edgelist_for_workflow_steps( steps ):
"""
- Create a list of tuples representing edges between `WorkflowSteps` based
- on associated `WorkflowStepConnection`s
+ Create a list of tuples representing edges between ``WorkflowSteps`` based
+ on associated ``WorkflowStepConnection``s
"""
edges = []
steps_to_index = dict( ( step, i ) for i, step in enumerate( steps ) )
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/galaxy/webapps/reports/config.py
--- a/lib/galaxy/webapps/reports/config.py
+++ b/lib/galaxy/webapps/reports/config.py
@@ -56,7 +56,7 @@
def get_database_engine_options( kwargs ):
"""
Allow options for the SQLAlchemy database engine to be passed by using
- the prefix "database_engine_option_".
+ the prefix "database_engine_option".
"""
conversions = {
'convert_unicode': string_as_bool,
diff -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 -r ec51a727a497d5912fdd4269571d7c26d7523436 lib/mimeparse.py
--- a/lib/mimeparse.py
+++ b/lib/mimeparse.py
@@ -39,18 +39,21 @@
return (type.strip(), subtype.strip(), params)
def parse_media_range(range):
- """Carves up a media range and returns a tuple of the
- (type, subtype, params) where 'params' is a dictionary
- of all the parameters for the media range.
- For example, the media range 'application/*;q=0.5' would
- get parsed into:
+ r"""
+ Carves up a media range and returns a tuple of the
+ (type, subtype, params) where 'params' is a dictionary
+ of all the parameters for the media range.
+ For example, the media range 'application/*;q=0.5' would
+ get parsed into:
- ('application', '*', {'q', '0.5'})
+ .. raw:: text
- In addition this function also guarantees that there
- is a value for 'q' in the params dictionary, filling it
- in with a proper default if necessary.
- """
+ ('application', '*', {'q', '0.5'})
+
+ In addition this function also guarantees that there
+ is a value for 'q' in the params dictionary, filling it
+ in with a proper default if necessary.
+ """
(type, subtype, params) = parse_mime_type(range)
if not params.has_key('q') or not params['q'] or \
not float(params['q']) or float(params['q']) > 1\
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/bc5fa254bafc/
changeset: bc5fa254bafc
user: greg
date: 2012-11-07 22:36:28
summary: Maintain entries for Galaxy's ToolDataTableManager acquired from installed tool shed repositories that contain a valid file named tool_data_table_conf.xml.sample in a separate config file named shed_tool_data_table_conf.xml. This will ensure that manual edits to the original tool_data_table_conf.xml will not be munged when Galaxy's tool shed installation process automatically adds entries into the file. This enhancement also includes a bug fix where ToolDataTableEntries that should have been persisted to the XML file were not being handled correctly.
affected #: 11 files
diff -r dbff28bde968a05b787dd53cfca182c53a81eeb7 -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -76,8 +76,13 @@
self.genomes = Genomes( self )
# Data providers registry.
self.data_provider_registry = DataProviderRegistry()
- # Tool data tables
- self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_path, self.config.tool_data_table_config_path )
+ # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
+ self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( tool_data_path=self.config.tool_data_path,
+ config_filename=self.config.tool_data_table_config_path )
+ # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
+ self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
+ tool_data_path=self.tool_data_tables.tool_data_path,
+ from_shed_config=True )
# Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
tool_configs = self.config.tool_configs
if self.config.migrated_tools_config not in tool_configs:
diff -r dbff28bde968a05b787dd53cfca182c53a81eeb7 -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -66,6 +66,7 @@
tcf = 'tool_conf.xml'
self.tool_configs = [ resolve_path( p, self.root ) for p in listify( tcf ) ]
self.tool_data_table_config_path = resolve_path( kwargs.get( 'tool_data_table_config_path', 'tool_data_table_conf.xml' ), self.root )
+ self.shed_tool_data_table_config = resolve_path( kwargs.get( 'shed_tool_data_table_config', 'shed_tool_data_table_conf.xml' ), self.root )
self.enable_tool_shed_check = string_as_bool( kwargs.get( 'enable_tool_shed_check', False ) )
try:
self.hours_between_check = int( kwargs.get( 'hours_between_check', 12 ) )
diff -r dbff28bde968a05b787dd53cfca182c53a81eeb7 -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -184,7 +184,8 @@
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
- updating_installed_repository=False )
+ updating_installed_repository=False,
+ persist=True )
tool_shed_repository.metadata = metadata_dict
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
diff -r dbff28bde968a05b787dd53cfca182c53a81eeb7 -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 lib/galaxy/tool_shed/migrate/common.py
--- a/lib/galaxy/tool_shed/migrate/common.py
+++ b/lib/galaxy/tool_shed/migrate/common.py
@@ -41,8 +41,13 @@
self.datatypes_registry = galaxy.datatypes.registry.Registry()
# Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
- # Tool data tables
- self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_path, self.config.tool_data_table_config_path )
+ # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
+ self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( tool_data_path=self.config.tool_data_path,
+ config_filename=self.config.tool_data_table_config_path )
+ # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
+ self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
+ tool_data_path=self.tool_data_tables.tool_data_path,
+ from_shed_config=True )
# Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
tool_configs = self.config.tool_configs
if self.config.migrated_tools_config not in tool_configs:
diff -r dbff28bde968a05b787dd53cfca182c53a81eeb7 -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -15,17 +15,28 @@
"""Manages a collection of tool data tables"""
def __init__( self, tool_data_path, config_filename=None ):
self.tool_data_path = tool_data_path
- self.data_tables = {}
- # Store config elements for on-the-fly persistence.
- self.data_table_elems = []
+ # This stores all defined data table entries from both the tool_data_table_conf.xml file and the shed_tool_data_table_conf.xml file
+ # at server startup. If tool shed repositories are installed that contain a valid file named tool_data_table_conf.xml.sample, entries
+ # from that file are inserted into this dict at the time of installation.
+ self.data_tables = {}
+ # Store config elements for on-the-fly persistence to the defined shed_tool_data_table_config file name.
+ self.shed_data_table_elems = []
self.data_table_elem_names = []
if config_filename:
- self.load_from_config_file( config_filename, self.tool_data_path )
+ self.load_from_config_file( config_filename, self.tool_data_path, from_shed_config=False )
def __getitem__( self, key ):
return self.data_tables.__getitem__( key )
def __contains__( self, key ):
return self.data_tables.__contains__( key )
- def load_from_config_file( self, config_filename, tool_data_path ):
+ def load_from_config_file( self, config_filename, tool_data_path, from_shed_config=False ):
+ """
+ This method is called under 3 conditions:
+ 1) When the ToolDataTableManager is initialized (see __init__ above).
+ 2) Just after the ToolDataTableManager is initialized and the additional entries defined by shed_tool_data_table_conf.xml
+ are being loaded into the ToolDataTableManager.data_tables.
+ 3) When a tool shed repository that includes a tool_data_table_conf.xml.sample file is being installed into a local
+ Galaxy instance. In this case, we have 2 entry types to handle, files whose root tag is <tables>, for example:
+ """
tree = util.parse_xml( config_filename )
root = tree.getroot()
table_elems = []
@@ -36,13 +47,14 @@
table_elem_name = table_elem.get( 'name', None )
if table_elem_name and table_elem_name not in self.data_table_elem_names:
self.data_table_elem_names.append( table_elem_name )
- self.data_table_elems.append( table_elem )
+ if from_shed_config:
+ self.shed_data_table_elems.append( table_elem )
table = tool_data_table_types[ type ]( table_elem, tool_data_path )
if table.name not in self.data_tables:
self.data_tables[ table.name ] = table
log.debug( "Loaded tool data table '%s'", table.name )
return table_elems
- def add_new_entries_from_config_file( self, config_filename, tool_data_path, tool_data_table_config_path, persist=False ):
+ def add_new_entries_from_config_file( self, config_filename, tool_data_path, shed_tool_data_table_config, persist=False ):
"""
This method is called when a tool shed repository that includes a tool_data_table_conf.xml.sample file is being
installed into a local galaxy instance. We have 2 cases to handle, files whose root tag is <tables>, for example:
@@ -65,7 +77,9 @@
# Make a copy of the current list of data_table_elem_names so we can persist later if changes to the config file are necessary.
original_data_table_elem_names = [ name for name in self.data_table_elem_names ]
if root.tag == 'tables':
- table_elems = self.load_from_config_file( config_filename, tool_data_path )
+ table_elems = self.load_from_config_file( config_filename=config_filename,
+ tool_data_path=tool_data_path,
+ from_shed_config=True )
else:
table_elems = []
type = root.get( 'type', 'tabular' )
@@ -74,23 +88,22 @@
table_elem_name = root.get( 'name', None )
if table_elem_name and table_elem_name not in self.data_table_elem_names:
self.data_table_elem_names.append( table_elem_name )
- self.data_table_elems.append( root )
+ self.shed_data_table_elems.append( root )
table = tool_data_table_types[ type ]( root, tool_data_path )
if table.name not in self.data_tables:
self.data_tables[ table.name ] = table
log.debug( "Added new tool data table '%s'", table.name )
if persist and self.data_table_elem_names != original_data_table_elem_names:
# Persist Galaxy's version of the changed tool_data_table_conf.xml file.
- self.to_xml_file( tool_data_table_config_path )
+ self.to_xml_file( shed_tool_data_table_config )
return table_elems
- def to_xml_file( self, tool_data_table_config_path ):
- """Write the current in-memory version of the tool_data-table_conf.xml file to disk."""
- full_path = os.path.abspath( tool_data_table_config_path )
+ def to_xml_file( self, shed_tool_data_table_config ):
+ """Write the current in-memory version of the shed_tool_data_table_conf.xml file to disk."""
+ full_path = os.path.abspath( shed_tool_data_table_config )
fd, filename = tempfile.mkstemp()
os.write( fd, '<?xml version="1.0"?>\n' )
- os.write( fd, "<!-- Use the file tool_data_table_conf.xml.oldlocstyle if you don't want to update your loc files as changed in revision 4550:535d276c92bc-->\n" )
os.write( fd, '<tables>\n' )
- for elem in self.data_table_elems:
+ for elem in self.shed_data_table_elems:
os.write( fd, '%s' % util.xml_to_string( elem ) )
os.write( fd, '</tables>\n' )
os.close( fd )
diff -r dbff28bde968a05b787dd53cfca182c53a81eeb7 -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -753,12 +753,15 @@
tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ]
return tool_dependencies_dict
def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, shed_config_dict={}, relative_install_dir=None, repository_files_dir=None,
- resetting_all_metadata_on_repository=False, updating_installed_repository=False ):
+ resetting_all_metadata_on_repository=False, updating_installed_repository=False, persist=False ):
"""
Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip,
the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
disk files, so the value of repository_files_dir will not always be repository.repo_path (it could be an absolute path to a temporary directory
containing a clone). If it is an absolute path, the value of relative_install_dir must contain repository.repo_path.
+
+ The value of persist will be True when the installed repository contains a valid tool_data_table_conf.xml.sample file, in which case the entries
+ should ultimately be persisted to the file referred to by app.config.shed_tool_data_table_config.
"""
if updating_installed_repository:
# Keep the original tool shed repository metadata if setting metadata on a repository installed into a local Galaxy instance for which
@@ -810,9 +813,9 @@
relative_path, filename = os.path.split( sample_file )
if filename == 'tool_data_table_conf.xml.sample':
new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( config_filename=sample_file,
- tool_data_path=app.config.tool_data_path,
- tool_data_table_config_path=app.config.tool_data_table_config_path,
- persist=False )
+ tool_data_path=original_tool_data_path,
+ shed_tool_data_table_config=app.config.shed_tool_data_table_config,
+ persist=persist )
for root, dirs, files in os.walk( files_dir ):
if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
if '.hg' in dirs:
@@ -1768,15 +1771,15 @@
return tool, message, sample_files
def handle_sample_tool_data_table_conf_file( app, filename, persist=False ):
"""
- Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary. If persist is True (should only occur)
- if call is from the Galaxy side (not the tool shed), the new entries will be appended to Galaxy's tool_data_table_conf.xml file on disk.
+ Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary. If persist is True (should only occur
+ if call is from the Galaxy side, not the tool shed), the new entries will be appended to Galaxy's shed_tool_data_table_conf.xml file on disk.
"""
error = False
message = ''
try:
new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( config_filename=filename,
tool_data_path=app.config.tool_data_path,
- tool_data_table_config_path=app.config.tool_data_table_config_path,
+ shed_tool_data_table_config=app.config.shed_tool_data_table_config,
persist=persist )
except Exception, e:
message = str( e )
@@ -2143,7 +2146,8 @@
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
- updating_installed_repository=False )
+ updating_installed_repository=False,
+ persist=False )
repository.metadata = metadata_dict
if metadata_dict != original_metadata_dict:
update_in_shed_tool_config( trans.app, repository )
@@ -2221,7 +2225,8 @@
relative_install_dir=repo_dir,
repository_files_dir=work_dir,
resetting_all_metadata_on_repository=True,
- updating_installed_repository=False )
+ updating_installed_repository=False,
+ persist=False )
if current_metadata_dict:
if not metadata_changeset_revision and not metadata_dict:
# We're at the first change set in the change log.
diff -r dbff28bde968a05b787dd53cfca182c53a81eeb7 -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -10,9 +10,8 @@
from galaxy.util.shed_util import generate_clone_url_for_repository_in_tool_shed, generate_message_for_invalid_tools, generate_metadata_for_changeset_revision
from galaxy.util.shed_util import get_changectx_for_changeset, get_config_from_disk, get_configured_ui, get_file_context_from_ctx, get_named_tmpfile_from_ctx
from galaxy.util.shed_util import get_parent_id, get_repository_in_tool_shed, get_repository_metadata_by_changeset_revision
-from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_disk, handle_sample_files_and_load_tool_from_tmp_config
-from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH, is_downloadable, load_tool_from_config, remove_dir
-from galaxy.util.shed_util import reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path
+from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_disk, handle_sample_files_and_load_tool_from_tmp_config, INITIAL_CHANGELOG_HASH
+from galaxy.util.shed_util import is_downloadable, load_tool_from_config, remove_dir, reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path
from galaxy.web.base.controller import *
from galaxy.web.base.controllers.admin import *
from galaxy.webapps.community import model
@@ -640,7 +639,8 @@
relative_install_dir=repo_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
- updating_installed_repository=False )
+ updating_installed_repository=False,
+ persist=False )
if metadata_dict:
downloadable = is_downloadable( metadata_dict )
repository_metadata = None
diff -r dbff28bde968a05b787dd53cfca182c53a81eeb7 -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -447,6 +447,12 @@
@web.expose
@web.require_admin
def deactivate_or_uninstall_repository( self, trans, **kwd ):
+ """
+ Handle all changes when a tool shed repository is being deactivated or uninstalled. Notice that if the repository contents include
+ a file named tool_data_table_conf.xml.sample, it's entries are not removed from the defined config.shed_tool_data_table_config. This
+ is because it becomes a bit complex to determine if other installed repositories include tools that require the same entry. For now
+ we'll never delete entries from config.shed_tool_data_table_config, but we may choose to do so in the future if it becomes necessary.
+ """
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
@@ -753,7 +759,8 @@
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
- updating_installed_repository=False )
+ updating_installed_repository=False,
+ persist=True )
tool_shed_repository.metadata = metadata_dict
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
@@ -1491,7 +1498,8 @@
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
- updating_installed_repository=False )
+ updating_installed_repository=False,
+ persist=False )
repository.metadata = metadata_dict
if metadata_dict != original_metadata_dict:
update_in_shed_tool_config( trans.app, repository )
@@ -1675,7 +1683,8 @@
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
- updating_installed_repository=True )
+ updating_installed_repository=True,
+ persist=True )
repository.metadata = metadata_dict
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
diff -r dbff28bde968a05b787dd53cfca182c53a81eeb7 -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 run.sh
--- a/run.sh
+++ b/run.sh
@@ -13,6 +13,7 @@
reports_wsgi.ini.sample
shed_tool_conf.xml.sample
tool_conf.xml.sample
+ shed_tool_data_table_conf.xml.sample
tool_data_table_conf.xml.sample
tool_sheds_conf.xml.sample
openid_conf.xml.sample
diff -r dbff28bde968a05b787dd53cfca182c53a81eeb7 -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 shed_tool_data_table_conf.xml.sample
--- /dev/null
+++ b/shed_tool_data_table_conf.xml.sample
@@ -0,0 +1,3 @@
+<?xml version="1.0"?>
+<tables>
+</tables>
diff -r dbff28bde968a05b787dd53cfca182c53a81eeb7 -r bc5fa254bafc981c70bdffd394e9c44c8dda4ab8 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -149,6 +149,16 @@
#enable_tool_shed_check = False
#hours_between_check = 12
+# XML config file that contains data table entries for the ToolDataTableManager. This file is manually
+# maintained by the Galaxy administrator.
+#tool_data_table_config_path = tool_data_table_conf.xml
+
+# XML config file that contains additional data table entries for the ToolDataTableManager. This file
+# is automatically generated based on the current installed tool shed repositories that contain valid
+# tool_data_table_conf.xml.sample files. At the time of installation, these entries are automatically
+# added to the following file, which is parsed and applied to the ToolDataTableManager at server start up.
+#shed_tool_data_table_config = shed_tool_data_table_conf.xml
+
# Directory where data used by tools is located, see the samples in that
# directory and the wiki for help:
# http://wiki.g2.bx.psu.edu/Admin/Data%20Integration
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/dbff28bde968/
changeset: dbff28bde968
user: dan
date: 2012-11-07 19:50:06
summary: Allow unhiding all datasets in the current history.
affected #: 3 files
diff -r 932585f1dd8d67c28d2c22003af0d9ae318ef947 -r dbff28bde968a05b787dd53cfca182c53a81eeb7 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -694,6 +694,9 @@
@property
def get_disk_size_bytes( self ):
return self.get_disk_size( nice_size=False )
+ def unhide_datasets( self ):
+ for dataset in self.datasets:
+ dataset.mark_unhidden()
def get_disk_size( self, nice_size=False ):
# unique datasets only
db_session = object_session( self )
diff -r 932585f1dd8d67c28d2c22003af0d9ae318ef947 -r dbff28bde968a05b787dd53cfca182c53a81eeb7 lib/galaxy/webapps/galaxy/controllers/history.py
--- a/lib/galaxy/webapps/galaxy/controllers/history.py
+++ b/lib/galaxy/webapps/galaxy/controllers/history.py
@@ -550,6 +550,20 @@
return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] )
@web.expose
+ def unhide_datasets( self, trans, current=False, ids=None ):
+ """Unhide the datasets in the active history -- this does not require a logged in user."""
+ if not ids and util.string_as_bool( current ):
+ histories = [ trans.get_history() ]
+ refresh_frames = ['history']
+ else:
+ raise NotImplementedError( "You can currently only unhide all the datasets of the current history." )
+ for history in histories:
+ history.unhide_datasets()
+ trans.sa_session.add( history )
+ trans.sa_session.flush()
+ return trans.show_ok_message( "Your datasets have been unhidden.", refresh_frames=refresh_frames )
+
+ @web.expose
@web.require_login( "rate items" )
@web.json
def rate_async( self, trans, id, rating ):
diff -r 932585f1dd8d67c28d2c22003af0d9ae318ef947 -r dbff28bde968a05b787dd53cfca182c53a81eeb7 templates/root/index.mako
--- a/templates/root/index.mako
+++ b/templates/root/index.mako
@@ -43,6 +43,11 @@
"${_("Show Hidden Datasets")}": function() {
galaxy_history.location = "${h.url_for( controller='root', action='history', show_hidden=True)}";
},
+ "${_("Unhide Hidden Datasets")}": function() {
+ if ( confirm( "Really unhide all hidden datasets?" ) ) {
+ galaxy_main.location = "${h.url_for( controller='history', action='unhide_datasets', current=True )}";
+ }
+ },
"${_("Purge Deleted Datasets")}": function() {
if ( confirm( "Really delete all deleted datasets permanently? This cannot be undone." ) ) {
galaxy_main.location = "${h.url_for( controller='history', action='purge_deleted_datasets' )}";
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.