galaxy-commits
commit/galaxy-central: greg: Send the persisted data types registry instead of None as the value of the datatypes_config parameter to the setup_external_metadata method.
by Bitbucket 18 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/da4342bc04c7/
changeset: da4342bc04c7
user: greg
date: 2011-12-18 15:16:52
summary: Send the persisted data types registry instead of None as the value of the datatypes_config parameter to the setup_external_metadata method when creating the cmd_line in the SetMetadataToolAction.execute() method. This eliminates the use of the hard-coded 'datatypes_conf.xml' file name when setting metadata externally.
affected #: 1 file
diff -r be0a75fd565845be5d6a9719552013548bb6a82f -r da4342bc04c779a50f50431fbec87d3d0c56576b lib/galaxy/tools/actions/metadata.py
--- a/lib/galaxy/tools/actions/metadata.py
+++ b/lib/galaxy/tools/actions/metadata.py
@@ -51,7 +51,7 @@
dataset_files_path = trans.app.model.Dataset.file_path,
output_fnames = None,
config_root = None,
- datatypes_config = None,
+ datatypes_config = trans.app.datatypes_registry.to_xml_file(),
job_metadata = None,
kwds = { 'overwrite' : overwrite } )
incoming[ '__SET_EXTERNAL_METADATA_COMMAND_LINE__' ] = cmd_line
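The gist of the change, as a hedged sketch (build_set_metadata_command, external_metadata_wrapper, datasets, and sa_session are illustrative placeholders, not Galaxy's actual names): the action persists the live registry to a temporary XML file and passes that path, so the externally executed metadata script sees every loaded datatype instead of only those in the sample config.

    def build_set_metadata_command(trans, external_metadata_wrapper, datasets,
                                   sa_session, overwrite=True):
        # Persist the in-memory registry and pass the snapshot's path instead
        # of None, which previously forced the hard-coded 'datatypes_conf.xml'.
        datatypes_config = trans.app.datatypes_registry.to_xml_file()
        return external_metadata_wrapper.setup_external_metadata(
            datasets,
            sa_session,
            dataset_files_path=trans.app.model.Dataset.file_path,
            output_fnames=None,
            config_root=None,
            datatypes_config=datatypes_config,
            job_metadata=None,
            kwds={'overwrite': overwrite})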

commit/galaxy-central: jgoecks: Trackster: fix bug in 13ba6909faae that prevented display in Dense mode.
by Bitbucket 17 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/be0a75fd5658/
changeset: be0a75fd5658
user: jgoecks
date: 2011-12-17 20:29:23
summary: Trackster: fix bug in 13ba6909faae that prevented display in Dense mode.
affected #: 1 file
diff -r e65f4e9539144c882f4f07355a033f53a1963ddd -r be0a75fd565845be5d6a9719552013548bb6a82f static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -3966,10 +3966,8 @@
return this.summary_draw_height + SUMMARY_TREE_TOP_PADDING;
}
else {
- var rows_required = 1;
- if (mode === "no_detail" || mode === "Squish" || mode === "Pack") {
- var rows_required = this.incremental_slots(w_scale, result.data, mode);
- }
+ // All other modes require slotting.
+ var rows_required = this.incremental_slots(w_scale, result.data, mode);
// HACK: use dummy painter to get required height. Painter should be extended so that get_required_height
// works as a static function.
var dummy_painter = new (this.painter)(null, null, null, this.prefs, mode);

commit/galaxy-central: greg: Always attempt to remove previously written temporary xml files when persisting the current datatypes registry.
by Bitbucket 16 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/e65f4e953914/
changeset: e65f4e953914
user: greg
date: 2011-12-16 23:08:35
summary: Always attempt to remove previously written temporary xml files when persisting the current datatypes registry.
affected #: 2 files
diff -r 48b3531465ee90f26680291971c837670ad0b7f0 -r e65f4e9539144c882f4f07355a033f53a1963ddd lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -126,8 +126,7 @@
try:
# If the datatypes registry was persisted, attempt to
# remove the temporary file in which it was written.
- tmp_filename = self.datatypes_registry.xml_filename
- if tmp_filename:
- os.unlink( tmp_filename )
+ if self.datatypes_registry.xml_filename is not None:
+ os.unlink( self.datatypes_registry.xml_filename )
except:
pass
diff -r 48b3531465ee90f26680291971c837670ad0b7f0 -r e65f4e9539144c882f4f07355a033f53a1963ddd lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -432,6 +432,14 @@
rval[ 'auto' ] = rval[ 'txt' ]
return rval
def to_xml_file( self ):
+ if self.xml_filename is not None:
+ # If persisted previously, attempt to remove
+ # the temporary file in which we were written.
+ try:
+ os.unlink( self.xml_filename )
+ except:
+ pass
+ self.xml_filename = None
fd, filename = tempfile.mkstemp()
self.xml_filename = filename
if self.converters_path_attr:
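The pattern is small enough to isolate. A minimal self-contained sketch, with RegistrySketch standing in for the real Registry class:

    import os, tempfile

    class RegistrySketch(object):
        def __init__(self):
            self.xml_filename = None
        def to_xml_file(self):
            if self.xml_filename is not None:
                # Best-effort removal of the previous snapshot so repeated
                # persistence does not leak temporary files.
                try:
                    os.unlink(self.xml_filename)
                except OSError:
                    pass
                self.xml_filename = None
            fd, filename = tempfile.mkstemp()
            self.xml_filename = filename
            f = os.fdopen(fd, 'w')
            f.write('<datatypes/>\n')  # the real method serializes the registry
            f.close()
            return filename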

commit/galaxy-central: greg: Enhance the datatypes registry so that it can be persisted as an xml file, which is then used for all tools instead of the datatypes config file.
by Bitbucket 16 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/48b3531465ee/
changeset: 48b3531465ee
user: greg
date: 2011-12-16 22:10:06
summary: Enhance the datatypes registry so that it can be persisted as an xml file, which is then used for all tools instead of the datatypes config file.
affected #: 11 files
diff -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a -r 48b3531465ee90f26680291971c837670ad0b7f0 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -118,9 +118,16 @@
self.job_stop_queue = self.job_manager.job_stop_queue
# Initialize the external service types
self.external_service_types = external_service_types.ExternalServiceTypesCollection( self.config.external_service_type_config_file, self.config.external_service_type_path, self )
-
def shutdown( self ):
self.job_manager.shutdown()
self.object_store.shutdown()
if self.heartbeat:
self.heartbeat.shutdown()
+ try:
+ # If the datatypes registry was persisted, attempt to
+ # remove the temporary file in which it was written.
+ tmp_filename = self.datatypes_registry.xml_filename
+ if tmp_filename:
+ os.unlink( tmp_filename )
+ except:
+ pass
diff -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a -r 48b3531465ee90f26680291971c837670ad0b7f0 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -544,6 +544,7 @@
if config_root is None:
config_root = os.path.abspath( os.getcwd() )
if datatypes_config is None:
+ raise Exception( 'In setup_external_metadata, the received datatypes_config is None.' )
datatypes_config = 'datatypes_conf.xml'
metadata_files_list = []
for dataset in datasets:
diff -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a -r 48b3531465ee90f26680291971c837670ad0b7f0 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -27,9 +27,15 @@
self.sniff_order = []
self.upload_file_formats = []
self.display_applications = odict() #map a display application id to a display application
+ self.converters_path_attr = None
self.datatype_converters_path = None
+ self.indexers_path_attr = None
self.datatype_indexers_path = None
+ self.display_path_attr = None
self.display_applications_path = None
+ self.datatype_elems = []
+ self.sniffer_elems = []
+ self.xml_filename = None
def load_datatypes( self, root_dir=None, config=None, imported_module=None ):
if root_dir and config:
inherit_display_application_by_class = []
@@ -45,16 +51,21 @@
# files installed with repositories from tool sheds must use the same paths. However, we
# may discover at some future time that allowing for multiple paths is more optimal.
if not self.datatype_converters_path:
- self.datatype_converters_path = os.path.join( root_dir, registration.get( 'converters_path', 'lib/galaxy/datatypes/converters' ) )
+ self.converters_path_attr = registration.get( 'converters_path', 'lib/galaxy/datatypes/converters' )
+ self.datatype_converters_path = os.path.join( root_dir, self.converters_path_attr )
if not os.path.isdir( self.datatype_converters_path ):
raise ConfigurationError( "Directory does not exist: %s" % self.datatype_converters_path )
if not self.datatype_indexers_path:
- self.datatype_indexers_path = os.path.join( root_dir, registration.get( 'indexers_path', 'lib/galaxy/datatypes/indexers' ) )
+ self.indexers_path_attr = registration.get( 'indexers_path', 'lib/galaxy/datatypes/indexers' )
+ self.datatype_indexers_path = os.path.join( root_dir, self.indexers_path_attr )
if not os.path.isdir( self.datatype_indexers_path ):
raise ConfigurationError( "Directory does not exist: %s" % self.datatype_indexers_path )
if not self.display_applications_path:
- self.display_applications_path = os.path.join( root_dir, registration.get( 'display_path', 'display_applications' ) )
+ self.display_path_attr = registration.get( 'display_path', 'display_applications' )
+ self.display_applications_path = os.path.join( root_dir, self.display_path_attr )
for elem in registration.findall( 'datatype' ):
+ # Keep an in-memory list of datatype elems to enable persistence.
+ self.datatype_elems.append( elem )
try:
extension = elem.get( 'extension', None )
dtype = elem.get( 'type', None )
@@ -147,6 +158,8 @@
sniffers = root.find( 'sniffers' )
if sniffers:
for elem in sniffers.findall( 'sniffer' ):
+ # Keep an in-memory list of sniffer elems to enable persistence.
+ self.sniffer_elems.append( elem )
dtype = elem.get( 'type', None )
if dtype:
try:
@@ -418,3 +431,31 @@
if 'auto' not in rval and 'txt' in rval: #need to manually add 'auto' datatype
rval[ 'auto' ] = rval[ 'txt' ]
return rval
+ def to_xml_file( self ):
+ fd, filename = tempfile.mkstemp()
+ self.xml_filename = filename
+ if self.converters_path_attr:
+ converters_path_str = ' converters_path="%s"' % self.converters_path_attr
+ else:
+ converters_path_str = ''
+ if self.indexers_path_attr:
+ indexers_path_str = ' indexers_path="%s"' % self.indexers_path_attr
+ else:
+ indexers_path_str = ''
+ if self.display_path_attr:
+ display_path_str = ' display_path="%s"' % self.display_path_attr
+ else:
+ display_path_str = ''
+ os.write( fd, '<?xml version="1.0"?>\n' )
+ os.write( fd, '<datatypes>\n' )
+ os.write( fd, '<registration%s%s%s>\n' % ( converters_path_str, indexers_path_str, display_path_str ) )
+ for elem in self.datatype_elems:
+ os.write( fd, '%s' % galaxy.util.xml_to_string( elem ) )
+ os.write( fd, '</registration>\n' )
+ os.write( fd, '<sniffers>\n' )
+ for elem in self.sniffer_elems:
+ os.write( fd, '%s' % galaxy.util.xml_to_string( elem ) )
+ os.write( fd, '</sniffers>\n' )
+ os.write( fd, '</datatypes>\n' )
+ os.close( fd )
+ return filename
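All consumers follow the same pattern, visible in the jobs and upload-tool hunks below; a hypothetical helper capturing it (resolve_datatypes_config is an invented name):

    def resolve_datatypes_config(app, datatypes_config=None):
        # Fall back to a snapshot of the live registry rather than
        # app.config.datatypes_config, so external processes also see
        # datatypes contributed by installed tool shed repositories.
        if datatypes_config is None:
            datatypes_config = app.datatypes_registry.to_xml_file()
        return datatypes_config

Writing the snapshot to a temporary file means child processes (for example, metadata jobs running on a cluster node) can load the same registry without sharing the parent's memory.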
diff -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a -r 48b3531465ee90f26680291971c837670ad0b7f0 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -868,7 +868,7 @@
if config_root is None:
config_root = self.app.config.root
if datatypes_config is None:
- datatypes_config = self.app.config.datatypes_config
+ datatypes_config = self.app.datatypes_registry.to_xml_file()
return self.external_output_metadata.setup_external_metadata( [ output_dataset_assoc.dataset for output_dataset_assoc in job.output_datasets ],
self.sa_session,
exec_dir = exec_dir,
diff -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a -r 48b3531465ee90f26680291971c837670ad0b7f0 lib/galaxy/tool_shed/__init__.py
--- a/lib/galaxy/tool_shed/__init__.py
+++ b/lib/galaxy/tool_shed/__init__.py
@@ -14,7 +14,8 @@
def load_datatypes( self ):
for tool_shed_repository in self.sa_session.query( self.model.ToolShedRepository ) \
.filter( and_( self.model.ToolShedRepository.table.c.includes_datatypes==True,
- self.model.ToolShedRepository.table.c.deleted==False ) ):
+ self.model.ToolShedRepository.table.c.deleted==False ) ) \
+ .order_by( self.model.ToolShedRepository.table.c.id ):
metadata = tool_shed_repository.metadata
datatypes_config = metadata[ 'datatypes_config' ]
full_path = os.path.abspath( datatypes_config )
diff -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a -r 48b3531465ee90f26680291971c837670ad0b7f0 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1672,7 +1672,8 @@
# For the upload tool, we need to know the root directory and the
# datatypes conf path, so we can load the datatypes registry
param_dict['__root_dir__'] = param_dict['GALAXY_ROOT_DIR'] = os.path.abspath( self.app.config.root )
- param_dict['__datatypes_config__'] = param_dict['GALAXY_DATATYPES_CONF_FILE'] = os.path.abspath( self.app.config.datatypes_config )
+ datatypes_config = self.app.datatypes_registry.to_xml_file()
+ param_dict['__datatypes_config__'] = param_dict['GALAXY_DATATYPES_CONF_FILE'] = os.path.abspath( datatypes_config )
# Return the dictionary of parameters
return param_dict
diff -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a -r 48b3531465ee90f26680291971c837670ad0b7f0 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -107,10 +107,28 @@
ElementInclude.include(root)
return tree
-def xml_to_string(elem):
- """Returns an string from and xml tree"""
- text = ElementTree.tostring(elem)
- return text
+def xml_to_string( elem, pretty=False ):
+ """Returns a string from an xml tree"""
+ if pretty:
+ return ElementTree.tostring( pretty_print_xml( elem ) )
+ return ElementTree.tostring( elem )
+
+def pretty_print_xml( elem, level=0 ):
+ pad = ' '
+ i = "\n" + level * pad
+ if len( elem ):
+ if not elem.text or not elem.text.strip():
+ elem.text = i + pad + pad
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ for e in elem:
+ pretty_print_xml( e, level + 1 )
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ else:
+ if level and ( not elem.tail or not elem.tail.strip() ):
+ elem.tail = i + pad
+ return elem
# characters that are valid
valid_chars = set(string.letters + string.digits + " -=_.()/+*^,:?!")
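With the helper relocated to galaxy.util, call sites use it roughly like this (a small demonstration with invented element names; output indentation is approximate):

    from galaxy import util
    from elementtree.ElementTree import Element, SubElement

    section = Element('section', name='EMBOSS', id='emboss')
    SubElement(section, 'tool', file='emboss/antigenic.xml')
    print util.xml_to_string(section, pretty=True)
    # <section id="emboss" name="EMBOSS">
    #         <tool file="emboss/antigenic.xml" />
    # </section>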
diff -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a -r 48b3531465ee90f26680291971c837670ad0b7f0 lib/galaxy/util/none_like.py
--- a/lib/galaxy/util/none_like.py
+++ b/lib/galaxy/util/none_like.py
@@ -21,6 +21,7 @@
self.ext = self.extension = ext
self.dbkey = dbkey
if datatypes_registry is None:
+ # Default Value Required for unit tests
datatypes_registry = Registry()
datatypes_registry.load_datatypes()
self.datatype = datatypes_registry.get_datatype_by_extension( ext )
diff -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a -r 48b3531465ee90f26680291971c837670ad0b7f0 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -9,7 +9,7 @@
pkg_resources.require( 'elementtree' )
from elementtree import ElementTree, ElementInclude
-from elementtree.ElementTree import Element, SubElement, tostring
+from elementtree.ElementTree import Element, SubElement
log = logging.getLogger( __name__ )
@@ -39,7 +39,7 @@
if line.startswith( '</toolbox>' ):
# We're at the end of the original config file, so add our entry.
new_shed_tool_conf.write( ' ' )
- new_shed_tool_conf.write( tostring( pretty_print_xml( tool_panel_entry ) ) )
+ new_shed_tool_conf.write( util.xml_to_string( tool_panel_entry, pretty=True ) )
new_shed_tool_conf.write( line )
else:
new_shed_tool_conf.write( line )
@@ -554,22 +554,6 @@
log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name )
create_or_update_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict )
return metadata_dict
-def pretty_print_xml( elem, level=0 ):
- pad = ' '
- i = "\n" + level * pad
- if len( elem ):
- if not elem.text or not elem.text.strip():
- elem.text = i + pad + pad
- if not elem.tail or not elem.tail.strip():
- elem.tail = i
- for e in elem:
- pretty_print_xml( e, level + 1 )
- if not elem.tail or not elem.tail.strip():
- elem.tail = i
- else:
- if level and ( not elem.tail or not elem.tail.strip() ):
- elem.tail = i + pad
- return elem
def pull_repository( current_working_dir, repo_files_dir, name ):
# Pull the latest possible contents to the repository.
log.debug( "Pulling latest updates to the repository named '%s'" % name )
diff -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a -r 48b3531465ee90f26680291971c837670ad0b7f0 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -22,10 +22,6 @@
from Cheetah.Template import Template
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree, ElementInclude
-from elementtree.ElementTree import Element, SubElement, tostring
-
log = logging.getLogger( __name__ )
# States for passing messages
diff -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a -r 48b3531465ee90f26680291971c837670ad0b7f0 lib/galaxy/webapps/community/app.py
--- a/lib/galaxy/webapps/community/app.py
+++ b/lib/galaxy/webapps/community/app.py
@@ -16,6 +16,7 @@
config.configure_logging( self.config )
# Set up datatypes registry
self.datatypes_registry = galaxy.datatypes.registry.Registry()
+ # TODO: Handle datatypes included in repositories - the following will only load datatypes_conf.xml.
self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
# Determine the database url
if self.config.database_connection:

commit/galaxy-central: dan: Add Picard Interval List to BED6 converter.
by Bitbucket 16 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c6764f7a359c/
changeset: c6764f7a359c
user: dan
date: 2011-12-16 20:39:40
summary: Add Picard Interval List to BED6 converter.
affected #: 3 files
diff -r 11710de1b1fbf52ae3a9c860a7d36e76acf5e060 -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -101,7 +101,9 @@
<display file="ensembl/ensembl_interval_as_bed.xml" inherit="True"/><display file="gbrowse/gbrowse_interval_as_bed.xml" inherit="True"/></datatype>
- <datatype extension="picard_interval_list" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/>
+ <datatype extension="picard_interval_list" type="galaxy.datatypes.tabular:Tabular" subclass="True" display_in_upload="True">
+ <converter file="picard_interval_list_to_bed6_converter.xml" target_datatype="bed6"/>
+ </datatype><datatype extension="gatk_interval" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/><datatype extension="gatk_dbsnp" type="galaxy.datatypes.tabular:Tabular" subclass="True" display_in_upload="True"/><datatype extension="gatk_tranche" type="galaxy.datatypes.tabular:Tabular" subclass="True" display_in_upload="True"/>
diff -r 11710de1b1fbf52ae3a9c860a7d36e76acf5e060 -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+#Dan Blankenberg
+
+import sys
+
+assert sys.version_info[:2] >= ( 2, 5 )
+HEADER_STARTS_WITH = ( '@' )
+
+def __main__():
+ input_name = sys.argv[1]
+ output_name = sys.argv[2]
+ skipped_lines = 0
+ first_skipped_line = 0
+ header_lines = 0
+ out = open( output_name, 'w' )
+ i = 0
+ for i, line in enumerate( open( input_name ) ):
+ complete_interval = False
+ line = line.rstrip( '\r\n' )
+ if line:
+ if line.startswith( HEADER_STARTS_WITH ):
+ header_lines += 1
+ else:
+ try:
+ elems = line.split( '\t' )
+ if len( elems ) >= 5:
+ complete_interval = True
+ out.write( '%s\t%s\t%s\t%s\t0\t%s\n' % ( elems[0], int(elems[1])-1, elems[2], elems[4], elems[3] ) )
+ except Exception, e:
+ print e
+ skipped_lines += 1
+ if not first_skipped_line:
+ first_skipped_line = i + 1
+ else:
+ skipped_lines += 1
+ if not first_skipped_line:
+ first_skipped_line = i + 1
+ out.close()
+ info_msg = "%i lines converted to BED. " % ( i + 1 - skipped_lines )
+ if skipped_lines > 0:
+ info_msg += "Skipped %d blank/comment/invalid lines starting with line #%d." % ( skipped_lines, first_skipped_line )
+ print info_msg
+
+if __name__ == "__main__": __main__()
diff -r 11710de1b1fbf52ae3a9c860a7d36e76acf5e060 -r c6764f7a359cf26c81cfc14ff0e9e94e93b6c82a lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.xml
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.xml
@@ -0,0 +1,12 @@
+<tool id="CONVERTER_picard_interval_list_to_bed6" name="Convert Picard Interval List to BED6" version="1.0.0">
+ <description>converter</description>
+ <command interpreter="python">picard_interval_list_to_bed6_converter.py "$input" "$output"</command>
+ <inputs>
+ <param name="input" type="data" format="picard_interval_list" label="Picard Interval List file"/>
+ </inputs>
+ <outputs>
+ <data name="output" format="bed6"/>
+ </outputs>
+ <help>
+ </help>
+</tool>

commit/galaxy-central: dan: Add Maximal Information-based Nonparametric Exploration (MINE) tool.
by Bitbucket 16 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/11710de1b1fb/
changeset: 11710de1b1fb
user: dan
date: 2011-12-16 18:54:21
summary: Add Maximal Information-based Nonparametric Exploration (MINE) tool.
affected #: 3 files
diff -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 -r 11710de1b1fbf52ae3a9c860a7d36e76acf5e060 tool_conf.xml.sample
--- a/tool_conf.xml.sample
+++ b/tool_conf.xml.sample
@@ -149,6 +149,7 @@
<tool file="stats/plot_from_lda.xml" /><tool file="regVariation/t_test_two_samples.xml" /><tool file="regVariation/compute_q_values.xml" />
+ <tool file="stats/MINE.xml" /><label text="GFF" id="gff" /><tool file="stats/count_gff_features.xml" />
diff -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 -r 11710de1b1fbf52ae3a9c860a7d36e76acf5e060 tools/stats/MINE.xml
--- /dev/null
+++ b/tools/stats/MINE.xml
@@ -0,0 +1,82 @@
+<tool id="maximal_information_based_nonparametric_exploration" name="MINE" version="0.0.1">
+ <description>- Maximal Information-based Nonparametric Exploration</description>
+ <requirements>
+ <requirement type="package" version="1.0">MINE</requirement>
+ </requirements>
+ <command interpreter="python">mine_wrapper.py
+ --jar "${GALAXY_DATA_INDEX_DIR}/shared/jars/mine/MINE.jar"
+
+ --infile "${input_file}"
+
+ #if str( $master_variable_type.master_variable_type_selector ) in [ 'allPairs', 'adjacentPairs' ]:
+ --master_variable "${master_variable_type.master_variable_type_selector}"
+ #else:
+ --master_variable "${master_variable_type.master_variable}"
+ #end if
+
+ --cv "${cv}"
+
+ --exp "${exp}"
+
+ --c "${c}"
+
+ ##--gc ##skip
+
+
+ #if str( $master_variable_type.master_variable_type_selector ) != 'allPairs' and $master_variable_type.permute:
+ --permute
+ #end if
+
+ --output_results "${output_results}"
+
+ --output_log "${output_log}"
+ </command>
+ <inputs>
+ <param name="input_file" type="data" format="csv" label="CSV file" />
+
+ <conditional name="master_variable_type">
+ <param name="master_variable_type_selector" type="select" label="Choose the master variable type">
+ <option value="allPairs">allPairs</option>
+ <option value="adjacentPairs">adjacentPairs</option>
+ <option value="compare_against_ith" selected="True">compare against i-th</option>
+ </param>
+ <when value="compare_against_ith">
+ <param type="integer" value="0" name="master_variable" />
+ <param type="boolean" truevalue="--permute" false_value="" name="permute" checked="False" />
+ </when>
+ <when value="adjacentPairs">
+ <param type="boolean" truevalue="--permute" false_value="" name="permute" checked="False" />
+ </when>
+ </conditional>
+
+ <param type="float" value="0" name="cv" />
+
+ <param type="float" value="0.6" name="exp" />
+
+ <param type="float" value="15" name="c" />
+
+ </inputs>
+ <outputs>
+ <data format="csv" name="output_results" label="${tool.name} on ${on_string} (Results)" />
+ <data format="txt" name="output_log" label="${tool.name} on ${on_string} (log)" />
+ </outputs>
+ <tests>
+ <!-- TODO -->
+ </tests>
+ <help>
+**What it does**
+
+Applies the Maximal Information-based Nonparametric Exploration strategy to an input dataset.
+
+See http://www.exploredata.net/ for more information.
+
+------
+
+**Citation**
+
+For the underlying tool, please cite `David N. Reshef, Yakir A. Reshef, Hilary K. Finucane, Sharon R. Grossman, Gilean McVean, Peter J. Turnbaugh, Eric S. Lander, Michael Mitzenmacher, Pardis C. Sabeti. Detecting Novel Associations in Large Data Sets. Science. 2011 Dec. <http://www.sciencemag.org/content/334/6062/1518>`_
+
+If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
+
+ </help>
+</tool>
diff -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 -r 11710de1b1fbf52ae3a9c860a7d36e76acf5e060 tools/stats/mine_wrapper.py
--- /dev/null
+++ b/tools/stats/mine_wrapper.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+#Dan Blankenberg
+
+"""
+A wrapper script for running the MINE.jar commands.
+"""
+
+import sys, optparse, os, tempfile, subprocess, shutil
+
+CHUNK_SIZE = 2**20 #1mb
+
+BASE_NAME = "galaxy_mime_file.txt"
+JOB_ID = "galaxy_mine"
+
+def cleanup_before_exit( tmp_dir ):
+ if tmp_dir and os.path.exists( tmp_dir ):
+ print os.listdir( tmp_dir )
+ shutil.rmtree( tmp_dir )
+
+def open_file_from_option( filename, mode = 'rb' ):
+ if filename:
+ return open( filename, mode = mode )
+ return None
+
+
+def __main__():
+ #Parse Command Line
+ parser = optparse.OptionParser()
+ parser.add_option( '-j', '--jar', dest='jar', action='store', type="string", help='Location of JAR file' )
+ parser.add_option( '-i', '--infile', dest='infile', action='store', type="string", help='infile' )
+ parser.add_option( '-m', '--master_variable', dest='master_variable', action='store', type="string", help='master_variable' )
+ parser.add_option( '-v', '--cv', dest='cv', action='store', type="string", help='cv' )
+ parser.add_option( '-e', '--exp', dest='exp', action='store', type="string", help='exp' )
+ parser.add_option( '-c', '--c', dest='c', action='store', type="string", help='c' )
+ parser.add_option( '-p', '--permute', dest='permute', action='store_true', default=False, help='permute' )
+ parser.add_option( '-o', '--output_results', dest='output_results', action='store', type="string", help='output_results' )
+ parser.add_option( '-l', '--output_log', dest='output_log', action='store', type="string", help='output_log' )
+ parser.add_option( '', '--stdout', dest='stdout', action='store', type="string", default=None, help='If specified, the output of stdout will be written to this file.' )
+ parser.add_option( '', '--stderr', dest='stderr', action='store', type="string", default=None, help='If specified, the output of stderr will be written to this file.' )
+ (options, args) = parser.parse_args()
+
+ tmp_dir = tempfile.mkdtemp( prefix='tmp-MINE-' )
+ tmp_input_name = os.path.join( tmp_dir, BASE_NAME )
+ if options.permute:
+ permute = "-permute"
+ else:
+ permute = ""
+
+ os.symlink( options.infile, tmp_input_name )
+
+ cmd = 'java -jar "%s" "%s" %s -cv%s -exp%s -c%s %s "%s"' % ( options.jar, tmp_input_name, options.master_variable, options.cv, options.exp, options.c, permute, JOB_ID )
+ print cmd
+
+ #set up stdout and stderr output options
+ stdout = open_file_from_option( options.stdout, mode = 'wb' )
+ stderr = open_file_from_option( options.stderr, mode = 'wb' )
+ #if no stderr file is specified, we'll use our own
+ if stderr is None:
+ stderr = tempfile.NamedTemporaryFile( prefix="MINE-stderr-", dir=tmp_dir )
+
+ proc = subprocess.Popen( args=cmd, stdout=stdout, stderr=stderr, shell=True, cwd=tmp_dir )
+ return_code = proc.wait()
+
+ if return_code:
+ stderr_target = sys.stderr
+ else:
+ stderr_target = sys.stdout
+ stderr.flush()
+ stderr.seek(0)
+ while True:
+ chunk = stderr.read( CHUNK_SIZE )
+ if chunk:
+ stderr_target.write( chunk )
+ else:
+ break
+ stderr.close()
+
+ print os.listdir( tmp_dir )
+
+ shutil.move( '%s,%s,Results.csv' % ( tmp_input_name, JOB_ID ), options.output_results )
+ shutil.move( '%s,%s,Status.csv' % ( tmp_input_name, JOB_ID ), options.output_log )
+
+ cleanup_before_exit( tmp_dir )
+
+if __name__=="__main__": __main__()

commit/galaxy-central: greg: Add a new InstalledRepositoryManager class which currently enables loading datatypes from previously installed tool shed repositories into the datatypes registry.
by Bitbucket 16 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/42b4bb82e006/
changeset: 42b4bb82e006
user: greg
date: 2011-12-16 17:34:35
summary: 1) Add a new InstalledRepositoryManager class which currently enables loading datatypes from previously installed tool shed repositories into the datatypes registry. This component will enable additional features in the future.
2) Enhance the install_repository method in the admin_toolshed controller to skip displaying the page for selecting a tool panel section if no tools exist in any of the repositories being installed.
3) Eliminate the duplicate generate_datatypes_metadata, generate_tool_metadata, and generate_workflow_metadata methods and use those now contained in shed_util.py.
4) Fixes for handling tool shed repository metadata for repositories that do not include any tools.
affected #: 10 files
diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -22,7 +22,7 @@
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
- # Set up datatypes registry
+ # Initialize the datatypes registry to the default data types included in self.config.datatypes_config.
self.datatypes_registry = galaxy.datatypes.registry.Registry()
self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
galaxy.model.set_datatypes_registry( self.datatypes_registry )
@@ -68,6 +68,10 @@
if self.config.get_bool( 'enable_tool_shed_check', False ):
from tool_shed import update_manager
self.update_manager = update_manager.UpdateManager( self )
+ # Manage installed tool shed repositories
+ self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
+ # Add additional datatypes from installed tool shed repositories to the datatypes registry.
+ self.installed_repository_manager.load_datatypes()
# Load datatype converters
self.datatypes_registry.load_datatype_converters( self.toolbox )
# Load history import/export tools
diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/tool_shed/__init__.py
--- a/lib/galaxy/tool_shed/__init__.py
+++ b/lib/galaxy/tool_shed/__init__.py
@@ -1,3 +1,22 @@
"""
-Classes encapsulating the relationships between Galaxy and Galaxy tool sheds.
-"""
\ No newline at end of file
+Classes encapsulating the management of repositories installed from Galaxy tool sheds.
+"""
+import os, logging
+from galaxy.model.orm import *
+
+log = logging.getLogger(__name__)
+
+class InstalledRepositoryManager( object ):
+ def __init__( self, app ):
+ self.app = app
+ self.model = self.app.model
+ self.sa_session = self.model.context.current
+ def load_datatypes( self ):
+ for tool_shed_repository in self.sa_session.query( self.model.ToolShedRepository ) \
+ .filter( and_( self.model.ToolShedRepository.table.c.includes_datatypes==True,
+ self.model.ToolShedRepository.table.c.deleted==False ) ):
+ metadata = tool_shed_repository.metadata
+ datatypes_config = metadata[ 'datatypes_config' ]
+ full_path = os.path.abspath( datatypes_config )
+ self.app.datatypes_registry.load_datatypes( self.app.config.root, full_path )
+
\ No newline at end of file
diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -158,4 +158,13 @@
if os.path.exists( clone_dir ):
installed = True
break
+ if not installed:
+ full_path = os.path.abspath( clone_dir )
+ # We may have a repository that contains no tools.
+ if os.path.exists( full_path ):
+ for root, dirs, files in os.walk( full_path ):
+ if '.hg' in dirs:
+ # Assume that the repository has been installed if we find a .hg directory.
+ installed = True
+ break
return installed
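The fallback test added here is worth isolating; a self-contained sketch (looks_installed is an invented name):

    import os

    def looks_installed(clone_dir):
        # A repository that contains no tools still counts as installed
        # if its clone directory holds Mercurial metadata (.hg).
        if not os.path.exists(clone_dir):
            return False
        for root, dirs, files in os.walk(clone_dir):
            if '.hg' in dirs:
                return True
        return False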
diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -109,6 +109,7 @@
Update the received metadata_dict with changes that have been applied
to the received datatypes_config. This method is used by the InstallManager,
which does not have access to trans.
+ TODO: Handle converters, indexers, sniffers, etc...
"""
# Parse datatypes_config.
tree = ElementTree.parse( datatypes_config )
@@ -125,13 +126,29 @@
registration = root.find( 'registration' )
if registration:
for elem in registration.findall( 'datatype' ):
- extension = elem.get( 'extension', None )
+ datatypes_dict = {}
+ display_in_upload = elem.get( 'display_in_upload', None )
+ if display_in_upload:
+ datatypes_dict[ 'display_in_upload' ] = display_in_upload
dtype = elem.get( 'type', None )
+ if dtype:
+ datatypes_dict[ 'dtype' ] = dtype
+ extension = elem.get( 'extension', None )
+ if extension:
+ datatypes_dict[ 'extension' ] = extension
+ max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
+ if max_optional_metadata_filesize:
+ datatypes_dict[ 'max_optional_metadata_filesize' ] = max_optional_metadata_filesize
mimetype = elem.get( 'mimetype', None )
- datatypes.append( dict( extension=extension,
- dtype=dtype,
- mimetype=mimetype ) )
- metadata_dict[ 'datatypes' ] = datatypes
+ if mimetype:
+ datatypes_dict[ 'mimetype' ] = mimetype
+ subclass = elem.get( 'subclass', None )
+ if subclass:
+ datatypes_dict[ 'subclass' ] = subclass
+ if datatypes_dict:
+ datatypes.append( datatypes_dict )
+ if datatypes:
+ metadata_dict[ 'datatypes' ] = datatypes
return metadata_dict
def generate_metadata( toolbox, relative_install_dir, repository_clone_url ):
"""
@@ -426,46 +443,52 @@
# This method is used by the InstallManager, which does not have access to trans.
imported_module = None
# Parse datatypes_config.
- tree = parse_xml( datatypes_config )
+ tree = util.parse_xml( datatypes_config )
datatypes_config_root = tree.getroot()
relative_path_to_datatype_file_name = None
datatype_files = datatypes_config_root.find( 'datatype_files' )
- # Currently only a single datatype_file is supported. For example:
- # <datatype_files>
- # <datatype_file name="gmap.py"/>
- # </datatype_files>
- for elem in datatype_files.findall( 'datatype_file' ):
- datatype_file_name = elem.get( 'name', None )
- if datatype_file_name:
- # Find the file in the installed repository.
- for root, dirs, files in os.walk( relative_intall_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == datatype_file_name:
- relative_path_to_datatype_file_name = os.path.join( root, name )
- break
- break
- if relative_path_to_datatype_file_name:
- relative_head, relative_tail = os.path.split( relative_path_to_datatype_file_name )
- registration = datatypes_config_root.find( 'registration' )
- # Get the module by parsing the <datatype> tag.
- for elem in registration.findall( 'datatype' ):
- # A 'type' attribute is currently required. The attribute
- # should be something like: type="gmap:GmapDB".
- dtype = elem.get( 'type', None )
- if dtype:
- fields = dtype.split( ':' )
- datatype_module = fields[0]
- datatype_class_name = fields[1]
- # Since we currently support only a single datatype_file,
- # we have what we need.
+ if datatype_files:
+ # Currently only a single datatype_file is supported. For example:
+ # <datatype_files>
+ # <datatype_file name="gmap.py"/>
+ # </datatype_files>
+ for elem in datatype_files.findall( 'datatype_file' ):
+ datatype_file_name = elem.get( 'name', None )
+ if datatype_file_name:
+ # Find the file in the installed repository.
+ for root, dirs, files in os.walk( relative_intall_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == datatype_file_name:
+ relative_path_to_datatype_file_name = os.path.join( root, name )
+ break
break
- try:
- sys.path.insert( 0, relative_head )
- imported_module = __import__( datatype_module )
- sys.path.pop( 0 )
- except Exception, e:
- log.debug( "Exception importing datatypes code file included in installed repository: %s" % str( e ) )
+ if relative_path_to_datatype_file_name:
+ relative_head, relative_tail = os.path.split( relative_path_to_datatype_file_name )
+ registration = datatypes_config_root.find( 'registration' )
+ # Get the module by parsing the <datatype> tag.
+ for elem in registration.findall( 'datatype' ):
+ # A 'type' attribute is currently required. The attribute
+ # should be something like: type="gmap:GmapDB".
+ dtype = elem.get( 'type', None )
+ if dtype:
+ fields = dtype.split( ':' )
+ datatype_module = fields[0]
+ datatype_class_name = fields[1]
+ # Since we currently support only a single datatype_file,
+ # we have what we need.
+ break
+ try:
+ sys.path.insert( 0, relative_head )
+ imported_module = __import__( datatype_module )
+ sys.path.pop( 0 )
+ except Exception, e:
+ log.debug( "Exception importing datatypes code file included in installed repository: %s" % str( e ) )
+ else:
+ # The repository includes a datatypes_conf.xml file, but no code file that
+ # contains data type classes. This implies that the data types in datatypes_conf.xml
+ # are all subclasses of data types that are in the distribution.
+ imported_module = None
app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=datatypes_config, imported_module=imported_module )
def load_repository_contents( app, name, description, owner, changeset_revision, tool_path, repository_clone_url, relative_install_dir,
current_working_dir, tmp_name, tool_section=None, shed_tool_conf=None, new_install=True ):
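For illustration, one entry in the richer per-datatype metadata now recorded by the generate_datatypes_metadata changes above might look like this (values invented; only attributes present in the config are stored):

    metadata_dict = {
        'datatypes': [
            {'extension': 'gmap',
             'dtype': 'gmap:GmapDB',
             'subclass': 'True',
             'display_in_upload': 'True'},
        ],
    }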
diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -191,13 +191,20 @@
repo_info_dict = kwd[ 'repo_info_dict' ]
new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
tool_panel_section = kwd.get( 'tool_panel_section', '' )
- if kwd.get( 'select_tool_panel_section_button', False ):
- shed_tool_conf = kwd[ 'shed_tool_conf' ]
+ includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
+ if not includes_tools or ( includes_tools and kwd.get( 'select_tool_panel_section_button', False ) ):
+ if includes_tools:
+ shed_tool_conf = kwd[ 'shed_tool_conf' ]
+ else:
+ # If installing a repository that includes no tools, get the relative
+ # tool_path from the file to which the install_tool_config_file config
+ # setting points.
+ shed_tool_conf = trans.app.config.install_tool_config
# Get the tool path.
for k, tool_path in trans.app.toolbox.shed_tool_confs.items():
if k == shed_tool_conf:
break
- if new_tool_panel_section or tool_panel_section:
+ if includes_tools and ( new_tool_panel_section or tool_panel_section ):
if new_tool_panel_section:
section_id = new_tool_panel_section.lower().replace( ' ', '_' )
new_section_key = 'section_%s' % str( section_id )
@@ -290,6 +297,7 @@
tool_shed_url=tool_shed_url,
repo_info_dict=repo_info_dict,
shed_tool_conf=shed_tool_conf,
+ includes_tools=includes_tools,
shed_tool_conf_select_field=shed_tool_conf_select_field,
tool_panel_section_select_field=tool_panel_section_select_field,
new_tool_panel_section=new_tool_panel_section,
diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -4,6 +4,7 @@
from galaxy.tools import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
+from galaxy.util.shed_util import generate_datatypes_metadata, generate_tool_metadata, generate_workflow_metadata
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
from galaxy.model.orm import *
@@ -149,6 +150,7 @@
.order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \
.first()
def generate_clone_url( trans, repository_id ):
+ """Generate the URL for cloning a repository."""
repository = get_repository( trans, repository_id )
protocol, base = trans.request.base.split( '://' )
if trans.user:
@@ -220,54 +222,6 @@
correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_tail )
invalid_files.append( ( name, correction_msg ) )
return can_set_metadata, invalid_files
-def generate_tool_metadata( trans, id, changeset_revision, tool_config, tool, metadata_dict ):
- """
- Update the received metadata_dict with changes that have been
- applied to the received tool.
- """
- repository = get_repository( trans, id )
- # Handle tool.requirements.
- tool_requirements = []
- for tr in tool.requirements:
- name=tr.name
- type=tr.type
- if type == 'fabfile':
- version = None
- fabfile = tr.fabfile
- method = tr.method
- else:
- version = tr.version
- fabfile = None
- method = None
- requirement_dict = dict( name=name,
- type=type,
- version=version,
- fabfile=fabfile,
- method=method )
- tool_requirements.append( requirement_dict )
- # Handle tool.tests.
- tool_tests = []
- if tool.tests:
- for ttb in tool.tests:
- test_dict = dict( name=ttb.name,
- required_files=ttb.required_files,
- inputs=ttb.inputs,
- outputs=ttb.outputs )
- tool_tests.append( test_dict )
- tool_dict = dict( id=tool.id,
- guid = generate_tool_guid( trans, repository, tool ),
- name=tool.name,
- version=tool.version,
- description=tool.description,
- version_string_cmd = tool.version_string_cmd,
- tool_config=tool_config,
- requirements=tool_requirements,
- tests=tool_tests )
- if 'tools' in metadata_dict:
- metadata_dict[ 'tools' ].append( tool_dict )
- else:
- metadata_dict[ 'tools' ] = [ tool_dict ]
- return metadata_dict
def new_tool_metadata_required( trans, id, metadata_dict ):
"""
Compare the last saved metadata for each tool in the repository with the new metadata
@@ -309,16 +263,6 @@
# The received metadata_dict includes no metadata for tools, so a new repository_metadata table
# record is not needed.
return False
-def generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict ):
- """
- Update the received metadata_dict with changes that have been applied
- to the received exported_workflow_dict. Store everything in the database.
- """
- if 'workflows' in metadata_dict:
- metadata_dict[ 'workflows' ].append( exported_workflow_dict )
- else:
- metadata_dict[ 'workflows' ] = [ exported_workflow_dict ]
- return metadata_dict
def new_workflow_metadata_required( trans, id, metadata_dict ):
"""
Currently everything about an exported workflow except the name is hard-coded, so there's
@@ -337,34 +281,6 @@
# The received metadata_dict includes no metadata for workflows, so a new repository_metadata table
# record is not needed.
return False
-def generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict ):
- """
- Update the received metadata_dict with changes that have been applied
- to the received datatypes_config.
- """
- # Parse datatypes_config.
- tree = ElementTree.parse( datatypes_config )
- root = tree.getroot()
- ElementInclude.include( root )
- repository_datatype_code_files = []
- datatype_files = root.find( 'datatype_files' )
- if datatype_files:
- for elem in datatype_files.findall( 'datatype_file' ):
- name = elem.get( 'name', None )
- repository_datatype_code_files.append( name )
- metadata_dict[ 'datatype_files' ] = repository_datatype_code_files
- datatypes = []
- registration = root.find( 'registration' )
- if registration:
- for elem in registration.findall( 'datatype' ):
- extension = elem.get( 'extension', None )
- dtype = elem.get( 'type', None )
- mimetype = elem.get( 'mimetype', None )
- datatypes.append( dict( extension=extension,
- dtype=dtype,
- mimetype=mimetype ) )
- metadata_dict[ 'datatypes' ] = datatypes
- return metadata_dict
def generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo_dir ):
# Browse the repository tip files on disk to generate metadata. This is faster than
# the generate_metadata_for_changeset_revision() method below because fctx.data() does
@@ -382,7 +298,7 @@
datatypes_config = os.path.abspath( os.path.join( root, name ) )
break
if datatypes_config:
- metadata_dict = generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict )
+ metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
# Find all special .sample files.
for root, dirs, files in os.walk( repo_dir ):
if root.find( '.hg' ) < 0:
@@ -409,19 +325,19 @@
if can_set_metadata:
# Update the list of metadata dictionaries for tools in metadata_dict.
tool_config = os.path.join( root, name )
- metadata_dict = generate_tool_metadata( trans, id, changeset_revision, tool_config, tool, metadata_dict )
+ repository_clone_url = generate_clone_url( trans, id )
+ metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
# Find all exported workflows
elif name.endswith( '.ga' ):
try:
- full_path = os.path.abspath( os.path.join( root, name ) )
+ relative_path = os.path.join( root, name )
# Convert workflow data from json
- fp = open( full_path, 'rb' )
+ fp = open( relative_path, 'rb' )
workflow_text = fp.read()
fp.close()
exported_workflow_dict = from_json_string( workflow_text )
if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
- # Update the list of metadata dictionaries for workflows in metadata_dict.
- metadata_dict = generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict )
+ metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
except Exception, e:
invalid_files.append( ( name, str( e ) ) )
return metadata_dict, invalid_files
@@ -438,7 +354,7 @@
datatypes_config = fctx.data()
break
if datatypes_config:
- metadata_dict = generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict )
+ metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
# Get all tool config file names from the hgweb url, something like:
# /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml
for filename in ctx:
@@ -469,7 +385,8 @@
# anything, but may result in a bit of confusion when maintaining the code / data over time.
# IMPORTANT NOTE: Here we are assuming that since the current change set is not the repository
# tip, we do not have to handle any .loc.sample files since they would have been handled previously.
- metadata_dict = generate_tool_metadata( trans, id, changeset_revision, filename, tool, metadata_dict )
+ repository_clone_url = generate_clone_url( trans, id )
+ metadata_dict = generate_tool_metadata( filename, tool, repository_clone_url, metadata_dict )
try:
os.unlink( tmp_filename )
except:
@@ -481,8 +398,7 @@
workflow_text = fctx.data()
exported_workflow_dict = from_json_string( workflow_text )
if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
- # Update the list of metadata dictionaries for workflows in metadata_dict.
- metadata_dict = generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict )
+ metadata_dict = generate_workflow_metadata( '', exported_workflow_dict, metadata_dict )
except Exception, e:
invalid_files.append( ( name, str( e ) ) )
return metadata_dict, invalid_files
@@ -510,12 +426,18 @@
if len( repository.downloadable_revisions ) == 1:
handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
else:
- # Update the last saved repository_metadata table row.
repository_metadata = get_latest_repository_metadata( trans, id )
- repository_metadata.changeset_revision = changeset_revision
- repository_metadata.metadata = metadata_dict
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
+ if repository_metadata:
+ # Update the last saved repository_metadata table row.
+ repository_metadata.changeset_revision = changeset_revision
+ repository_metadata.metadata = metadata_dict
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ else:
+ # There are no tools in the repository, and we're setting metadata on the repository tip.
+ repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict )
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
else:
# We're re-generating metadata for an old repository revision.
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -436,9 +436,9 @@
**kwd ) )
if operation == "install":
galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
- encoded_repo_info_dict = self.__encode_repo_info_dict( trans, webapp, util.listify( item_id ) )
- url = '%s/admin_toolshed/install_repository?tool_shed_url=%s&webapp=%s&repo_info_dict=%s' % \
- ( galaxy_url, url_for( '', qualified=True ), webapp, encoded_repo_info_dict )
+ encoded_repo_info_dict, includes_tools = self.__encode_repo_info_dict( trans, webapp, util.listify( item_id ) )
+ url = '%s/admin_toolshed/install_repository?tool_shed_url=%s&webapp=%s&repo_info_dict=%s&includes_tools=%s' % \
+ ( galaxy_url, url_for( '', qualified=True ), webapp, encoded_repo_info_dict, str( includes_tools ) )
return trans.response.send_redirect( url )
else:
# This can only occur when there is a multi-select grid with check boxes and an operation,
@@ -512,9 +512,9 @@
**kwd ) )
if operation == "install":
galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
- encoded_repo_info_dict = self.__encode_repo_info_dict( trans, webapp, util.listify( item_id ) )
- url = '%s/admin_toolshed/install_repository?tool_shed_url=%s&webapp=%s&repo_info_dict=%s' % \
- ( galaxy_url, url_for( '', qualified=True ), webapp, encoded_repo_info_dict )
+ encoded_repo_info_dict, includes_tools = self.__encode_repo_info_dict( trans, webapp, util.listify( item_id ) )
+ url = '%s/admin_toolshed/install_repository?tool_shed_url=%s&webapp=%s&repo_info_dict=%s&includes_tools=%s' % \
+ ( galaxy_url, url_for( '', qualified=True ), webapp, encoded_repo_info_dict, str( includes_tools ) )
return trans.response.send_redirect( url )
else:
# This can only occur when there is a multi-select grid with check boxes and an operation,
@@ -707,14 +707,17 @@
return match_tuples
def __encode_repo_info_dict( self, trans, webapp, repository_metadata_ids ):
repo_info_dict = {}
+ includes_tools = False
for repository_metadata_id in repository_metadata_ids:
repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
+ if not includes_tools and 'tools' in repository_metadata.metadata:
+ includes_tools = True
repository = get_repository( trans, trans.security.encode_id( repository_metadata.repository_id ) )
repository_id = trans.security.encode_id( repository.id )
changeset_revision = repository_metadata.changeset_revision
repository_clone_url = generate_clone_url( trans, repository_id )
repo_info_dict[ repository.name ] = ( repository.description, repository_clone_url, changeset_revision )
- return encode( repo_info_dict )
+ return encode( repo_info_dict ), includes_tools
@web.expose
def preview_tools_in_changeset( self, trans, repository_id, **kwd ):
params = util.Params( kwd )
@@ -755,12 +758,16 @@
repository_clone_url = generate_clone_url( trans, repository_id )
repository = get_repository( trans, repository_id )
changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip ) )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ # Tell the caller if the repository includes Galaxy tools so the page
+ # enabling selection of the tool panel section can be displayed.
+ includes_tools = 'tools' in repository_metadata.metadata
repo_info_dict = {}
repo_info_dict[ repository.name ] = ( repository.description, repository_clone_url, changeset_revision )
encoded_repo_info_dict = encode( repo_info_dict )
# Redirect back to local Galaxy to perform install.
- url = '%s/admin_toolshed/install_repository?tool_shed_url=%s&repo_info_dict=%s' % \
- ( galaxy_url, url_for( '', qualified=True ), encoded_repo_info_dict )
+ url = '%s/admin_toolshed/install_repository?tool_shed_url=%s&repo_info_dict=%s&includes_tools=%s' % \
+ ( galaxy_url, url_for( '', qualified=True ), encoded_repo_info_dict, str( includes_tools ) )
return trans.response.send_redirect( url )
@web.expose
def check_for_updates( self, trans, **kwd ):
diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -128,25 +128,22 @@
<table class="grid"><tr><td><b>extension</b></td>
- <td><b>dtype</b></td>
+ <td><b>type</b></td><td><b>mimetype</b></td>
+ <td><b>subclass</b></td></tr>
%for datatypes_dict in datatypes_dicts:
- <%
- extension = datatypes_dict[ 'extension' ]
- dtype = datatypes_dict[ 'dtype' ]
- mimetype = datatypes_dict[ 'mimetype' ]
+ <%
+ extension = datatypes_dict.get( 'extension', ' ' )
+ dtype = datatypes_dict.get( 'dtype', ' ' )
+ mimetype = datatypes_dict.get( 'mimetype', ' ' )
+ subclass = datatypes_dict.get( 'subclass', ' ' )
%>
<tr>
<td>${extension}</td>
<td>${dtype}</td>
- <td>
- %if mimetype:
- ${mimetype}
- %else:
-
- %endif
- </td>
+ <td>${mimetype}</td>
+ <td>${subclass}</td>
</tr>
%endfor
</table>
diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 templates/admin/tool_shed_repository/select_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -25,7 +25,7 @@
<div class="toolForm"><div class="toolFormTitle">Choose tool panel section to contain installed tools (optional)</div><div class="toolFormBody">
- <form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict )}" method="post" >
+ <form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict, includes_tools=includes_tools )}" method="post" >
%if shed_tool_conf_select_field:
<div class="form-row"><label>Shed tool configuration file:</label>
diff -r f37d9a0a1afc72945e1283195739fc31418dc934 -r 42b4bb82e006c42f7f2049c2b8460af5e08ddc24 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -209,25 +209,22 @@
<table class="grid"><tr><td><b>extension</b></td>
- <td><b>dtype</b></td>
+ <td><b>type</b></td><td><b>mimetype</b></td>
+ <td><b>subclass</b></td></tr>
%for datatypes_dict in datatypes_dicts:
- <%
- extension = datatypes_dict[ 'extension' ]
- dtype = datatypes_dict[ 'dtype' ]
- mimetype = datatypes_dict[ 'mimetype' ]
+ <%
+ extension = datatypes_dict.get( 'extension', '&nbsp;' )
+ dtype = datatypes_dict.get( 'dtype', '&nbsp;' )
+ mimetype = datatypes_dict.get( 'mimetype', '&nbsp;' )
+ subclass = datatypes_dict.get( 'subclass', '&nbsp;' )
%><tr><td>${extension}</td><td>${dtype}</td>
- <td>
- %if mimetype:
- ${mimetype}
- %else:
- &nbsp;
- %endif
- </td>
+ <td>${mimetype}</td>
+ <td>${subclass}</td></tr>
%endfor
</table>
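Both template tables above replace direct key indexing with dict.get() and a blank default, so a datatypes_dict that lacks a key renders an empty cell instead of raising KeyError. A small illustration with made-up values:

    datatypes_dict = { 'extension' : 'bed', 'dtype' : 'galaxy.datatypes.interval:Bed' }
    # 'mimetype' and 'subclass' are absent, so .get() falls back to the default.
    row = [ datatypes_dict.get( key, '&nbsp;' ) for key in ( 'extension', 'dtype', 'mimetype', 'subclass' ) ]
    # row == [ 'bed', 'galaxy.datatypes.interval:Bed', '&nbsp;', '&nbsp;' ]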
commit/galaxy-central: greg: Enhance the datatypes registry to: (a) enable use of config files that do not include a "sniffers" tag set section, (b) not load a datatype if the registry already includes a datatype with that extension.
by Bitbucket 16 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f37d9a0a1afc/
changeset: f37d9a0a1afc
user: greg
date: 2011-12-16 17:19:29
summary: Enhance the datatypes registry to: (a) enable use of config files that do not include a "sniffers" tag set section, (b) not load a datatype if the registry already includes a datatype with that extension.
affected #: 1 file
diff -r ef4a1377fcc62bad6feb11df126339c639b7e56a -r f37d9a0a1afc72945e1283195739fc31418dc934 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -62,7 +62,10 @@
mimetype = elem.get( 'mimetype', None )
display_in_upload = elem.get( 'display_in_upload', False )
make_subclass = galaxy.util.string_as_bool( elem.get( 'subclass', False ) )
- if extension and ( dtype or type_extension ):
+ if extension and extension in self.datatypes_by_extension:
+ self.log.debug( "Ignoring datatype with extension '%s' from '%s' because the registry already includes a datatype with that extension." \
+ % ( extension, config ) )
+ elif extension and ( dtype or type_extension ):
if dtype:
fields = dtype.split( ':' )
datatype_module = fields[0]
@@ -142,23 +145,24 @@
d_type1.add_display_application( display_app )
# Load datatype sniffers from the config
sniffers = root.find( 'sniffers' )
- for elem in sniffers.findall( 'sniffer' ):
- dtype = elem.get( 'type', None )
- if dtype:
- try:
- fields = dtype.split( ":" )
- datatype_module = fields[0]
- datatype_class = fields[1]
- module = __import__( datatype_module )
- for comp in datatype_module.split('.')[1:]:
- module = getattr(module, comp)
- aclass = getattr( module, datatype_class )()
- self.sniff_order.append( aclass )
- self.log.debug( 'Loaded sniffer for datatype: %s' % dtype )
- except Exception, exc:
- self.log.warning( 'Error appending datatype %s to sniff_order, problem: %s' % ( dtype, str( exc ) ) )
- #default values
- if len(self.datatypes_by_extension) < 1:
+ if sniffers:
+ for elem in sniffers.findall( 'sniffer' ):
+ dtype = elem.get( 'type', None )
+ if dtype:
+ try:
+ fields = dtype.split( ":" )
+ datatype_module = fields[0]
+ datatype_class = fields[1]
+ module = __import__( datatype_module )
+ for comp in datatype_module.split('.')[1:]:
+ module = getattr(module, comp)
+ aclass = getattr( module, datatype_class )()
+ self.sniff_order.append( aclass )
+ self.log.debug( 'Loaded sniffer for datatype: %s' % dtype )
+ except Exception, exc:
+ self.log.warning( 'Error appending datatype %s to sniff_order, problem: %s' % ( dtype, str( exc ) ) )
+ # Default values.
+ if not self.datatypes_by_extension:
self.datatypes_by_extension = {
'ab1' : binary.Ab1(),
'axt' : sequence.Axt(),
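A condensed sketch (hypothetical function and registry shape) of the two guards this commit adds: skip a datatype whose extension is already registered, and only walk the sniffer elements when the config actually has a "sniffers" section. Note the diff itself tests "if sniffers:", which for ElementTree elements is true only when the element has children, so an empty sniffers tag set is skipped as well:

    from xml.etree import ElementTree

    def load_datatypes_sketch( registry, sniff_order, config ):
        root = ElementTree.parse( config ).getroot()
        registration = root.find( 'registration' )
        if registration is not None:
            for elem in registration.findall( 'datatype' ):
                extension = elem.get( 'extension', None )
                if extension and extension in registry:
                    # Guard (b): the first definition of an extension wins.
                    continue
                if extension:
                    registry[ extension ] = elem.get( 'type', None )
        # Guard (a): tolerate configs that have no sniffers section at all.
        sniffers = root.find( 'sniffers' )
        if sniffers is not None:
            for elem in sniffers.findall( 'sniffer' ):
                sniff_order.append( elem.get( 'type', None ) )
        return registry, sniff_order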
commit/galaxy-central: jgoecks: Custom builds: (a) reinstitute support for len files/len entries and (b) improve UI so that it is easy to specify build definition and help is accurate.
by Bitbucket 15 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ef4a1377fcc6/
changeset: ef4a1377fcc6
user: jgoecks
date: 2011-12-15 22:21:39
summary: Custom builds: (a) reinstitute support for len files/len entries and (b) improve UI so that it is easy to specify build definition and help is accurate.
affected #: 2 files
diff -r fdcdc0115a123aa40bc46e69df769f171837534e -r ef4a1377fcc62bad6feb11df126339c639b7e56a lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py
+++ b/lib/galaxy/web/controllers/user.py
@@ -1198,14 +1198,57 @@
# Add new custom build.
name = kwds.get('name', '')
key = kwds.get('key', '')
- dataset_id = kwds.get('dataset_id', '')
- if not name or not key or not dataset_id:
+
+ # Look for build's chrom info in len_file and len_text.
+ len_file = kwds.get( 'len_file', None )
+ if getattr( len_file, "file", None ): # Check if it's a FieldStorage object
+ len_text = len_file.file.read()
+ else:
+ len_text = kwds.get( 'len_text', None )
+
+ if not len_text:
+ # Using FASTA from history.
+ dataset_id = kwds.get('dataset_id', '')
+
+ if not name or not key or not ( len_text or dataset_id ):
message = "You must specify values for all the fields."
elif key in dbkeys:
message = "There is already a custom build with that key. Delete it first if you want to replace it."
else:
- dataset_id = trans.security.decode_id( dataset_id )
- dbkeys[key] = { "name": name, "fasta": dataset_id }
+ # Have everything needed; create new build.
+ build_dict = { "name": name }
+ if len_text:
+ # Create new len file
+ new_len = trans.app.model.HistoryDatasetAssociation( extension="len", create_dataset=True, sa_session=trans.sa_session )
+ trans.sa_session.add( new_len )
+ new_len.name = name
+ new_len.visible = False
+ new_len.state = trans.app.model.Job.states.OK
+ new_len.info = "custom build .len file"
+ trans.sa_session.flush()
+ counter = 0
+ f = open(new_len.file_name, "w")
+ # LEN files have format:
+ # <chrom_name><tab><chrom_length>
+ for line in len_text.split("\n"):
+ lst = line.strip().rsplit(None, 1) # Splits at the last whitespace in the line
+ if not lst or len(lst) < 2:
+ lines_skipped += 1
+ continue
+ chrom, length = lst[0], lst[1]
+ try:
+ length = int(length)
+ except ValueError:
+ lines_skipped += 1
+ continue
+ counter += 1
+ f.write("%s\t%s\n" % (chrom, length))
+ f.close()
+ build_dict.update( { "len": new_len.id, "count": counter } )
+ else:
+ dataset_id = trans.security.decode_id( dataset_id )
+ build_dict[ "fasta" ] = dataset_id
+ dbkeys[key] = build_dict
# Save builds.
# TODO: use database table to save builds.
user.preferences['dbkeys'] = to_json_string(dbkeys)
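The parsing rules in the loop above, as a standalone sketch (hypothetical function; the controller streams matches into the new .len dataset rather than returning them): split each line at its last whitespace, require an integer length, and count everything else as skipped:

    def parse_len_text( len_text ):
        entries, lines_skipped = [], 0
        for line in len_text.split( "\n" ):
            lst = line.strip().rsplit( None, 1 )  # split at the last whitespace
            if len( lst ) < 2:
                lines_skipped += 1
                continue
            chrom, length = lst
            try:
                length = int( length )
            except ValueError:
                lines_skipped += 1
                continue
            entries.append( ( chrom, length ) )
        return entries, lines_skipped

    # parse_len_text( "chr1 197195432\nchr2 181748087\nbad line" )
    # -> ( [ ( 'chr1', 197195432 ), ( 'chr2', 181748087 ) ], 1 )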
diff -r fdcdc0115a123aa40bc46e69df769f171837534e -r ef4a1377fcc62bad6feb11df126339c639b7e56a templates/user/dbkeys.mako
--- a/templates/user/dbkeys.mako
+++ b/templates/user/dbkeys.mako
@@ -40,6 +40,15 @@
padding: 10px;
}
%endif
+ div.def_tab {
+ float: left;
+ padding: 0.2em 0.5em;
+ background-color: white;
+ }
+ div.def_tab.active {
+ background-color: #CCF;
+ border: solid 1px #66A;
+ }
</style></%def>
@@ -60,6 +69,43 @@
$("#show_installed_builds").click(function() {
$("#installed_builds").show();
});
+
+ // Set up behavior for build definition tab controls.
+ $("div.def_tab > a").each(function() {
+ $(this).click(function() {
+ var tab_id = $(this).attr("id");
+
+ // Hide all build inputs, help.
+ $("div.build_definition").children(":input").hide();
+ $(".infomessagesmall > div").hide();
+
+ // Show input item, help corresponding to tab id.
+ $("#" + tab_id + "_input").show();
+ $("." + tab_id + "_help").show();
+
+ // Update tabs.
+ $("div.def_tab").removeClass("active");
+ $(this).parent().addClass("active");
+ });
+ });
+
+ ## If there are fasta HDAs available, show fasta tab; otherwise show len file tab.
+ // Set starting tab.
+ % if fasta_hdas.first():
+ $("#fasta").click();
+ % else:
+ $("#len_file").click();
+ % endif
+
+ // Before submit, remove inputs not associated with the active tab.
+ $("#submit").click(function() {
+ var id = $(".active > a").attr("id");
+ $("div.build_definition").children(":input").each(function() {
+ if ( $(this).attr("id") !== (id + "_input") ) {
+ $(this).remove();
+ }
+ });
+ });
});
</script>
@@ -70,7 +116,6 @@
</%def><%def name="body()">
-
% if message:
<div class="errormessagelarge">${message}</div>
% elif lines_skipped > 0:
@@ -123,50 +168,82 @@
<hr /><h3>Add a Custom Build</h3><form action="dbkeys" method="post" enctype="multipart/form-data">
+ ## Custom build via fasta in history.
<div class="toolForm" style="float: left;"><div class="toolFormTitle">New Build</div><div class="toolFormBody"><div class="form-row">
- <label for="name">Build Name (eg: Mouse):</label>
+ <label for="name">Name (eg: Hamster):</label><input type="text" id="name" name="name" /></div><div class="form-row">
- <label for="key">Build Key (eg: mm9):</label>
+ <label for="key">Key (eg: hamster_v1):</label><input type="text" id="key" name="key" /></div>
- <div class="form-row">
- <label for="len_file">Build Genome:</label>
- <select name="dataset_id">
+ <div class="form-row build_definition">
+ <label>Definition:</label>
+ <div class="def_tab">
+ <a id="fasta" href="javascript:void(0)">FASTA</a>
+ </div>
+ <div class="def_tab">
+ <a id="len_file" href="javascript:void(0)">Len File</a>
+ </div>
+ <div class="def_tab">
+ <a id="len_entry" href="javascript:void(0)">Len Entry</a>
+ </div>
+ <div style="clear: both; padding-bottom: 0.5em"></div>
+ <select id="fasta_input" name="dataset_id">
%for dataset in fasta_hdas:
<option value="${trans.security.encode_id( dataset.id )}">${dataset.hid}: ${dataset.name}</option>
%endfor
</select>
- </div>
-
- <div class="form-row"><input type="submit" name="add" value="Submit"/></div>
+ <input type="file" id="len_file_input" name="len_file" /></input>
+ <textarea id="len_entry_input" name="len_text" cols="30" rows="8"></textarea>
+ </div>
+ <div class="form-row"><input id="submit" type="submit" name="add" value="Submit"/></div></div></div></form><div class="infomessagesmall" style="float: left; margin-left: 10px; width: 40%;">
- <h3>Length Format</h3>
- <p>
- The length format is two-column, separated by whitespace, of the form:
- <pre>chrom/contig length of chrom/contig</pre>
- </p>
- <p>
- For example, the first few entries of <em>mm9.len</em> are as follows:
- <pre>
-chr1 197195432
-chr2 181748087
-chr3 159599783
-chr4 155630120
-chr5 152537259
- </pre>
- </p>
+ <div class="fasta_help">
+ <h3>FASTA format</h3>
+ <p>
+ This is a multi-fasta file from your current history that provides the genome
+ sequences for each chromosome/contig in your build.
+ </p>
+
+ <p>
+ Here is a snippet from an example multi-fasta file:
+ <pre>
+ >chr1
+ ATTATATATAAGACCACAGAGAGAATATTTTGCCCGG...
+ >chr2
+ GGCGGCCGCGGCGATATAGAACTACTCATTATATATA...
+ ...
+ </pre>
+ </p>
+ </div>
+ <div class="len_file_help len_entry_help">
+ <h3>Length Format</h3>
+ <p>
+ The length format is two-column, separated by whitespace, of the form:
+ <pre>chrom/contig length of chrom/contig</pre>
+ </p>
+ <p>
+ For example, the first few entries of <em>mm9.len</em> are as follows:
+ <pre>
+ chr1 197195432
+ chr2 181748087
+ chr3 159599783
+ chr4 155630120
+ chr5 152537259
+ </pre>
+ </p>
- <p>Trackster uses this information to populate the select box for chrom/contig, and
- to set the maximum basepair of the track browser. You may either upload a .len file
- of this format, or directly enter the information into the box.</p>
-
+ <p>Trackster uses this information to populate the select box for chrom/contig, and
+ to set the maximum basepair of the track browser. You may either upload a .len file
+ of this format (Len File option), or directly enter the information into the box
+ (Len Entry option).</p>
+ </div></div></%def>
\ No newline at end of file
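After this change a user's 'dbkeys' preference holds one of two definition shapes per build key, depending on which tab defined the build; hypothetical values:

    dbkeys = {
        'hamster_v1' : { 'name' : 'Hamster', 'len' : 42, 'count' : 21 },  # Len File / Len Entry
        'mm9_custom' : { 'name' : 'Mouse', 'fasta' : 1234 },              # FASTA from history
    }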
commit/galaxy-central: greg: Reset repository metadata for installed tool shed repositories when a new update to the repository contents has been pulled from the tool shed.
by Bitbucket 15 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/fdcdc0115a12/
changeset: fdcdc0115a12
user: greg
date: 2011-12-15 16:49:40
summary: Reset repository metadata for installed tool shed repositories when a new update to the repository contents has been pulled from the tool shed.
affected #: 3 files
diff -r 13ba6909faae12b0a7efcdc76e0797c5e7903929 -r fdcdc0115a123aa40bc46e69df769f171837534e lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -73,18 +73,19 @@
if returncode == 0:
returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
if returncode == 0:
- metadata_dict = load_repository_contents( self.app,
- name,
- description,
- self.repository_owner,
- changeset_revision,
- repository_clone_url,
- self.install_tool_config,
- self.tool_path,
- tool_section,
- relative_install_dir,
- current_working_dir,
- tmp_name )
+ metadata_dict = load_repository_contents( app=self.app,
+ name=name,
+ description=description,
+ owner=self.repository_owner,
+ changeset_revision=changeset_revision,
+ tool_path=self.tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tmp_name=tmp_name,
+ tool_section=tool_section,
+ shed_tool_conf=self.install_tool_config,
+ new_install=True )
# Add a new record to the tool_id_guid_map table for each
# tool in the repository if one doesn't already exist.
if 'tools' in metadata_dict:
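Worth noting: the new load_repository_contents signature (see the shed_util.py hunk below) reorders several parameters, so both call sites switch to keyword arguments, which stay correct across such reorders. A toy example:

    def f( a, b ):
        return a, b

    f( 1, 2 )        # silently wrong if the definition later becomes def f( b, a )
    f( a=1, b=2 )    # unambiguous, and unaffected by any reordering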
diff -r 13ba6909faae12b0a7efcdc76e0797c5e7903929 -r fdcdc0115a123aa40bc46e69df769f171837534e lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -2,6 +2,7 @@
from datetime import date, datetime, timedelta
from time import strftime
from galaxy import util
+from galaxy.util.json import *
from galaxy.tools import ToolSection
from galaxy.tools.search import ToolBoxSearch
from galaxy.model.orm import *
@@ -77,7 +78,7 @@
os.chdir( current_working_dir )
tmp_stderr.close()
return returncode, tmp_name
-def create_or_undelete_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict, owner='' ):
+def create_or_update_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict, owner='' ):
# This method is used by the InstallManager, which does not have access to trans.
sa_session = app.model.context.current
tmp_url = clean_repository_clone_url( repository_clone_url )
@@ -85,16 +86,13 @@
if not owner:
owner = get_repository_owner( tmp_url )
includes_datatypes = 'datatypes_config' in metadata_dict
- flush_needed = False
tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision )
if tool_shed_repository:
- if tool_shed_repository.deleted:
- tool_shed_repository.description = description
- tool_shed_repository.changeset_revision = changeset_revision
- tool_shed_repository.metadata = metadata_dict
- tool_shed_repository.includes_datatypes = includes_datatypes
- tool_shed_repository.deleted = False
- flush_needed = True
+ tool_shed_repository.description = description
+ tool_shed_repository.changeset_revision = changeset_revision
+ tool_shed_repository.metadata = metadata_dict
+ tool_shed_repository.includes_datatypes = includes_datatypes
+ tool_shed_repository.deleted = False
else:
tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed,
name=name,
@@ -104,10 +102,8 @@
changeset_revision=changeset_revision,
metadata=metadata_dict,
includes_datatypes=includes_datatypes )
- flush_needed = True
- if flush_needed:
- sa_session.add( tool_shed_repository )
- sa_session.flush()
+ sa_session.add( tool_shed_repository )
+ sa_session.flush()
def generate_datatypes_metadata( datatypes_config, metadata_dict ):
"""
Update the received metadata_dict with changes that have been applied
@@ -471,12 +467,17 @@
except Exception, e:
log.debug( "Exception importing datatypes code file included in installed repository: %s" % str( e ) )
app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=datatypes_config, imported_module=imported_module )
-def load_repository_contents( app, name, description, owner, changeset_revision, repository_clone_url, shed_tool_conf,
- tool_path, tool_section, relative_install_dir, current_working_dir, tmp_name ):
+def load_repository_contents( app, name, description, owner, changeset_revision, tool_path, repository_clone_url, relative_install_dir,
+ current_working_dir, tmp_name, tool_section=None, shed_tool_conf=None, new_install=True ):
# This method is used by the InstallManager, which does not have access to trans.
# Generate the metadata for the installed tool shed repository. It is imperative that
# the installed repository is updated to the desired changeset_revision before metadata
- # is set because the process for setting metadata uses the repository files on disk.
+ # is set because the process for setting metadata uses the repository files on disk. This
+ # method is called when new tools have been installed (in which case values should be received
+ # for tool_section and shed_tool_conf, and new_install should be left at its default value)
+ # and when updates have been pulled to previously installed repositories (in which case the
+ # default value None is set for tool_section and shed_tool_conf, and the value of new_install
+ # is passed as False).
metadata_dict = generate_metadata( app.toolbox, relative_install_dir, repository_clone_url )
if 'datatypes_config' in metadata_dict:
datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
@@ -497,42 +498,38 @@
repository_tools_tups = handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups )
# Handle tools that use fabric scripts to install dependencies.
handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups )
- # Generate a new entry for the tool config.
- elem_list = generate_tool_panel_elem_list( name,
- repository_clone_url,
- changeset_revision,
- repository_tools_tups,
- tool_section=tool_section,
- owner=owner )
- if tool_section:
- for section_elem in elem_list:
- # Load the section into the tool panel.
- app.toolbox.load_section_tag_set( section_elem, app.toolbox.tool_panel, tool_path )
- else:
- # Load the tools into the tool panel outside of any sections.
- for tool_elem in elem_list:
- guid = tool_elem.get( 'guid' )
- app.toolbox.load_tool_tag_set( tool_elem, app.toolbox.tool_panel, tool_path=tool_path, guid=guid )
+ if new_install:
+ # Generate a new entry for the tool config.
+ elem_list = generate_tool_panel_elem_list( name,
+ repository_clone_url,
+ changeset_revision,
+ repository_tools_tups,
+ tool_section=tool_section,
+ owner=owner )
+ if tool_section:
+ for section_elem in elem_list:
+ # Load the section into the tool panel.
+ app.toolbox.load_section_tag_set( section_elem, app.toolbox.tool_panel, tool_path )
+ else:
+ # Load the tools into the tool panel outside of any sections.
+ for tool_elem in elem_list:
+ guid = tool_elem.get( 'guid' )
+ app.toolbox.load_tool_tag_set( tool_elem, app.toolbox.tool_panel, tool_path=tool_path, guid=guid )
+ for elem_entry in elem_list:
+ # Append the new entry (either section or list of tools) to the shed_tool_config file.
+ add_shed_tool_conf_entry( app, shed_tool_conf, elem_entry )
+ if app.toolbox_search.enabled:
+ # If search support for tools is enabled, index the new installed tools.
+ app.toolbox_search = ToolBoxSearch( app.toolbox )
# Remove the temporary file
try:
os.unlink( tmp_name )
except:
pass
- for elem_entry in elem_list:
- # Append the new entry (either section or list of tools) to the shed_tool_config file.
- add_shed_tool_conf_entry( app, shed_tool_conf, elem_entry )
- if app.toolbox_search.enabled:
- # If search support for tools is enabled, index the new installed tools.
- app.toolbox_search = ToolBoxSearch( app.toolbox )
# Add a new record to the tool_shed_repository table if one doesn't
# already exist. If one exists but is marked deleted, undelete it.
- log.debug( "Adding new row to tool_shed_repository table for repository '%s'" % name )
- create_or_undelete_tool_shed_repository( app,
- name,
- description,
- changeset_revision,
- repository_clone_url,
- metadata_dict )
+ log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name )
+ create_or_update_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict )
return metadata_dict
def pretty_print_xml( elem, level=0 ):
pad = ' '
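A reduced sketch (hypothetical callables, not the real signatures) of the control flow load_repository_contents now implements: metadata is always regenerated from disk and the repository row is always created or updated, but tool panel wiring happens only on a fresh install:

    def sync_repository( generate_metadata, wire_tool_panel, upsert_repository_row, new_install=True ):
        metadata_dict = generate_metadata()
        if new_install and 'tools' in metadata_dict:
            # Only a new install loads sections/tools into the panel and
            # appends an entry to the shed tool config file.
            wire_tool_panel( metadata_dict )
        # Pulled updates still refresh the persisted metadata.
        upsert_repository_row( metadata_dict )
        return metadata_dict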
diff -r 13ba6909faae12b0a7efcdc76e0797c5e7903929 -r fdcdc0115a123aa40bc46e69df769f171837534e lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -237,18 +237,19 @@
returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
if returncode == 0:
owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
- metadata_dict = load_repository_contents( trans.app,
- name,
- description,
- owner,
- changeset_revision,
- repository_clone_url,
- shed_tool_conf,
- tool_path,
- tool_section,
- relative_install_dir,
- current_working_dir,
- tmp_name )
+ metadata_dict = load_repository_contents( app=trans.app,
+ name=name,
+ description=description,
+ owner=owner,
+ changeset_revision=changeset_revision,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tmp_name=tmp_name,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf,
+ new_install=True )
installed_repository_names.append( name )
else:
tmp_stderr = open( tmp_name, 'rb' )
@@ -302,7 +303,7 @@
status = params.get( 'status', 'done' )
repository = get_repository( trans, kwd[ 'id' ] )
description = util.restore_text( params.get( 'description', repository.description ) )
- relative_install_dir = self.__get_relative_install_dir( trans, repository )
+ tool_path, relative_install_dir = self.__get_tool_path_and_relative_install_dir( trans, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
if params.get( 'edit_repository_button', False ):
if description != repository.description:
@@ -351,13 +352,28 @@
message = "The cloned tool shed repository named '%s' is current (there are no updates available)." % name
else:
current_working_dir = os.getcwd()
- relative_install_dir = self.__get_relative_install_dir( trans, repository )
+ tool_path, relative_install_dir = self.__get_tool_path_and_relative_install_dir( trans, repository )
if relative_install_dir:
repo_files_dir = os.path.join( relative_install_dir, name )
returncode, tmp_name = pull_repository( current_working_dir, repo_files_dir, name )
if returncode == 0:
returncode, tmp_name = update_repository( current_working_dir, repo_files_dir, latest_changeset_revision )
if returncode == 0:
+ # Update the repository metadata.
+ repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
+ metadata_dict = load_repository_contents( app=trans.app,
+ name=name,
+ description=repository.description,
+ owner=owner,
+ changeset_revision=changeset_revision,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tmp_name=tmp_name,
+ tool_section=None,
+ shed_tool_conf=None,
+ new_install=False )
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
repository.update_available = False
@@ -408,8 +424,9 @@
metadata=metadata,
message=message,
status=status )
- def __get_relative_install_dir( self, trans, repository ):
- # Get the directory where the repository is install.
+ def __get_tool_path_and_relative_install_dir( self, trans, repository ):
+ # Return both the tool_path configured in the relevant shed_tool_conf and
+ # the relative path to the directory where the repository is installed.
tool_shed = clean_tool_shed_url( repository.tool_shed )
partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
# Get the relative tool installation paths from each of the shed tool configs.
@@ -420,7 +437,7 @@
relative_install_dir = os.path.join( tool_path, partial_install_dir )
if os.path.isdir( relative_install_dir ):
break
- return relative_install_dir
+ return tool_path, relative_install_dir
def __generate_tool_path( self, repository_clone_url, changeset_revision ):
"""
Generate a tool path that guarantees repositories with the same name will always be installed
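A sketch of the revised lookup (hypothetical config structure): probe each shed tool config's tool_path until the repository's install directory exists, and return the pair so callers such as the update path no longer re-derive tool_path:

    import os

    def find_tool_path_and_install_dir( shed_tool_configs, partial_install_dir ):
        tool_path = relative_install_dir = None
        for shed_tool_conf in shed_tool_configs:
            candidate_tool_path = shed_tool_conf[ 'tool_path' ]
            candidate = os.path.join( candidate_tool_path, partial_install_dir )
            if os.path.isdir( candidate ):
                tool_path, relative_install_dir = candidate_tool_path, candidate
                break
        return tool_path, relative_install_dir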