2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/723bd4c7197a/
Changeset: 723bd4c7197a
Branch: twobit_upload
User: ghuls
Date: 2013-05-27 15:02:06
Summary: Fix detection of 2bit files when uploading them.
When 2bit files were uploaded, they were not detected as 2bit files:
Example:
test.2bit
empty
format: twobit, database: hg19
The uploaded binary file contains inappropriate content
This issue is solved by registering the TwoBit file format in:
./lib/galaxy/datatypes/binary.py
Affected #: 1 file
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r 723bd4c7197aec7ac42a20f07f48112cd5654a29 lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py
+++ b/lib/galaxy/datatypes/binary.py
@@ -399,3 +399,5 @@
return dataset.peek
except:
return "Binary TwoBit format nucleotide file (%s)" % (data.nice_size(dataset.get_size()))
+
+Binary.register_sniffable_binary_format("twobit", "twobit", TwoBit)
https://bitbucket.org/galaxy/galaxy-central/commits/eb910f78fa5b/
Changeset: eb910f78fa5b
User: dannon
Date: 2013-06-15 19:25:44
Summary: Merged in ghuls/galaxy-central/twobit_upload (pull request #170)
Fix detection of 2bit files when uploading them.
Affected #: 1 file
diff -r b186609ec0cd60c8401b18b1c849745ce17ef0bd -r eb910f78fa5bb75eed2b967d1bdac69b2ae47894 lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py
+++ b/lib/galaxy/datatypes/binary.py
@@ -399,3 +399,5 @@
return dataset.peek
except:
return "Binary TwoBit format nucleotide file (%s)" % (data.nice_size(dataset.get_size()))
+
+Binary.register_sniffable_binary_format("twobit", "twobit", TwoBit)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2efb1083676b/
Changeset: 2efb1083676b
Branch: stable
User: carlfeberhard
Date: 2013-06-14 21:06:56
Summary: Remove print statements from api/search, ctrl/visualization, ctrl/workflow
Affected #: 3 files
diff -r 68c705bf0325a54d2424a7b8ed44232c57a3b506 -r 2efb1083676b56c4a2678e43600fd707756fca27 lib/galaxy/webapps/galaxy/api/search.py
--- a/lib/galaxy/webapps/galaxy/api/search.py
+++ b/lib/galaxy/webapps/galaxy/api/search.py
@@ -30,7 +30,6 @@
current_user_roles = trans.get_current_user_roles()
try:
results = query.process(trans)
- print results
except Exception, e:
return {'error' : str(e)}
for item in results:
diff -r 68c705bf0325a54d2424a7b8ed44232c57a3b506 -r 2efb1083676b56c4a2678e43600fd707756fca27 lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -848,7 +848,6 @@
get the visualization with the given id; otherwise, create a new visualization using
a given dataset and regions.
"""
- print 'sweepster:', id, hda_ldda, dataset_id, regions
regions = regions or '{}'
# Need to create history if necessary in order to create tool form.
trans.get_history( create=True )
@@ -861,7 +860,6 @@
else:
# Loading new visualization.
dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
- print 'dataset:', dataset
job = get_dataset_job( dataset )
viz_config = {
'dataset_id': dataset_id,
diff -r 68c705bf0325a54d2424a7b8ed44232c57a3b506 -r 2efb1083676b56c4a2678e43600fd707756fca27 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -132,7 +132,6 @@
@web.expose
def index( self, trans ):
- print trans.request.host_url
return self.list( trans )
@web.expose
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/12a45cc8c67f/
Changeset: 12a45cc8c67f
User: carlfeberhard
Date: 2013-06-14 21:01:12
Summary: Remove print statements from api/search, ctrl/visualization, ctrl/workflow
Affected #: 3 files
diff -r 48b7d75bbb339e27a6db906dd5b1b2a2dbbaaf71 -r 12a45cc8c67f86f595f50bfcf2155892f5eafeb0 lib/galaxy/webapps/galaxy/api/search.py
--- a/lib/galaxy/webapps/galaxy/api/search.py
+++ b/lib/galaxy/webapps/galaxy/api/search.py
@@ -30,7 +30,6 @@
current_user_roles = trans.get_current_user_roles()
try:
results = query.process(trans)
- print results
except Exception, e:
return {'error' : str(e)}
for item in results:
diff -r 48b7d75bbb339e27a6db906dd5b1b2a2dbbaaf71 -r 12a45cc8c67f86f595f50bfcf2155892f5eafeb0 lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -848,7 +848,6 @@
get the visualization with the given id; otherwise, create a new visualization using
a given dataset and regions.
"""
- print 'sweepster:', id, hda_ldda, dataset_id, regions
regions = regions or '{}'
# Need to create history if necessary in order to create tool form.
trans.get_history( create=True )
@@ -861,7 +860,6 @@
else:
# Loading new visualization.
dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
- print 'dataset:', dataset
job = get_dataset_job( dataset )
viz_config = {
'dataset_id': dataset_id,
diff -r 48b7d75bbb339e27a6db906dd5b1b2a2dbbaaf71 -r 12a45cc8c67f86f595f50bfcf2155892f5eafeb0 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -132,7 +132,6 @@
@web.expose
def index( self, trans ):
- print trans.request.host_url
return self.list( trans )
@web.expose
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/56b18b56417f/
Changeset: 56b18b56417f
User: jmchilton
Date: 2012-11-10 21:26:29
Summary: Refactor the setup of valid extensions in DataToolParameter to eliminate code duplication and extra computation (for instance, multiple loadings of the same datatypes in unit tests), and to reduce code nesting.
Affected #: 1 file
diff -r 340438c62171578078323d39da398d5053b69d0a -r 56b18b56417f0c2613c79055b21170a3a92379ac lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1371,24 +1371,25 @@
# Add metadata validator
if not string_as_bool( elem.get( 'no_validation', False ) ):
self.validators.append( validation.MetadataValidator() )
+ # Find datatypes_registry
+ if tool is None:
+ if trans:
+ # Must account for "Input Dataset" types, which while not a tool still need access to the real registry.
+ # A handle to the transaction (and thus app) will be given by the module.
+ datatypes_registry = trans.app.datatypes_registry
+ else:
+ #This occurs for things such as unit tests
+ import galaxy.datatypes.registry
+ datatypes_registry = galaxy.datatypes.registry.Registry()
+ datatypes_registry.load_datatypes()
+ else:
+ datatypes_registry = tool.app.datatypes_registry
# Build tuple of classes for supported data formats
formats = []
self.extensions = elem.get( 'format', 'data' ).split( "," )
for extension in self.extensions:
extension = extension.strip()
- if tool is None:
- if trans:
- # Must account for "Input Dataset" types, which while not a tool still need access to the real registry.
- # A handle to the transaction (and thus app) will be given by the module.
- formats.append( trans.app.datatypes_registry.get_datatype_by_extension( extension.lower() ).__class__ )
- else:
- #This occurs for things such as unit tests
- import galaxy.datatypes.registry
- datatypes_registry = galaxy.datatypes.registry.Registry()
- datatypes_registry.load_datatypes()
- formats.append( datatypes_registry.get_datatype_by_extension( extension.lower() ).__class__ )
- else:
- formats.append( tool.app.datatypes_registry.get_datatype_by_extension( extension.lower() ).__class__ )
+ formats.append( datatypes_registry.get_datatype_by_extension( extension.lower() ).__class__ )
self.formats = tuple( formats )
self.multiple = string_as_bool( elem.get( 'multiple', False ) )
# TODO: Enhance dynamic options for DataToolParameters. Currently,
https://bitbucket.org/galaxy/galaxy-central/commits/b999689a0130/
Changeset: b999689a0130
User: jmchilton
Date: 2012-11-15 15:11:02
Summary: Centralize the logic for checking whether a given galaxy datatype is any of the datatypes in a given iterator (tuple, list) (used when building up tool input options and evaluating tools, checking conversions, etc...). I think this is a solid (though slight) design improvement in its own right, but it will also ease the eventual merging of composite multifiles (https://bitbucket.org/galaxyp/galaxy-central-homogeneous-composite-datatypes) into galaxy-central, or at least limit the long-term divergence between galaxy-central and that fork.
Affected #: 4 files
diff -r 56b18b56417f0c2613c79055b21170a3a92379ac -r b999689a013025f2a009e313a3b6e6f65354e005 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -529,7 +529,13 @@
def has_resolution(self):
return False
-
+ def matches_any( self, target_datatypes ):
+ """
+ Check if this datatype is of any of the target_datatypes or is
+ a subtype thereof.
+ """
+ datatype_classes = tuple( [ datatype.__class__ for datatype in target_datatypes ] )
+ return isinstance( self, datatype_classes )
def merge( split_files, output_file):
"""
diff -r 56b18b56417f0c2613c79055b21170a3a92379ac -r b999689a013025f2a009e313a3b6e6f65354e005 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -586,7 +586,7 @@
def find_conversion_destination_for_dataset_by_extensions( self, dataset, accepted_formats, converter_safe = True ):
"""Returns ( target_ext, existing converted dataset )"""
for convert_ext in self.get_converters_by_datatype( dataset.ext ):
- if isinstance( self.get_datatype_by_extension( convert_ext ), accepted_formats ):
+ if self.get_datatype_by_extension( convert_ext ).matches_any( accepted_formats ):
converted_dataset = dataset.get_converted_files_by_type( convert_ext )
if converted_dataset:
ret_data = converted_dataset
diff -r 56b18b56417f0c2613c79055b21170a3a92379ac -r b999689a013025f2a009e313a3b6e6f65354e005 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -36,7 +36,7 @@
def process_dataset( data, formats = None ):
if formats is None:
formats = input.formats
- if data and not isinstance( data.datatype, formats ):
+ if data and not data.datatype.matches_any( formats ):
# Need to refresh in case this conversion just took place, i.e. input above in tool performed the same conversion
trans.sa_session.refresh( data )
target_ext, converted_dataset = data.find_conversion_destination( formats )
@@ -66,7 +66,7 @@
conversions = []
for conversion_name, conversion_extensions, conversion_datatypes in input.conversions:
new_data = process_dataset( input_datasets[ prefix + input.name + str( i + 1 ) ], conversion_datatypes )
- if not new_data or isinstance( new_data.datatype, conversion_datatypes ):
+ if not new_data or new_data.datatype.matches_any( conversion_datatypes ):
input_datasets[ prefix + conversion_name + str( i + 1 ) ] = new_data
conversions.append( ( conversion_name, new_data ) )
else:
@@ -86,7 +86,7 @@
conversions = []
for conversion_name, conversion_extensions, conversion_datatypes in input.conversions:
new_data = process_dataset( input_datasets[ prefix + input.name ], conversion_datatypes )
- if not new_data or isinstance( new_data.datatype, conversion_datatypes ):
+ if not new_data or new_data.datatype.matches_any( conversion_datatypes ):
input_datasets[ prefix + conversion_name ] = new_data
conversions.append( ( conversion_name, new_data ) )
else:
@@ -261,7 +261,9 @@
ext = input_ext
if output.format_source is not None and output.format_source in inp_data:
try:
- ext = inp_data[output.format_source].ext
+ input_dataset = inp_data[output.format_source]
+ input_extension = input_dataset.ext
+ ext = input_extension
except Exception, e:
pass
diff -r 56b18b56417f0c2613c79055b21170a3a92379ac -r b999689a013025f2a009e313a3b6e6f65354e005 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -785,7 +785,7 @@
if isinstance( dep_value, RuntimeValue ):
return True
#dataset not ready yet
- if hasattr( self, 'ref_input' ) and isinstance( dep_value, self.tool.app.model.HistoryDatasetAssociation ) and ( dep_value.is_pending or not isinstance( dep_value.datatype, self.ref_input.formats ) ):
+ if hasattr( self, 'ref_input' ) and isinstance( dep_value, self.tool.app.model.HistoryDatasetAssociation ) and ( dep_value.is_pending or not dep_value.datatype.matches_any( self.ref_input.formats ) ):
return True
# Dynamic, but all dependenceis are known and have values
return False
@@ -1037,7 +1037,7 @@
if not dataset.metadata.columns:
# Only allow late validation if the dataset is not yet ready
# (since we have reason to expect the metadata to be ready eventually)
- if dataset.is_pending or not isinstance( dataset.datatype, self.ref_input.formats ):
+ if dataset.is_pending or not dataset.datatype.matches_any( self.ref_input.formats ):
return True
# No late validation
return False
@@ -1389,8 +1389,8 @@
self.extensions = elem.get( 'format', 'data' ).split( "," )
for extension in self.extensions:
extension = extension.strip()
- formats.append( datatypes_registry.get_datatype_by_extension( extension.lower() ).__class__ )
- self.formats = tuple( formats )
+ formats.append( datatypes_registry.get_datatype_by_extension( extension.lower() ) )
+ self.formats = formats
self.multiple = string_as_bool( elem.get( 'multiple', False ) )
# TODO: Enhance dynamic options for DataToolParameters. Currently,
# only the special case key='build' of type='data_meta' is
@@ -1414,7 +1414,7 @@
conv_extensions = conv_elem.get( "type" ) #target datatype extension
# FIXME: conv_extensions should be able to be an ordered list
assert None not in [ name, type ], 'A name (%s) and type (%s) are required for explicit conversion' % ( name, type )
- conv_types = tool.app.datatypes_registry.get_datatype_by_extension( conv_extensions.lower() ).__class__
+ conv_types = tool.app.datatypes_registry.get_datatype_by_extension( conv_extensions.lower() )
self.conversions.append( ( name, conv_extensions, conv_types ) )
def get_html_field( self, trans=None, value=None, other_values={} ):
@@ -1452,7 +1452,7 @@
continue
if self.options and self._options_filter_attribute( hda ) != filter_value:
continue
- if isinstance( hda.datatype, self.formats):
+ if hda.datatype.matches_any( self.formats ):
selected = ( value and ( hda in value ) )
if hda.visible:
hidden_text = ""
@@ -1519,7 +1519,7 @@
for i, data in enumerate( datasets ):
if data.visible and not data.deleted and data.state not in [data.states.ERROR, data.states.DISCARDED]:
is_valid = False
- if isinstance( data.datatype, self.formats ):
+ if data.datatype.matches_any( self.formats ):
is_valid = True
else:
target_ext, converted_dataset = data.find_conversion_destination( self.formats )
https://bitbucket.org/galaxy/galaxy-central/commits/c011bbda72b3/
Changeset: c011bbda72b3
User: jmchilton
Date: 2012-11-15 15:23:15
Summary: Small refactoring of logic used to build up list of valid formats for an input data parameter.
Affected #: 1 file
diff -r b999689a013025f2a009e313a3b6e6f65354e005 -r c011bbda72b3e3fd974a759a53e4baef7b6121a2 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1387,9 +1387,9 @@
# Build tuple of classes for supported data formats
formats = []
self.extensions = elem.get( 'format', 'data' ).split( "," )
- for extension in self.extensions:
- extension = extension.strip()
- formats.append( datatypes_registry.get_datatype_by_extension( extension.lower() ) )
+ normalized_extensions = [extension.strip().lower() for extension in self.extensions]
+ for extension in normalized_extensions:
+ formats.append( datatypes_registry.get_datatype_by_extension( extension ) )
self.formats = formats
self.multiple = string_as_bool( elem.get( 'multiple', False ) )
# TODO: Enhance dynamic options for DataToolParameters. Currently,
https://bitbucket.org/galaxy/galaxy-central/commits/ada2dca97078/
Changeset: ada2dca97078
User: jmchilton
Date: 2012-12-04 06:04:36
Summary: Return from process_dataset immediately if the input data is `None` in collect_input_datasets of DefaultToolAction; this simplifies the code and eliminates some unneeded computation.
Affected #: 1 file
diff -r c011bbda72b3e3fd974a759a53e4baef7b6121a2 -r ada2dca97078b552d12b28e7da65d9667e28b125 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -34,9 +34,11 @@
input_datasets = dict()
def visitor( prefix, input, value, parent = None ):
def process_dataset( data, formats = None ):
+ if not data:
+ return data
if formats is None:
formats = input.formats
- if data and not data.datatype.matches_any( formats ):
+ if not data.datatype.matches_any( formats ):
# Need to refresh in case this conversion just took place, i.e. input above in tool performed the same conversion
trans.sa_session.refresh( data )
target_ext, converted_dataset = data.find_conversion_destination( formats )
@@ -54,7 +56,7 @@
trans.sa_session.flush()
data = new_data
current_user_roles = trans.get_current_user_roles()
- if data and not trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
+ if not trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
raise "User does not have permission to use a dataset (%s) provided for input." % data.id
return data
if isinstance( input, DataToolParameter ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.