[hg] galaxy 3777: Several Fixes for Uploading Composite datatype...
details:   http://www.bx.psu.edu/hg/galaxy/rev/b762ea8e5508
changeset: 3777:b762ea8e5508
user:      Dan Blankenberg <dan@bx.psu.edu>
date:      Wed May 12 15:57:05 2010 -0400
description:
Several Fixes for Uploading Composite datatypes.
Revert changeset 3769:69e70e588a8e.

diffstat:

 lib/galaxy/tools/actions/upload_common.py |  3 +--
 lib/galaxy/tools/parameters/grouping.py   | 23 +++++++++++++++--------
 lib/galaxy/web/controllers/tool_runner.py |  2 +-
 tools/data_source/upload.py               |  2 +-
 4 files changed, 18 insertions(+), 12 deletions(-)

diffs (101 lines):

diff -r 997d55c65f49 -r b762ea8e5508 lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py	Wed May 12 15:18:40 2010 -0400
+++ b/lib/galaxy/tools/actions/upload_common.py	Wed May 12 15:57:05 2010 -0400
@@ -102,8 +102,7 @@
     for data in precreated_datasets:
         log.info( 'Cleaned up unclaimed precreated dataset (%s).' % ( data.id ) )
         data.state = data.states.ERROR
-        data.info = 'Unused precreated dataset to be deleted.'
-        data.deleted = True
+        data.info = 'No file contents were available.'
 
 def new_history_upload( trans, uploaded_dataset, state=None ):
     hda = trans.app.model.HistoryDatasetAssociation( name = uploaded_dataset.name,
diff -r 997d55c65f49 -r b762ea8e5508 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py	Wed May 12 15:18:40 2010 -0400
+++ b/lib/galaxy/tools/parameters/grouping.py	Wed May 12 15:57:05 2010 -0400
@@ -99,6 +99,17 @@
         self.default_file_type = 'txt'
         self.file_type_to_ext = { 'auto':self.default_file_type }
         self.metadata_ref = 'files_metadata'
+    def get_composite_dataset_name( self, context ):
+        #FIXME: HACK
+        #Special case of using 'base_name' metadata for use as Dataset name needs to be done in a General Fashion, as defined within a particular Datatype.
+
+        #We get two different types of contexts here, one straight from submitted parameters, the other after being parsed into tool inputs
+        dataset_name = context.get('files_metadata|base_name', None )
+        if dataset_name is None:
+            dataset_name = context.get('files_metadata', {} ).get( 'base_name', None )
+        if dataset_name is None:
+            dataset_name = 'Uploaded Composite Dataset (%s)' % self.get_file_type( context )
+        return dataset_name
     def get_file_base_name( self, context ):
         fd = context.get('files_metadata|base_name','Galaxy_Composite_file')
         return fd
@@ -241,6 +252,7 @@
             for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
                 if file_bunch.path:
                     break
+            file_bunch.space_to_tab = space_to_tab
             return file_bunch, warnings
         def get_filenames( context ):
             rval = []
@@ -287,24 +299,18 @@
                 if meta_spec.set_in_upload:
                     if meta_name in files_metadata:
                         dataset.metadata[ meta_name ] = files_metadata[ meta_name ]
-            dataset_name = None
-            dataset_info = None
+            dataset.precreated_name = dataset.name = self.get_composite_dataset_name( context )
             if dataset.datatype.composite_type == 'auto_primary_file':
                 #replace sniff here with just creating an empty file
                 temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file( dataset ) ), prefix='upload_auto_primary_file' )
                 dataset.primary_file = temp_name
                 dataset.space_to_tab = False
-                dsn = dataset.metadata.get('base_name','Uploaded Composite Dataset (%s)' % file_type)
-                dataset.precreated_name = dataset.name = dsn
             else:
                 file_bunch, warnings = get_one_filename( groups_incoming[ 0 ] )
                 writable_files_offset = 1
                 dataset.primary_file = file_bunch.path
                 dataset.space_to_tab = file_bunch.space_to_tab
-                dsn = dataset.metadata.get('base_name',file_bunch.precreated_name)
-                dataset.precreated_name = dsn
-                dataset.name = dsn
-                dataset.warnings.extend( file_bunch.warnings )
+                dataset.warnings.extend( warnings )
             if dataset.primary_file is None:#remove this before finish, this should create an empty dataset
                 raise Exception( 'No primary dataset file was available for composite upload' )
             keys = [ value.name for value in writable_files.values() ]
@@ -315,6 +321,7 @@
                     dataset.composite_files[ key ] = None
             else:
                 file_bunch, warnings = get_one_filename( group_incoming )
+                dataset.warnings.extend( warnings )
                 if file_bunch.path:
                     dataset.composite_files[ key ] = file_bunch.__dict__
                 else:
diff -r 997d55c65f49 -r b762ea8e5508 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py	Wed May 12 15:18:40 2010 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py	Wed May 12 15:57:05 2010 -0400
@@ -203,7 +203,7 @@
             d_type = dataset_upload_input.get_datatype( trans, kwd )
             
             if d_type.composite_type is not None:
-                datasets.append( create_dataset( 'Uploaded Composite Dataset (%s)' % dataset_upload_input.get_datatype_ext( trans, kwd ) ) )
+                datasets.append( create_dataset( dataset_upload_input.get_composite_dataset_name( kwd ) ) )
             else:
                 params = Bunch( ** tool_state.inputs[dataset_upload_input.name][0] )
                 if params.file_data not in [ None, "" ]:
diff -r 997d55c65f49 -r b762ea8e5508 tools/data_source/upload.py
--- a/tools/data_source/upload.py	Wed May 12 15:18:40 2010 -0400
+++ b/tools/data_source/upload.py	Wed May 12 15:57:05 2010 -0400
@@ -299,7 +299,7 @@
                     break
                 elif dataset.composite_file_paths[value.name] is not None:
                     if not value.is_binary:
-                        if uploaded_dataset.composite_files[ value.name ].space_to_tab:
+                        if value.space_to_tab:
                            sniff.convert_newlines_sep2tabs( dataset.composite_file_paths[ value.name ][ 'path' ] )
                         else:
                             sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )
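A note on the new get_composite_dataset_name() helper above: as its in-line
comment says, it is handed two different context shapes, one straight from the
submitted parameters (a flat 'files_metadata|base_name' key, as at the
tool_runner.py call site) and one after the parameters have been parsed into
nested tool inputs (as at the grouping.py call site). Below is a minimal
standalone sketch of that resolution order; resolve_composite_name and the
plain dicts standing in for the real tool context are illustrative only, not
part of Galaxy.

    def resolve_composite_name( context, file_type ):
        # Shape 1: flat key, straight from the submitted form parameters.
        name = context.get( 'files_metadata|base_name', None )
        if name is None:
            # Shape 2: nested dict, after parsing into tool inputs.
            name = context.get( 'files_metadata', {} ).get( 'base_name', None )
        if name is None:
            # No base_name in either shape: fall back to a generic name
            # tagged with the composite file type.
            name = 'Uploaded Composite Dataset (%s)' % file_type
        return name

    # Both context shapes resolve to the same dataset name:
    assert resolve_composite_name( { 'files_metadata|base_name': 'hg18' }, 'maf' ) == 'hg18'
    assert resolve_composite_name( { 'files_metadata': { 'base_name': 'hg18' } }, 'maf' ) == 'hg18'
    assert resolve_composite_name( {}, 'maf' ) == 'Uploaded Composite Dataset (maf)'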