commit/galaxy-central: 7 new changesets
7 new commits in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/563a28409672/ Changeset: 563a28409672 User: jmchilton Date: 2014-12-15 05:20:35+00:00 Summary: Allow multiple tool data table config files... ... use this functionality to implement tests for tool data API. Affected #: 5 files diff -r 6363c839528ca8bbbbc60c1b3784c788468d9445 -r 563a28409672988f6b30545ba987017e7da2e14c lib/galaxy/tools/data/__init__.py --- a/lib/galaxy/tools/data/__init__.py +++ b/lib/galaxy/tools/data/__init__.py @@ -31,8 +31,10 @@ # at server startup. If tool shed repositories are installed that contain a valid file named tool_data_table_conf.xml.sample, entries # from that file are inserted into this dict at the time of installation. self.data_tables = {} - if config_filename: - self.load_from_config_file( config_filename, self.tool_data_path, from_shed_config=False ) + for single_config_filename in util.listify( config_filename ): + if not single_config_filename: + continue + self.load_from_config_file( single_config_filename, self.tool_data_path, from_shed_config=False ) def __getitem__( self, key ): return self.data_tables.__getitem__( key ) diff -r 6363c839528ca8bbbbc60c1b3784c788468d9445 -r 563a28409672988f6b30545ba987017e7da2e14c scripts/functional_tests.py --- a/scripts/functional_tests.py +++ b/scripts/functional_tests.py @@ -231,7 +231,16 @@ start_server = 'GALAXY_TEST_EXTERNAL' not in os.environ tool_data_table_config_path = None if os.path.exists( 'tool_data_table_conf.test.xml' ): + # If explicitly defined tables for test, use those. tool_data_table_config_path = 'tool_data_table_conf.test.xml' + else: + # ... otherise find whatever Galaxy would use as the default and + # the sample data for fucntional tests to that. + default_tool_data_config = 'config/tool_data_table_conf.xml.sample' + for tool_data_config in ['config/tool_data_table_conf.xml', 'tool_data_table_conf.xml' ]: + if os.path.exists( tool_data_config ): + default_tool_data_config = tool_data_config + tool_data_table_config_path = '%s,test/functional/tool-data/sample_tool_data_tables.xml' % default_tool_data_config shed_tool_data_table_config = 'config/shed_tool_data_table_conf.xml' tool_dependency_dir = os.environ.get( 'GALAXY_TOOL_DEPENDENCY_DIR', None ) use_distributed_object_store = os.environ.get( 'GALAXY_USE_DISTRIBUTED_OBJECT_STORE', False ) diff -r 6363c839528ca8bbbbc60c1b3784c788468d9445 -r 563a28409672988f6b30545ba987017e7da2e14c test/api/test_tool_data.py --- /dev/null +++ b/test/api/test_tool_data.py @@ -0,0 +1,30 @@ +""" Tests for the tool data API. 
+""" +from base import api + +import operator + + +class ToolDataApiTestCase( api.ApiTestCase ): + + def test_admin_only( self ): + index_response = self._get( "tool_data", admin=False ) + self._assert_status_code_is( index_response, 403 ) + + def test_list(self): + index_response = self._get( "tool_data", admin=True ) + self._assert_status_code_is( index_response, 200 ) + print index_response.content + index = index_response.json() + assert "testalpha" in map(operator.itemgetter("name"), index) + + def test_show(self): + show_response = self._get( "tool_data/testalpha", admin=True ) + self._assert_status_code_is( show_response, 200 ) + print show_response.content + data_table = show_response.json() + assert data_table["columns"] == ["value", "name", "path"] + first_entry = data_table["fields"][0] + assert first_entry[0] == "data1" + assert first_entry[1] == "data1name" + assert first_entry[2].endswith("test/functional/tool-data/data1/entry.txt") diff -r 6363c839528ca8bbbbc60c1b3784c788468d9445 -r 563a28409672988f6b30545ba987017e7da2e14c test/functional/tool-data/sample_tool_data_tables.xml --- /dev/null +++ b/test/functional/tool-data/sample_tool_data_tables.xml @@ -0,0 +1,12 @@ +<tables> + <table name="testalpha" comment_char="#"> + <columns>value, name, path</columns> + <file path="${__HERE__}/testalpha.loc" /> + </table> + <!-- + <table name="testbeta" comment_char="#"> + <columns>value, foo, bar, path</columns> + <file path="${__HERE__}/testbeta.loc" /> + </table> + --> +</tables> diff -r 6363c839528ca8bbbbc60c1b3784c788468d9445 -r 563a28409672988f6b30545ba987017e7da2e14c test/functional/tool-data/testalpha.loc --- /dev/null +++ b/test/functional/tool-data/testalpha.loc @@ -0,0 +1,2 @@ +data1 data1name ${__HERE__}/data1/entry.txt +data2 data2name ${__HERE__}/data2/entry.txt https://bitbucket.org/galaxy/galaxy-central/commits/8795d0fe13c4/ Changeset: 8795d0fe13c4 User: jmchilton Date: 2014-12-15 05:20:35+00:00 Summary: PEP-8 fixes for lib/galaxy/tools/data/__init__.py. Affected #: 1 file diff -r 563a28409672988f6b30545ba987017e7da2e14c -r 8795d0fe13c45804ea53f4543c71ee7ca6f41a0c lib/galaxy/tools/data/__init__.py --- a/lib/galaxy/tools/data/__init__.py +++ b/lib/galaxy/tools/data/__init__.py @@ -9,9 +9,7 @@ import logging import os import os.path -import shutil import string -import tempfile from galaxy import util from galaxy.util.odict import odict @@ -22,6 +20,7 @@ DEFAULT_TABLE_TYPE = 'tabular' + class ToolDataTableManager( object ): """Manages a collection of tool data tables""" @@ -81,7 +80,7 @@ log.debug( "Loaded tool data table '%s'", table.name ) else: log.debug( "Loading another instance of data table '%s', attempting to merge content.", table.name ) - self.data_tables[ table.name ].merge_tool_data_table( table, allow_duplicates=False ) #only merge content, do not persist to disk, do not allow duplicate rows when merging + self.data_tables[ table.name ].merge_tool_data_table( table, allow_duplicates=False ) # only merge content, do not persist to disk, do not allow duplicate rows when merging # FIXME: This does not account for an entry with the same unique build ID, but a different path. return table_elems @@ -128,7 +127,7 @@ """ if not ( new_elems or remove_elems ): log.debug( 'ToolDataTableManager.to_xml_file called without any elements to add or remove.' 
) - return #no changes provided, no need to persist any changes + return # no changes provided, no need to persist any changes if not new_elems: new_elems = [] if not remove_elems: @@ -175,7 +174,7 @@ assert table_type in tool_data_table_types, "Unknown data table type '%s'" % type return tool_data_table_types[ table_type ]( table_elem, tool_data_path, from_shed_config=from_shed_config, filename=filename ) - def __init__( self, config_element, tool_data_path, from_shed_config = False, filename=None ): + def __init__( self, config_element, tool_data_path, from_shed_config=False, filename=None ): self.name = config_element.get( 'name' ) self.comment_char = config_element.get( 'comment_char' ) self.empty_field_value = config_element.get( 'empty_field_value', '' ) @@ -187,7 +186,7 @@ # increment this variable any time a new entry is added, or when the table is totally reloaded # This value has no external meaning, and does not represent an abstract version of the underlying data self._loaded_content_version = 1 - self._load_info = ( [ config_element, tool_data_path ], { 'from_shed_config':from_shed_config } ) + self._load_info = ( [ config_element, tool_data_path ], { 'from_shed_config': from_shed_config } ) self._merged_load_info = [] def _update_version( self, version=None ): @@ -252,13 +251,13 @@ type_key = 'tabular' - def __init__( self, config_element, tool_data_path, from_shed_config = False, filename=None ): + def __init__( self, config_element, tool_data_path, from_shed_config=False, filename=None ): super( TabularToolDataTable, self ).__init__( config_element, tool_data_path, from_shed_config, filename) self.config_element = config_element self.data = [] self.configure_and_load( config_element, tool_data_path, from_shed_config) - def configure_and_load( self, config_element, tool_data_path, from_shed_config = False): + def configure_and_load( self, config_element, tool_data_path, from_shed_config=False): """ Configure and load table from an XML element. """ @@ -323,7 +322,6 @@ else: log.debug( "Filename '%s' already exists in filenames (%s), not adding", filename, self.filenames.keys() ) - def merge_tool_data_table( self, other_table, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ): assert self.columns == other_table.columns, "Merging tabular data tables with non matching columns is not allowed: %s:%s != %s:%s" % ( self.name, self.columns, other_table.name, other_table.columns ) #merge filename info @@ -350,7 +348,7 @@ for i, field in enumerate( fields ): field_name = named_colums[i] if field_name is None: - field_name = i #check that this is supposed to be 0 based. + field_name = i # check that this is supposed to be 0 based. 
field_dict[ field_name ] = field rval.append( field_dict ) return rval @@ -508,7 +506,7 @@ data_table_fh = open( filename, 'wb' ) if os.stat( filename )[6] != 0: # ensure last existing line ends with new line - data_table_fh.seek( -1, 2 ) #last char in file + data_table_fh.seek( -1, 2 ) # last char in file last_char = data_table_fh.read( 1 ) if last_char not in [ '\n', '\r' ]: data_table_fh.write( '\n' ) @@ -577,7 +575,7 @@ def to_dict(self, view='collection'): rval = super(TabularToolDataTable, self).to_dict() if view == 'element': - rval['columns'] = sorted(self.columns.keys(), key=lambda x:self.columns[x]) + rval['columns'] = sorted(self.columns.keys(), key=lambda x: self.columns[x]) rval['fields'] = self.get_fields() return rval https://bitbucket.org/galaxy/galaxy-central/commits/6d54a7fd1fe9/ Changeset: 6d54a7fd1fe9 User: jmchilton Date: 2014-12-15 05:20:35+00:00 Summary: Small improvements to tool data API. Update tool data API to use newer API decorator for better error handling, create some abstractions for reuse down stream, etc... Affected #: 1 file diff -r 8795d0fe13c45804ea53f4543c71ee7ca6f41a0c -r 6d54a7fd1fe9edfdf2f5c2fdb1b5ddb7eebdf59a lib/galaxy/webapps/galaxy/api/tool_data.py --- a/lib/galaxy/webapps/galaxy/api/tool_data.py +++ b/lib/galaxy/webapps/galaxy/api/tool_data.py @@ -1,4 +1,6 @@ +from galaxy import exceptions from galaxy import web +from galaxy.web import _future_expose_api as expose_api from galaxy.web.base.controller import BaseAPIController @@ -8,21 +10,21 @@ """ @web.require_admin - @web.expose_api + @expose_api def index( self, trans, **kwds ): """ GET /api/tool_data: returns a list tool_data tables:: """ - return list( a.to_dict() for a in trans.app.tool_data_tables.data_tables.values() ) + return list( a.to_dict() for a in self._data_tables.values() ) @web.require_admin - @web.expose_api + @expose_api def show( self, trans, id, **kwds ): - return trans.app.tool_data_tables.data_tables[id].to_dict(view='element') + return self._data_table(id).to_dict(view='element') @web.require_admin - @web.expose_api + @expose_api def delete( self, trans, id, **kwd ): """ DELETE /api/tool_data/{id} @@ -61,3 +63,13 @@ return "Invalid data table item ( %s ) specified. Wrong number of columns (%s given, %s required)." % ( str( values ), str(len(split_values)), str(len(data_table.get_column_name_list()))) return data_table.remove_entry(split_values) + + def _data_table( self, id ): + try: + return self._data_tables[id] + except IndexError: + raise exceptions.ObjectNotFound("No such data table %s" % id) + + @property + def _data_tables( self ): + return self.app.tool_data_tables.data_tables https://bitbucket.org/galaxy/galaxy-central/commits/edbe286564de/ Changeset: edbe286564de User: jmchilton Date: 2014-12-15 05:20:35+00:00 Summary: Implement a detailed break down of data table fields via API. Originally this approach was laid out by Kyle Ellrott in this pull request (https://bitbucket.org/galaxy/galaxy-central/pull-request/531/add-downloads-t...). The changes to galaxy.tools.data are entirely his contribution, I only reworked the API and endpoint slightly and did some stylistic fixes and refactoring. 
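For context, a minimal sketch of how an admin client might call the new field detail endpoint once this changeset is applied (the Galaxy URL and API key below are placeholders, and the table/field names come from the functional test data):

    # Sketch only: GET /api/tool_data/<table>/fields/<value> returns the field's
    # name, raw fields, base_dir, a files map (path -> size) and a sha1 fingerprint.
    import requests

    GALAXY_URL = "http://localhost:8080"   # assumed local Galaxy instance
    ADMIN_API_KEY = "<admin-api-key>"       # placeholder admin key

    response = requests.get(
        "%s/api/tool_data/testalpha/fields/data1" % GALAXY_URL,
        params={"key": ADMIN_API_KEY},
    )
    response.raise_for_status()
    field = response.json()
    print(field["name"], field["fingerprint"])
    for path, size in field["files"].items():
        print(path, size)
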
Affected #: 4 files diff -r 6d54a7fd1fe9edfdf2f5c2fdb1b5ddb7eebdf59a -r edbe286564de14b2d51656c03443b3e4aa99e3ab lib/galaxy/tools/data/__init__.py --- a/lib/galaxy/tools/data/__init__.py +++ b/lib/galaxy/tools/data/__init__.py @@ -9,7 +9,11 @@ import logging import os import os.path +import re import string +import hashlib + +from glob import glob from galaxy import util from galaxy.util.odict import odict @@ -340,6 +344,13 @@ def get_fields( self ): return self.data + def get_field(self, value): + rval = None + for i in self.get_named_fields_list(): + if i['value'] == value: + rval = TabularToolDataField(i) + return rval + def get_named_fields_list( self ): rval = [] named_colums = self.get_column_name_list() @@ -580,6 +591,58 @@ return rval +class TabularToolDataField(Dictifiable, object): + + dict_collection_visible_keys = [] + + def __init__(self, data): + self.data = data + + def __getitem__(self, key): + return self.data[key] + + def get_base_path(self): + return os.path.normpath(os.path.abspath( self.data['path'] )) + + def get_base_dir(self): + path = self.get_base_path() + if not os.path.isdir(path): + path = os.path.dirname(path) + return path + + def clean_base_dir(self, path): + return re.sub( "^" + self.get_base_dir() + r"/*", "", path ) + + def get_files(self): + return glob( self.get_base_path() + "*" ) + + def get_filesize_map(self, rm_base_dir=False): + out = {} + for path in self.get_files(): + if rm_base_dir: + out[self.clean_base_dir(path)] = os.path.getsize(path) + else: + out[path] = os.path.getsize(path) + return out + + def get_fingerprint(self): + sha1 = hashlib.sha1() + fmap = self.get_filesize_map(True) + for k in sorted(fmap.keys()): + sha1.update(k) + sha1.update(str(fmap[k])) + return sha1.hexdigest() + + def to_dict(self): + rval = super(TabularToolDataField, self).to_dict() + rval['name'] = self.data['value'] + rval['fields'] = self.data + rval['base_dir'] = self.get_base_dir(), + rval['files'] = self.get_filesize_map(True) + rval['fingerprint'] = self.get_fingerprint() + return rval + + def expand_here_template(content, here=None): if here and content: content = string.Template(content).safe_substitute( { "__HERE__": here }) diff -r 6d54a7fd1fe9edfdf2f5c2fdb1b5ddb7eebdf59a -r edbe286564de14b2d51656c03443b3e4aa99e3ab lib/galaxy/webapps/galaxy/api/tool_data.py --- a/lib/galaxy/webapps/galaxy/api/tool_data.py +++ b/lib/galaxy/webapps/galaxy/api/tool_data.py @@ -64,6 +64,22 @@ return data_table.remove_entry(split_values) + @web.require_admin + @expose_api + def show_field( self, trans, id, value, **kwds ): + """ + GET /api/tool_data/<id>/fields/<value> + + Get information about a partiular field in a tool_data table + """ + return self._data_table_field( id, value ).to_dict() + + def _data_table_field( self, id, value ): + out = self._data_table(id).get_field(value) + if out is None: + raise exceptions.ObjectNotFound("No such field %s in data table %s." 
% (value, id)) + return out + def _data_table( self, id ): try: return self._data_tables[id] diff -r 6d54a7fd1fe9edfdf2f5c2fdb1b5ddb7eebdf59a -r edbe286564de14b2d51656c03443b3e4aa99e3ab lib/galaxy/webapps/galaxy/buildapp.py --- a/lib/galaxy/webapps/galaxy/buildapp.py +++ b/lib/galaxy/webapps/galaxy/buildapp.py @@ -171,6 +171,7 @@ webapp.mapper.resource( 'dataset', 'datasets', path_prefix='/api' ) webapp.mapper.resource( 'tool_data', 'tool_data', path_prefix='/api' ) + webapp.mapper.connect( '/api/tool_data/{id:.+?}/fields/{value:.+?}', action='show_field', controller="tool_data" ) webapp.mapper.resource( 'dataset_collection', 'dataset_collections', path_prefix='/api/') webapp.mapper.resource( 'sample', 'samples', path_prefix='/api' ) webapp.mapper.resource( 'request', 'requests', path_prefix='/api' ) diff -r 6d54a7fd1fe9edfdf2f5c2fdb1b5ddb7eebdf59a -r edbe286564de14b2d51656c03443b3e4aa99e3ab test/api/test_tool_data.py --- a/test/api/test_tool_data.py +++ b/test/api/test_tool_data.py @@ -28,3 +28,11 @@ assert first_entry[0] == "data1" assert first_entry[1] == "data1name" assert first_entry[2].endswith("test/functional/tool-data/data1/entry.txt") + + def test_show_field(self): + show_field_response = self._get( "tool_data/testalpha/fields/data1", admin=True ) + self._assert_status_code_is( show_field_response, 200 ) + field = show_field_response.json() + self._assert_has_keys( field, "files", "name", "fields", "fingerprint", "base_dir" ) + files = field[ "files" ] + assert len( files ) == 2 https://bitbucket.org/galaxy/galaxy-central/commits/e416c3a2f956/ Changeset: e416c3a2f956 User: jmchilton Date: 2014-12-15 05:20:35+00:00 Summary: Allow downloading index files via tool data API. This provides direct access to the files to admins - probably still wise to provide some mechanism to download a copressed archive of these files. 
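As a rough illustration of the new files route (again with placeholder URL and key; the path mirrors the functional test data), an admin could fetch a single index file like this:

    # Sketch only: GET /api/tool_data/<table>/fields/<value>/files/<path>
    # returns the raw index file contents to the (admin) caller.
    import requests

    GALAXY_URL = "http://localhost:8080"   # placeholder Galaxy instance
    ADMIN_API_KEY = "<admin-api-key>"       # placeholder admin key

    url = "%s/api/tool_data/testalpha/fields/data1/files/entry.txt" % GALAXY_URL
    response = requests.get(url, params={"key": ADMIN_API_KEY})
    response.raise_for_status()
    with open("entry.txt", "wb") as fh:
        fh.write(response.content)
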
Affected #: 3 files diff -r edbe286564de14b2d51656c03443b3e4aa99e3ab -r e416c3a2f956a28127eb730076f37ad6ee4b1d8b lib/galaxy/webapps/galaxy/api/tool_data.py --- a/lib/galaxy/webapps/galaxy/api/tool_data.py +++ b/lib/galaxy/webapps/galaxy/api/tool_data.py @@ -1,6 +1,9 @@ +import os + from galaxy import exceptions from galaxy import web from galaxy.web import _future_expose_api as expose_api +from galaxy.web import _future_expose_api_raw as expose_api_raw from galaxy.web.base.controller import BaseAPIController @@ -74,6 +77,16 @@ """ return self._data_table_field( id, value ).to_dict() + @web.require_admin + @expose_api_raw + def download_field_file( self, trans, id, value, path, **kwds ): + field_value = self._data_table_field( id, value ) + base_dir = field_value.get_base_dir() + full_path = os.path.join( base_dir, path ) + if full_path not in field_value.get_files(): + raise exceptions.ObjectNotFound("No such path in data table field.") + return open(full_path, "r") + def _data_table_field( self, id, value ): out = self._data_table(id).get_field(value) if out is None: diff -r edbe286564de14b2d51656c03443b3e4aa99e3ab -r e416c3a2f956a28127eb730076f37ad6ee4b1d8b lib/galaxy/webapps/galaxy/buildapp.py --- a/lib/galaxy/webapps/galaxy/buildapp.py +++ b/lib/galaxy/webapps/galaxy/buildapp.py @@ -171,6 +171,7 @@ webapp.mapper.resource( 'dataset', 'datasets', path_prefix='/api' ) webapp.mapper.resource( 'tool_data', 'tool_data', path_prefix='/api' ) + webapp.mapper.connect( '/api/tool_data/{id:.+?}/fields/{value:.+?}/files/{path:.+?}', action='download_field_file', controller="tool_data" ) webapp.mapper.connect( '/api/tool_data/{id:.+?}/fields/{value:.+?}', action='show_field', controller="tool_data" ) webapp.mapper.resource( 'dataset_collection', 'dataset_collections', path_prefix='/api/') webapp.mapper.resource( 'sample', 'samples', path_prefix='/api' ) diff -r edbe286564de14b2d51656c03443b3e4aa99e3ab -r e416c3a2f956a28127eb730076f37ad6ee4b1d8b test/api/test_tool_data.py --- a/test/api/test_tool_data.py +++ b/test/api/test_tool_data.py @@ -36,3 +36,9 @@ self._assert_has_keys( field, "files", "name", "fields", "fingerprint", "base_dir" ) files = field[ "files" ] assert len( files ) == 2 + + def test_download_field_file(self): + show_field_response = self._get( "tool_data/testalpha/fields/data1/files/entry.txt", admin=True ) + self._assert_status_code_is( show_field_response, 200 ) + content = show_field_response.content + assert content == "This is data 1.", content https://bitbucket.org/galaxy/galaxy-central/commits/66fbbbd58209/ Changeset: 66fbbbd58209 User: jmchilton Date: 2014-12-15 05:20:35+00:00 Summary: Allow specification of multiple data manager configuration files... ... use this to create API functional tests for data managers. 
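The change below mirrors the earlier tool data table change: the configured data_manager_config_file value is passed through util.listify(), so a comma-separated list of config files is loaded in order. A minimal standalone sketch of that behaviour (paths are illustrative, and a plain string split stands in for util.listify here):

    # Sketch only: a comma-separated data_manager_config_file value is split
    # and each non-empty entry is loaded as a separate data manager config.
    config_value = "config/data_manager_conf.xml,test/functional/tools/sample_data_manager_conf.xml"
    for filename in [v.strip() for v in config_value.split(",")]:
        if not filename:
            continue
        print("would load data manager config: %s" % filename)
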
Affected #: 7 files diff -r e416c3a2f956a28127eb730076f37ad6ee4b1d8b -r 66fbbbd58209dba224f95c2aaf973ec7c64b7c11 lib/galaxy/tools/data_manager/manager.py --- a/lib/galaxy/tools/data_manager/manager.py +++ b/lib/galaxy/tools/data_manager/manager.py @@ -25,7 +25,10 @@ self.managed_data_tables = odict() self.tool_path = None self.filename = xml_filename or self.app.config.data_manager_config_file - self.load_from_xml( self.filename ) + for filename in util.listify( self.filename ): + if not filename: + continue + self.load_from_xml( filename ) if self.app.config.shed_data_manager_config_file: self.load_from_xml( self.app.config.shed_data_manager_config_file, store_tool_path=False, replace_existing=True ) diff -r e416c3a2f956a28127eb730076f37ad6ee4b1d8b -r 66fbbbd58209dba224f95c2aaf973ec7c64b7c11 scripts/functional_tests.py --- a/scripts/functional_tests.py +++ b/scripts/functional_tests.py @@ -241,6 +241,12 @@ if os.path.exists( tool_data_config ): default_tool_data_config = tool_data_config tool_data_table_config_path = '%s,test/functional/tool-data/sample_tool_data_tables.xml' % default_tool_data_config + + default_data_manager_config = 'config/data_manager_conf.xml.sample' + for data_manager_config in ['config/data_manager_conf.xml', 'data_manager_conf.xml' ]: + if os.path.exists( data_manager_config ): + default_data_manager_config = data_manager_config + data_manager_config_file = "%s,test/functional/tools/sample_data_manager_conf.xml" % default_data_manager_config shed_tool_data_table_config = 'config/shed_tool_data_table_conf.xml' tool_dependency_dir = os.environ.get( 'GALAXY_TOOL_DEPENDENCY_DIR', None ) use_distributed_object_store = os.environ.get( 'GALAXY_USE_DISTRIBUTED_OBJECT_STORE', False ) @@ -329,6 +335,7 @@ master_api_key=master_api_key, use_tasked_jobs=True, enable_beta_tool_formats=True, + data_manager_config_file=data_manager_config_file, ) if install_database_connection is not None: kwargs[ 'install_database_connection' ] = install_database_connection diff -r e416c3a2f956a28127eb730076f37ad6ee4b1d8b -r 66fbbbd58209dba224f95c2aaf973ec7c64b7c11 test/api/test_tool_data.py --- a/test/api/test_tool_data.py +++ b/test/api/test_tool_data.py @@ -1,6 +1,9 @@ """ Tests for the tool data API. 
""" +import json + from base import api +from .helpers import DatasetPopulator import operator @@ -42,3 +45,19 @@ self._assert_status_code_is( show_field_response, 200 ) content = show_field_response.content assert content == "This is data 1.", content + + def test_create_data_with_manager(self): + dataset_populator = DatasetPopulator( self.galaxy_interactor ) + history_id = dataset_populator.new_history() + payload = dataset_populator.run_tool_payload( + tool_id="data_manager", + inputs={"ignored_value": "moo"}, + history_id=history_id, + ) + create_response = self._post( "tools", data=payload ) + self._assert_status_code_is( create_response, 200 ) + dataset_populator.wait_for_history( history_id, assert_ok=True ) + show_response = self._get( "tool_data/testbeta", admin=True ) + print show_response.content + assert False + diff -r e416c3a2f956a28127eb730076f37ad6ee4b1d8b -r 66fbbbd58209dba224f95c2aaf973ec7c64b7c11 test/functional/tool-data/sample_tool_data_tables.xml --- a/test/functional/tool-data/sample_tool_data_tables.xml +++ b/test/functional/tool-data/sample_tool_data_tables.xml @@ -3,10 +3,8 @@ <columns>value, name, path</columns><file path="${__HERE__}/testalpha.loc" /></table> - <!-- <table name="testbeta" comment_char="#"> - <columns>value, foo, bar, path</columns> + <columns>value, path</columns><file path="${__HERE__}/testbeta.loc" /></table> - --></tables> diff -r e416c3a2f956a28127eb730076f37ad6ee4b1d8b -r 66fbbbd58209dba224f95c2aaf973ec7c64b7c11 test/functional/tools/data_manager.xml --- /dev/null +++ b/test/functional/tools/data_manager.xml @@ -0,0 +1,16 @@ +<tool id="data_manager" name="Test Data Manager" tool_type="manage_data" version="0.0.1"> + <configfiles> + <configfile name="static_test_data">{"data_tables": {"testbeta": [{"value": "newvalue", "path": "newvalue.txt"}]}}</configfile> + </configfiles> + <command> + mkdir $out_file.files_path ; + echo "A new value" > $out_file.files_path/newvalue.txt; + cp $static_test_data $out_file + </command> + <inputs> + <param type="text" name="ignored_value" value="" label="Ignored" /> + </inputs> + <outputs> + <data name="out_file" format="data_manager_json"/> + </outputs> +</tool> diff -r e416c3a2f956a28127eb730076f37ad6ee4b1d8b -r 66fbbbd58209dba224f95c2aaf973ec7c64b7c11 test/functional/tools/sample_data_manager_conf.xml --- /dev/null +++ b/test/functional/tools/sample_data_manager_conf.xml @@ -0,0 +1,16 @@ +<data_managers tool_path="test/functional/tools"> + <data_manager tool_file="data_manager.xml" id="test_data_manager" version="1.0"> + <data_table name="testbeta"> + <output> + <column name="value" /> + <column name="path" output_ref="out_file" > + <move type="directory" relativize_symlinks="True"> + <target base="${GALAXY_DATA_MANAGER_DATA_PATH}">testbeta/${value}</target> + </move> + <value_translation>${GALAXY_DATA_MANAGER_DATA_PATH}/testbeta/${value}/${path}</value_translation> + <value_translation type="function">abspath</value_translation> + </column> + </output> + </data_table> + </data_manager> +</data_managers> https://bitbucket.org/galaxy/galaxy-central/commits/ecac8a867961/ Changeset: ecac8a867961 User: jmchilton Date: 2014-12-15 05:20:35+00:00 Summary: Implement test case for Pull Request #577. 
Affected #: 1 file diff -r 66fbbbd58209dba224f95c2aaf973ec7c64b7c11 -r ecac8a8679612d419347dcbedc398319418b077e test/api/test_tool_data.py --- a/test/api/test_tool_data.py +++ b/test/api/test_tool_data.py @@ -5,6 +5,8 @@ from base import api from .helpers import DatasetPopulator +from requests import delete + import operator @@ -46,7 +48,10 @@ content = show_field_response.content assert content == "This is data 1.", content - def test_create_data_with_manager(self): + def test_delete_entry(self): + show_response = self._get( "tool_data/testbeta", admin=True ) + original_count = len(show_response.json()["fields"]) + dataset_populator = DatasetPopulator( self.galaxy_interactor ) history_id = dataset_populator.new_history() payload = dataset_populator.run_tool_payload( @@ -58,6 +63,12 @@ self._assert_status_code_is( create_response, 200 ) dataset_populator.wait_for_history( history_id, assert_ok=True ) show_response = self._get( "tool_data/testbeta", admin=True ) - print show_response.content - assert False + updated_fields = show_response.json()["fields"] + assert len(updated_fields) == original_count + 1 + field0 = updated_fields[0] + url = self._api_url( "tool_data/testbeta?key=%s" % self.galaxy_interactor.master_api_key ) + delete( url, data=json.dumps({"values": "\t".join(field0)}) ) + show_response = self._get( "tool_data/testbeta", admin=True ) + updated_fields = show_response.json()["fields"] + assert len(updated_fields) == original_count Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.