galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
December 2012
- 1 participant
- 142 discussions
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3ee0e5ee1b37/
changeset: 3ee0e5ee1b37
user: jgoecks
date: 2012-12-10 21:38:57
summary: Trackster: remove unused var.
affected #: 1 file
diff -r 2fdc0270baad3b020118d05cc3429993a3d80207 -r 3ee0e5ee1b375c0fd580c26a93850f2fa44f93f0 static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -3278,7 +3278,6 @@
this.tile_predraw_init();
var canvas = track.view.canvas_manager.new_canvas(),
- tile_bounds = track._get_tile_bounds(tile_index, resolution),
tile_low = region.get('start'),
tile_high = region.get('end'),
all_data_index = 0,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Trackster: do not use width parameter in draw helper because it's not needed anymore.
by Bitbucket 10 Dec '12
by Bitbucket 10 Dec '12
10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/2fdc0270baad/
changeset: 2fdc0270baad
user: jgoecks
date: 2012-12-10 21:31:38
summary: Trackster: do not use width parameter in draw helper because it's not needed anymore.
affected #: 1 file
diff -r 512c7561e1b95b381345296a0da3aa3effc05803 -r 2fdc0270baad3b020118d05cc3429993a3d80207 static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -2766,8 +2766,8 @@
* Generate a key for the tile cache.
* TODO: create a TileCache object (like DataCache) and generate key internally.
*/
- _gen_tile_cache_key: function(width, w_scale, tile_index) {
- return width + '_' + w_scale + '_' + tile_index;
+ _gen_tile_cache_key: function(w_scale, tile_index) {
+ return w_scale + '_' + tile_index;
},
/**
* Request that track be drawn.
@@ -2826,7 +2826,7 @@
is_tile = function(o) { return (o && 'track' in o); };
// Draw tiles.
while ( ( tile_index * TILE_SIZE * resolution ) < high ) {
- var draw_result = this.draw_helper( force, width, tile_index, resolution, this.tiles_div, w_scale );
+ var draw_result = this.draw_helper( force, tile_index, resolution, this.tiles_div, w_scale );
if ( is_tile(draw_result) ) {
drawn_tiles.push( draw_result );
} else {
@@ -2875,9 +2875,9 @@
* Retrieves from cache, draws, or sets up drawing for a single tile. Returns either a Tile object or a
* jQuery.Deferred object that is fulfilled when tile can be drawn again.
*/
- draw_helper: function(force, width, tile_index, resolution, parent_element, w_scale, kwargs) {
+ draw_helper: function(force, tile_index, resolution, parent_element, w_scale, kwargs) {
var track = this,
- key = this._gen_tile_cache_key(width, w_scale, tile_index),
+ key = this._gen_tile_cache_key(w_scale, tile_index),
region = this._get_tile_bounds(tile_index, resolution);
// Init kwargs if necessary to avoid having to check if kwargs defined.
@@ -3227,10 +3227,10 @@
this.action_icons.param_space_viz_icon.hide();
},
can_draw: Drawable.prototype.can_draw,
- draw_helper: function(force, width, tile_index, resolution, parent_element, w_scale, kwargs) {
+ draw_helper: function(force, tile_index, resolution, parent_element, w_scale, kwargs) {
// FIXME: this function is similar to TiledTrack.draw_helper -- can the two be merged/refactored?
var track = this,
- key = this._gen_tile_cache_key(width, w_scale, tile_index),
+ key = this._gen_tile_cache_key(w_scale, tile_index),
region = this._get_tile_bounds(tile_index, resolution);
// Init kwargs if necessary to avoid having to check if kwargs defined.
@@ -3413,7 +3413,7 @@
for (var i = 0; i < tiles.length; i++) {
var tile = tiles[i];
if (tile.html_elt.find("canvas").height() !== max_height) {
- this.draw_helper(true, width, tile.index, tile.resolution, tile.html_elt.parent(), w_scale, { height: max_height } );
+ this.draw_helper(true, tile.index, tile.resolution, tile.html_elt.parent(), w_scale, { height: max_height } );
tile.html_elt.remove();
}
}
@@ -3451,9 +3451,9 @@
/**
* Only retrieves data and draws tile if reference data can be displayed.
*/
- draw_helper: function(force, width, tile_index, resolution, parent_element, w_scale, kwargs) {
+ draw_helper: function(force, tile_index, resolution, parent_element, w_scale, kwargs) {
if (w_scale > this.view.canvas_manager.char_width_px) {
- return TiledTrack.prototype.draw_helper.call(this, force, width, tile_index, resolution, parent_element, w_scale, kwargs);
+ return TiledTrack.prototype.draw_helper.call(this, force, tile_index, resolution, parent_element, w_scale, kwargs);
}
else {
this.hide_contents();
@@ -3815,7 +3815,7 @@
var tile = tiles[i];
if (tile.max_val !== global_max) {
tile.html_elt.remove();
- track.draw_helper(true, width, tile.index, tile.resolution, tile.html_elt.parent(), w_scale, { more_tile_data: { max: global_max } } );
+ track.draw_helper(true, tile.index, tile.resolution, tile.html_elt.parent(), w_scale, { more_tile_data: { max: global_max } } );
}
}
}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: More tool shed functional test enhancements.
by Bitbucket 10 Dec '12
by Bitbucket 10 Dec '12
10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/512c7561e1b9/
changeset: 512c7561e1b9
user: inithello
date: 2012-12-10 20:41:14
summary: More tool shed functional test enhancements.
affected #: 8 files
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -15,6 +15,7 @@
self.hgweb_config_dir = os.environ.get( 'TEST_HG_WEB_CONFIG_DIR' )
self.hgweb_config_manager = galaxy.webapps.community.util.hgweb_config.HgWebConfigManager()
self.hgweb_config_manager.hgweb_config_dir = self.hgweb_config_dir
+ self.tool_shed_test_tmp_dir = os.environ.get( 'TOOL_SHED_TEST_TMP_DIR', None)
self.host = os.environ.get( 'TOOL_SHED_TEST_HOST' )
self.port = os.environ.get( 'TOOL_SHED_TEST_PORT' )
self.url = "http://%s:%s" % ( self.host, self.port )
@@ -213,6 +214,9 @@
string = string.replace( character, replacement )
return string
def generate_repository_dependency_xml( self, repository, xml_filename, dependency_description='' ):
+ file_path = os.path.split( xml_filename )[0]
+ if not os.path.exists( file_path ):
+ os.makedirs( file_path )
changeset_revision = self.get_repository_tip( repository )
if dependency_description:
description = ' description="%s"' % dependency_description
@@ -226,6 +230,13 @@
description=description )
# Save the generated xml to the specified location.
file( xml_filename, 'w' ).write( repository_dependency_xml )
+ def generate_temp_path( self, test_script_path, additional_paths=[] ):
+ return os.path.join( self.tool_shed_test_tmp_dir, test_script_path, os.sep.join( additional_paths ) )
+ def get_filename( self, filename, filepath=None ):
+ if filepath is not None:
+ return os.path.abspath( os.path.join( filepath, filename ) )
+ else:
+ return os.path.abspath( os.path.join( self.file_dir, filename ) )
def get_latest_repository_metadata_for_repository( self, repository ):
# TODO: This will not work as expected. Fix it.
return repository.metadata_revisions[ 0 ]
@@ -335,6 +346,7 @@
def upload_file( self,
repository,
filename,
+ filepath=None,
valid_tools_only=True,
strings_displayed=[],
strings_not_displayed=[],
@@ -344,6 +356,6 @@
strings_displayed.append( "has been successfully uploaded to the repository." )
for key in kwd:
tc.fv( "1", key, kwd[ key ] )
- tc.formfile( "1", "file_data", self.get_filename( filename ) )
+ tc.formfile( "1", "file_data", self.get_filename( filename, filepath ) )
tc.submit( "upload_button" )
self.check_for_strings( strings_displayed, strings_not_displayed )
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/functional/test_0000_basic_repository_features.py
--- a/test/tool_shed/functional/test_0000_basic_repository_features.py
+++ b/test/tool_shed/functional/test_0000_basic_repository_features.py
@@ -1,7 +1,7 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
-repository_name = 'filtering'
+repository_name = 'filtering_0000'
repository_description = "Galaxy's filtering tool"
repository_long_description = "Long description of Galaxy's filtering tool"
@@ -19,41 +19,48 @@
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
admin_user_private_role = get_private_role( admin_user )
def test_0005_create_categories( self ):
- """Create categories"""
- self.create_category( 'Text Manipulation', 'Tools for manipulating text' )
- self.create_category( 'Text Analysis', 'Tools for analyzing text' )
+ """Create categories for this test suite"""
+ self.create_category( 'Test 0000 Basic Repository Features 1', 'Test 0000 Basic Repository Features 1' )
+ self.create_category( 'Test 0000 Basic Repository Features 2', 'Test 0000 Basic Repository Features 2' )
def test_0010_create_repository( self ):
"""Create the filtering repository"""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = get_private_role( test_user_1 )
strings_displayed = [ 'Repository %s' % "'%s'" % repository_name,
'Repository %s has been created' % "'%s'" % repository_name ]
self.create_repository( repository_name,
repository_description,
repository_long_description=repository_long_description,
- categories=[ 'Text Manipulation' ],
+ categories=[ 'Test 0000 Basic Repository Features 1' ],
strings_displayed=strings_displayed )
def test_0015_edit_repository( self ):
"""Edit the repository name, description, and long description"""
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
new_name = "renamed_filtering"
new_description = "Edited filtering tool"
new_long_description = "Edited long description"
self.edit_repository_information( repository, repo_name=new_name, description=new_description, long_description=new_long_description )
def test_0020_change_repository_category( self ):
"""Change the categories associated with the filtering repository"""
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
- self.edit_repository_categories( repository, categories_to_add=[ "Text Analysis" ], categories_to_remove=[ "Text Manipulation" ] )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.edit_repository_categories( repository,
+ categories_to_add=[ "Test 0000 Basic Repository Features 2" ],
+ categories_to_remove=[ "Test 0000 Basic Repository Features 1" ] )
def test_0025_grant_write_access( self ):
'''Grant write access to another user'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
- self.grant_write_access( repository, usernames=[ common.test_user_1_name ] )
- self.revoke_write_access( repository, common.test_user_1_name )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.grant_write_access( repository, usernames=[ common.admin_username ] )
+ self.revoke_write_access( repository, common.admin_username )
def test_0030_upload_filtering_1_1_0( self ):
"""Upload filtering_1.1.0.tar to the repository"""
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository, 'filtering/filtering_1.1.0.tar', commit_message="Uploaded filtering 1.1.0" )
def test_0035_verify_repository( self ):
'''Display basic repository pages'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
latest_changeset_revision = self.get_repository_tip( repository )
self.check_for_valid_tools( repository, strings_displayed=[ 'Filter1' ] )
self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=1 )
@@ -61,16 +68,20 @@
self.check_repository_tools_for_changeset_revision( repository, tip )
self.check_repository_metadata( repository, tip_only=False )
self.browse_repository( repository, strings_displayed=[ 'Browse %s revision' % repository.name, '(repository tip)' ] )
- self.display_repository_clone_page( common.admin_username,
+ self.display_repository_clone_page( common.test_user_1_name,
repository_name,
strings_displayed=[ 'Uploaded filtering 1.1.0', latest_changeset_revision ] )
def test_0040_alter_repository_states( self ):
'''Test toggling the malicious and deprecated repository flags.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
self.set_repository_malicious( repository, set_malicious=True, strings_displayed=[ 'The repository tip has been defined as malicious.' ] )
self.set_repository_malicious( repository,
set_malicious=False,
strings_displayed=[ 'The repository tip has been defined as <b>not</b> malicious.' ] )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
self.set_repository_deprecated( repository,
strings_displayed=[ 'has been marked as deprecated', 'Mark as not deprecated' ] )
self.display_manage_repository_page( repository,
@@ -82,7 +93,7 @@
set_deprecated=False )
def test_0045_display_repository_tip_file( self ):
'''Display the contents of filtering.xml in the repository tip revision'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.display_repository_file_contents( repository=repository,
filename='filtering.xml',
filepath=None,
@@ -90,16 +101,16 @@
strings_not_displayed=[] )
def test_0050_upload_filtering_txt_file( self ):
'''Upload filtering.txt file associated with tool version 1.1.0.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
- 'filtering/filtering.txt',
+ 'filtering/filtering_0000.txt',
commit_message="Uploaded filtering.txt",
uncompress_file='No',
remove_repo_files_not_in_tar='No' )
self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
def test_0055_upload_filtering_test_data( self ):
'''Upload filtering test data.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository, 'filtering/filtering_test_data.tar', commit_message="Uploaded filtering test data", remove_repo_files_not_in_tar='No' )
self.display_repository_file_contents( repository=repository,
filename='1.bed',
@@ -109,14 +120,14 @@
self.check_repository_metadata( repository, tip_only=True )
def test_0060_upload_filtering_2_2_0( self ):
'''Upload filtering version 2.2.0'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'filtering/filtering_2.2.0.tar',
commit_message="Uploaded filtering 2.2.0",
remove_repo_files_not_in_tar='No' )
def test_0065_verify_filtering_repository( self ):
'''Verify the new tool versions and repository metadata.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
tip = self.get_repository_tip( repository )
self.check_for_valid_tools( repository )
strings_displayed = self.get_repository_metadata_revisions( repository ).append( 'Select a revision' )
@@ -126,7 +137,7 @@
self.check_repository_metadata( repository, tip_only=False )
def test_0070_upload_readme_txt_file( self ):
'''Upload readme.txt file associated with tool version 2.2.0.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository, 'readme.txt', commit_message="Uploaded readme.txt" )
self.display_manage_repository_page( repository, strings_displayed=[ 'This is a readme file.' ] )
# Verify that there is a different readme file for each metadata revision.
@@ -134,13 +145,13 @@
self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0', 'This is a readme file.' ] )
def test_0075_delete_readme_txt_file( self ):
'''Delete the readme.txt file.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.delete_files_from_repository( repository, filenames=[ 'readme.txt' ] )
self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=2 )
self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
def test_0080_search_for_valid_filter_tool( self ):
'''Search for the filtering tool by tool ID, name, and version.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
tip_changeset = self.get_repository_tip( repository )
search_fields = dict( tool_id='Filter1', tool_name='filter', tool_version='2.2.0' )
self.search_for_valid_tools( search_fields=search_fields, strings_displayed=[ tip_changeset ], strings_not_displayed=[] )
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
@@ -1,7 +1,7 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
-repository_name = 'freebayes'
+repository_name = 'freebayes_0010'
repository_description = "Galaxy's freebayes tool"
repository_long_description = "Long description of Galaxy's freebayes tool"
@@ -10,21 +10,28 @@
def test_0000_create_or_login_admin_user( self ):
"""Create necessary user accounts and login as an admin user."""
self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = get_private_role( test_user_1 )
+ self.logout()
self.login( email=common.admin_email, username=common.admin_username )
admin_user = get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
admin_user_private_role = get_private_role( admin_user )
def test_0005_create_category( self ):
- """Create SNP Analysis category."""
- self.create_category( 'SNP Analysis', 'Tools for single nucleotide polymorphism data such as WGA' )
+ """Create a category for this test suite"""
+ self.create_category( 'Test 0010 Repository With Tool Dependencies', 'Tests for a repository with tool dependencies.' )
def test_0010_create_freebayes_repository_and_upload_tool_xml( self ):
'''Create freebayes repository and upload freebayes.xml without tool_data_table_conf.xml.sample. This should result in an error message and invalid tool.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
self.create_repository( repository_name,
repository_description,
repository_long_description=repository_long_description,
- categories=[ 'SNP Analysis' ],
+ categories=[ 'Test 0010 Repository With Tool Dependencies' ],
strings_displayed=[] )
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'freebayes/freebayes.xml',
valid_tools_only=False,
@@ -37,7 +44,7 @@
strings_displayed=[ 'requires an entry', 'tool_data_table_conf.xml' ] )
def test_0015_upload_missing_tool_data_table_conf_file( self ):
'''Upload the missing tool_data_table_conf.xml.sample file to the repository.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'freebayes/tool_data_table_conf.xml.sample',
valid_tools_only=False,
@@ -50,27 +57,27 @@
strings_displayed=[ 'refers to a file', 'sam_fa_indices.loc' ] )
def test_0020_upload_missing_sample_loc_file( self ):
'''Upload the missing sam_fa_indices.loc.sample file to the repository.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'freebayes/sam_fa_indices.loc.sample',
strings_displayed=[],
commit_message='Uploaded tool data table .loc file.' )
def test_0025_upload_invalid_tool_dependency_xml( self ):
'''Upload tool_dependencies.xml defining version 0.9.5 of the freebayes package.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
os.path.join( 'freebayes', 'invalid_tool_dependencies', 'tool_dependencies.xml' ),
strings_displayed=[ 'Name, version and type from a tool requirement tag does not match' ],
commit_message='Uploaded invalid tool dependency XML.' )
def test_0030_upload_valid_tool_dependency_xml( self ):
'''Upload tool_dependencies.xml defining version 0.9.4_9696d0ce8a962f7bb61c4791be5ce44312b81cf8 of the freebayes package.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
os.path.join( 'freebayes', 'tool_dependencies.xml' ),
commit_message='Uploaded valid tool dependency XML.' )
def test_0035_verify_tool_dependencies( self ):
'''Verify that the uploaded tool_dependencies.xml specifies the correct package versions.'''
- repository = get_repository_by_name_and_owner( repository_name, common.admin_username )
+ repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.display_manage_repository_page( repository,
strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Valid tools' ],
strings_not_displayed=[ 'Invalid tools' ] )
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/functional/test_0020_basic_repository_dependencies.py
--- a/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
@@ -1,7 +1,7 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
-datatypes_repository_name = 'emboss_datatypes'
+datatypes_repository_name = 'emboss_datatypes_0020'
datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
@@ -14,20 +14,18 @@
def test_0000_initiate_users( self ):
"""Create necessary user accounts and login as an admin user."""
self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = get_private_role( test_user_1 )
+ self.logout()
self.login( email=common.admin_email, username=common.admin_username )
admin_user = get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
admin_user_private_role = get_private_role( admin_user )
- self.logout()
- self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- test_user_1 = get_user( common.test_user_1_email )
- assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
- test_user_1_private_role = get_private_role( test_user_1 )
def test_0005_create_category( self ):
- """Create Sequence Analysis category"""
- self.logout()
- self.login( email=common.admin_email, username=common.admin_username )
- self.create_category( 'Sequence Analysis', 'Tools for performing Protein and DNA/RNA analysis' )
+ """Create a category for this test suite"""
+ self.create_category( 'Test 0020 Basic Repository Dependencies', 'Testing basic repository dependency features.' )
def test_0010_create_emboss_datatypes_repository_and_upload_tarball( self ):
'''Create and populate the emboss_datatypes repository.'''
self.logout()
@@ -35,7 +33,7 @@
self.create_repository( datatypes_repository_name,
datatypes_repository_description,
repository_long_description=datatypes_repository_long_description,
- categories=[ 'Sequence Analysis' ],
+ categories=[ 'Test 0020 Basic Repository Dependencies' ],
strings_displayed=[] )
repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
self.upload_file( repository, 'emboss/datatypes/datatypes_conf.xml', commit_message='Uploaded datatypes_conf.xml.' )
@@ -48,7 +46,7 @@
self.create_repository( emboss_repository_name,
emboss_repository_description,
repository_long_description=emboss_repository_long_description,
- categories=[ 'Text Manipulation' ],
+ categories=[ 'Test 0020 Basic Repository Dependencies' ],
strings_displayed=[] )
repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
self.upload_file( repository, 'emboss/emboss.tar', commit_message='Uploaded emboss_5.tar' )
@@ -56,8 +54,13 @@
'''Generate and upload the repository_dependencies.xml file'''
repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
- self.generate_repository_dependency_xml( datatypes_repository, self.get_filename( 'emboss/5/repository_dependencies.xml' ) )
- self.upload_file( repository, 'emboss/5/repository_dependencies.xml', commit_message='Uploaded repository_dependencies.xml' )
+ repository_dependencies_path = self.generate_temp_path( 'test_0020', additional_paths=[ 'emboss', '5' ] )
+ self.generate_repository_dependency_xml( datatypes_repository,
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
+ self.upload_file( repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml' )
def test_0030_verify_emboss_5_repository_dependency_on_emboss_datatypes( self ):
'''Verify that the emboss_5 repository now depends on the emboss_datatypes repository with correct name, owner, and changeset revision.'''
repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
@@ -65,7 +68,3 @@
changeset_revision = self.get_repository_tip( datatypes_repository )
strings_displayed = [ datatypes_repository_name, common.test_user_1_name, changeset_revision, 'Repository dependencies' ]
self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
- def test_0035_cleanup( self ):
- '''Clean up generated test data.'''
- if os.path.exists( self.get_filename( 'emboss/5/repository_dependencies.xml' ) ):
- os.remove( self.get_filename( 'emboss/5/repository_dependencies.xml' ) )
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -5,7 +5,7 @@
datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
-emboss_repository_name = 'emboss'
+emboss_repository_name = 'emboss_0030'
emboss_5_repository_name = 'emboss_5_0030'
emboss_6_repository_name = 'emboss_6_0030'
emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
@@ -13,38 +13,31 @@
class TestRepositoryDependencyRevisions( ShedTwillTestCase ):
'''Test dependencies on different revisions of a repository.'''
- '''
- create repository emboss_5_0030
- create repository emboss_6_0030
- create repository emboss_datatypes if necessary
- create repository emboss
- emboss_5 has repository_dependency.xml file that defines emboss_datatypes
- emboss_6 has repository_dependency.xml file that defines emboss_datatypes
- get information to create repository dependency imformation for emboss
- emboss depends on emboss_5
- then emboss depends on emboss_6
- verify per-changeset dependencies
- '''
def test_0000_initiate_users( self ):
"""Create necessary user accounts."""
self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % regular_email
+ test_user_1_private_role = get_private_role( test_user_1 )
+ self.logout()
self.login( email=common.admin_email, username=common.admin_username )
admin_user = get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
admin_user_private_role = get_private_role( admin_user )
+ def test_0005_create_category( self ):
+ """Create a category for this test suite"""
+ self.create_category( 'Test 0030 Repository Dependency Revisions', 'Testing repository dependencies by revision.' )
+ def test_0005_create_repositories( self ):
+ '''Create the emboss_5_0030, emboss_6_0030, emboss_datatypes, and emboss repositories and populate the emboss_datatypes repository.'''
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- test_user_1 = get_user( common.test_user_1_email )
- assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % regular_email
- test_user_1_private_role = get_private_role( test_user_1 )
- def test_0005_create_repositories( self ):
- '''Create the emboss_5_0030, emboss_6_0030, emboss_datatypes, and emboss repositories and populate the emboss_datatypes repository.'''
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
if emboss_5_repository is None:
self.create_repository( emboss_5_repository_name,
emboss_repository_description,
repository_long_description=emboss_repository_long_description,
- categories=[ 'Sequence Analysis' ],
+ categories=[ 'Test 0030 Repository Dependency Revisions' ],
strings_displayed=[] )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
self.upload_file( emboss_5_repository, 'emboss/emboss.tar', commit_message='Uploaded tool tarball.' )
@@ -53,7 +46,7 @@
self.create_repository( emboss_6_repository_name,
emboss_repository_description,
repository_long_description=emboss_repository_long_description,
- categories=[ 'Sequence Analysis' ],
+ categories=[ 'Test 0030 Repository Dependency Revisions' ],
strings_displayed=[] )
emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
self.upload_file( emboss_6_repository, 'emboss/emboss.tar', commit_message='Uploaded tool tarball..' )
@@ -62,7 +55,7 @@
self.create_repository( datatypes_repository_name,
datatypes_repository_description,
repository_long_description=datatypes_repository_long_description,
- categories=[ 'Sequence Analysis' ],
+ categories=[ 'Test 0030 Repository Dependency Revisions' ],
strings_displayed=[] )
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
if self.repository_is_new( datatypes_repository ):
@@ -72,45 +65,56 @@
self.create_repository( emboss_repository_name,
emboss_repository_description,
repository_long_description=emboss_repository_long_description,
- categories=[ 'Sequence Analysis' ],
+ categories=[ 'Test 0030 Repository Dependency Revisions' ],
strings_displayed=[] )
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
self.upload_file( emboss_5_repository, 'emboss/emboss.tar', commit_message='Uploaded tool tarball.' )
def test_0010_generate_repository_dependencies_for_emboss_5( self ):
'''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_5 repository.'''
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
- self.generate_repository_dependency_xml( datatypes_repository, self.get_filename( 'emboss/repository_dependencies.xml' ) )
+ repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
+ self.generate_repository_dependency_xml( datatypes_repository,
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
- self.upload_file( emboss_5_repository, 'emboss/repository_dependencies.xml', commit_message='Uploaded repository_depepndencies.xml.' )
+ self.upload_file( emboss_5_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_depepndencies.xml.' )
def test_0015_generate_repository_dependencies_for_emboss_6( self ):
'''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_6 repository.'''
emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
- self.upload_file( emboss_6_repository, 'emboss/repository_dependencies.xml', commit_message='Uploaded repository_depepndencies.xml.' )
+ repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
+ self.upload_file( emboss_6_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_depepndencies.xml.' )
def test_0020_generate_repository_dependency_on_emboss_5( self ):
'''Create and upload repository_dependencies.xml for the emboss_5_0030 repository.'''
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '5' ] )
self.generate_repository_dependency_xml( emboss_5_repository,
- self.get_filename( 'emboss/5/repository_dependencies.xml' ),
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Emboss requires the Emboss 5 repository.' )
self.upload_file( emboss_repository,
- 'emboss/5/repository_dependencies.xml',
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
commit_message='Uploaded dependency configuration specifying emboss_5' )
def test_0025_generate_repository_dependency_on_emboss_6( self ):
'''Create and upload repository_dependencies.xml for the emboss_6_0030 repository.'''
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '6' ] )
self.generate_repository_dependency_xml( emboss_6_repository,
- self.get_filename( 'emboss/6/repository_dependencies.xml' ),
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Emboss requires the Emboss 6 repository.' )
self.upload_file( emboss_repository,
- 'emboss/6/repository_dependencies.xml',
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
commit_message='Uploaded dependency configuration specifying emboss_6' )
def test_0030_verify_repository_dependency_revisions( self ):
'''Verify that different metadata revisions of the emboss repository have different repository dependencies.'''
repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
- # Reset emboss metadata to pick up the repository dependency changes.
-# self.reset_repository_metadata( repository )
repository_metadata = [ ( metadata.metadata, metadata.changeset_revision ) for metadata in self.get_repository_metadata( repository ) ]
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
datatypes_tip = self.get_repository_tip( datatypes_repository )
@@ -127,8 +131,3 @@
self.display_manage_repository_page( repository,
changeset_revision=changeset_revision,
strings_displayed=[ str( metadata ) for metadata in repository_dependency_metadata ] )
- def test_0035_cleanup( self ):
- '''Clean up generated repository dependency XML files.'''
- for filename in [ 'emboss/5/repository_dependencies.xml', 'emboss/6/repository_dependencies.xml', 'emboss/repository_dependencies.xml' ]:
- if os.path.exists( self.get_filename( filename ) ):
- os.remove( self.get_filename( filename ) )
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -10,6 +10,7 @@
# the hgweb.config file, the database, new repositories, etc. Since the tool shed browses repository contents via HTTP,
# the full path to the temporary directroy wher eht repositories are located cannot contain invalid url characters.
tool_shed_test_tmp_dir = os.path.join( tool_shed_home_directory, 'tmp' )
+os.environ[ 'TOOL_SHED_TEST_TMP_DIR' ] = tool_shed_test_tmp_dir
new_path = [ os.path.join( cwd, "lib" ) ]
new_path.extend( sys.path[1:] )
sys.path = new_path
@@ -270,7 +271,7 @@
for dir in [ tool_shed_test_tmp_dir ]:
if os.path.exists( dir ):
log.info( "Cleaning up temporary files in %s" % dir )
- #shutil.rmtree( dir )
+ shutil.rmtree( dir )
except:
pass
if success:
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/test_data/filtering/filtering.txt
--- a/test/tool_shed/test_data/filtering/filtering.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Readme file for filtering 1.1.0
diff -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a -r 512c7561e1b95b381345296a0da3aa3effc05803 test/tool_shed/test_data/filtering/filtering_0000.txt
--- /dev/null
+++ b/test/tool_shed/test_data/filtering/filtering_0000.txt
@@ -0,0 +1,1 @@
+Readme file for filtering 1.1.0
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Fix for SelectToolParameter rerun, workflow when multiple="true", submitted by Jim Johnson.
by Bitbucket 10 Dec '12
by Bitbucket 10 Dec '12
10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/bd020b5aa531/
changeset: bd020b5aa531
user: dan
date: 2012-12-10 18:48:58
summary: Fix for SelectToolParameter rerun, workflow when multiple="true", submitted by Jim Johnson.
affected #: 1 file
diff -r 0042b30216fc5dc4f92b314c93a3aa9a0d810d77 -r bd020b5aa5314718dbd7afe9e4e9e05bb3b53c7a lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -714,7 +714,12 @@
else:
if value not in legal_values:
raise ValueError( "An invalid option was selected, please verify" )
- return value
+ return value
+ def to_html_value( self, value, app ):
+ if isinstance( value, list ):
+ return value
+ else:
+ return str( value )
def to_param_dict_string( self, value, other_values={} ):
if value is None:
return "None"
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: clements: Added boilerplate for Sphinx doc home page.
by Bitbucket 07 Dec '12
by Bitbucket 07 Dec '12
07 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0042b30216fc/
changeset: 0042b30216fc
user: clements
date: 2012-11-06 20:14:22
summary: Added boilerplate for Sphinx doc home page.
affected #: 1 file
diff -r c8f0ea550d51b2c203f5f60568817164c62220fd -r 0042b30216fc5dc4f92b314c93a3aa9a0d810d77 doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -1,12 +1,41 @@
Galaxy Code Documentation
*************************
-Galaxy is an open, web-based platform for accessible, reproducible, and
+Galaxy_ is an open, web-based platform for accessible, reproducible, and
transparent computational biomedical research.
-- Accessible: Users without programming experience can easily specify parameters and run tools and workflows.
-- Reproducible: Galaxy captures information so that any user can repeat and understand a complete computational analysis.
-- Transparent: Users share and publish analyses via the web and create Pages, interactive, web-based documents that describe a complete analysis.
+- *Accessible:* Users without programming experience can easily specify parameters and run tools and workflows.
+- *Reproducible:* Galaxy captures information so that any user can repeat and understand a complete computational analysis.
+- *Transparent:* Users share and publish analyses via the web and create Pages, interactive, web-based documents that describe a complete analysis.
+
+Two copies of the Galaxy code doumentation are published by the Galaxy Project
+
+- Galaxy-Dist_: This describes the code in the `most recent official release`_ of Galaxy.
+- Galaxy-Central_: Describes the `current code in the development branch`_ of Galaxy. This is the latest checkin, bleeding edge version of the code. The documentation should never be more than an hour behind the code.
+
+Both copies are hosted at ReadTheDocs_, a publicly supported web site for hosting project documentation.
+
+If you have your own copy of the Galaxy source code, you can also generate your own version of this documentation:
+
+::
+
+ $ cd doc
+ $ make html
+
+The generated documentation will be in ``doc/build/html/`` and can be viewed with a web browser. Note that you will need to install Sphinx and a fair number of module dependencies before this will produce output.
+
+.. _Galaxy: http://galaxyproject.org/
+.. _Galaxy-Dist: https://galaxy-dist.readthedocs.org/
+.. _most recent official release: https://bitbucket.org/galaxy/galaxy-dist
+.. _Galaxy-Central: https://galaxy-central.readthedocs.org/
+.. _current code in the development branch: https://bitbucket.org/galaxy/galaxy-central
+.. _ReadTheDocs: https://readthedocs.org/
+
+
+For more on the Galaxy Project, please visit the `project home page`_.
+
+.. _project home page: http://galaxyproject.org/
+
Contents
========
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8cb49d27d1a4/
changeset: 8cb49d27d1a4
user: lance_parsons
date: 2012-12-07 23:22:48
summary: Fix for finding test-data for installed tools
See bug: https://trello.com/c/Z0vhJEq6
affected #: 1 file
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 8cb49d27d1a4c681eb774ac020d524258709ce20 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -57,8 +57,10 @@
last_galaxy_test_file_dir = None
last_tested_repository_name = None
last_tested_changeset_revision = None
+ tool_path = None
tree = util.parse_xml( config )
root = tree.getroot()
+ tool_path = root.get('tool_path')
for elem in root:
if elem.tag == 'tool':
galaxy_test_file_dir, \
@@ -66,7 +68,8 @@
last_tested_changeset_revision = get_installed_repository_info( elem,
last_galaxy_test_file_dir,
last_tested_repository_name,
- last_tested_changeset_revision )
+ last_tested_changeset_revision,
+ tool_path )
if galaxy_test_file_dir:
if galaxy_test_file_dir != last_galaxy_test_file_dir:
if not os.path.isabs( galaxy_test_file_dir ):
@@ -82,7 +85,8 @@
last_tested_changeset_revision = get_installed_repository_info( section_elem,
last_galaxy_test_file_dir,
last_tested_repository_name,
- last_tested_changeset_revision )
+ last_tested_changeset_revision,
+ tool_path )
if galaxy_test_file_dir:
if galaxy_test_file_dir != last_galaxy_test_file_dir:
if not os.path.isabs( galaxy_test_file_dir ):
@@ -92,7 +96,7 @@
last_galaxy_test_file_dir = galaxy_test_file_dir
return shed_tools_dict
-def get_installed_repository_info( elem, last_galaxy_test_file_dir, last_tested_repository_name, last_tested_changeset_revision ):
+def get_installed_repository_info( elem, last_galaxy_test_file_dir, last_tested_repository_name, last_tested_changeset_revision, tool_path ):
"""
Return the GALAXY_TEST_FILE_DIR, the containing repository name and the change set revision for the tool elem.
This only happens when testing tools installed from the tool shed.
@@ -107,7 +111,7 @@
if repository_name != last_tested_repository_name or changeset_revision != last_tested_changeset_revision:
# Locate the test-data directory.
installed_tool_path = os.path.join( installed_tool_path_items[ 0 ], 'repos', repository_owner, repository_name, changeset_revision )
- for root, dirs, files in os.walk( installed_tool_path ):
+ for root, dirs, files in os.walk( os.path.join(tool_path, installed_tool_path )):
if 'test-data' in dirs:
return os.path.join( root, 'test-data' ), repository_name, changeset_revision
return None, repository_name, changeset_revision
https://bitbucket.org/galaxy/galaxy-central/changeset/c8f0ea550d51/
changeset: c8f0ea550d51
user: greg
date: 2012-12-08 01:09:00
summary: Merged in lance_parsons/galaxy-central_installed-tools-functional-tests-fix (pull request #95)
affected #: 1 file
diff -r 6f3266a589e397cbdbe8efe4f4d26b7dcdc8924c -r c8f0ea550d51b2c203f5f60568817164c62220fd scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -57,8 +57,10 @@
last_galaxy_test_file_dir = None
last_tested_repository_name = None
last_tested_changeset_revision = None
+ tool_path = None
tree = util.parse_xml( config )
root = tree.getroot()
+ tool_path = root.get('tool_path')
for elem in root:
if elem.tag == 'tool':
galaxy_test_file_dir, \
@@ -66,7 +68,8 @@
last_tested_changeset_revision = get_installed_repository_info( elem,
last_galaxy_test_file_dir,
last_tested_repository_name,
- last_tested_changeset_revision )
+ last_tested_changeset_revision,
+ tool_path )
if galaxy_test_file_dir:
if galaxy_test_file_dir != last_galaxy_test_file_dir:
if not os.path.isabs( galaxy_test_file_dir ):
@@ -82,7 +85,8 @@
last_tested_changeset_revision = get_installed_repository_info( section_elem,
last_galaxy_test_file_dir,
last_tested_repository_name,
- last_tested_changeset_revision )
+ last_tested_changeset_revision,
+ tool_path )
if galaxy_test_file_dir:
if galaxy_test_file_dir != last_galaxy_test_file_dir:
if not os.path.isabs( galaxy_test_file_dir ):
@@ -92,7 +96,7 @@
last_galaxy_test_file_dir = galaxy_test_file_dir
return shed_tools_dict
-def get_installed_repository_info( elem, last_galaxy_test_file_dir, last_tested_repository_name, last_tested_changeset_revision ):
+def get_installed_repository_info( elem, last_galaxy_test_file_dir, last_tested_repository_name, last_tested_changeset_revision, tool_path ):
"""
Return the GALAXY_TEST_FILE_DIR, the containing repository name and the change set revision for the tool elem.
This only happens when testing tools installed from the tool shed.
@@ -107,7 +111,7 @@
if repository_name != last_tested_repository_name or changeset_revision != last_tested_changeset_revision:
# Locate the test-data directory.
installed_tool_path = os.path.join( installed_tool_path_items[ 0 ], 'repos', repository_owner, repository_name, changeset_revision )
- for root, dirs, files in os.walk( installed_tool_path ):
+ for root, dirs, files in os.walk( os.path.join(tool_path, installed_tool_path )):
if 'test-data' in dirs:
return os.path.join( root, 'test-data' ), repository_name, changeset_revision
return None, repository_name, changeset_revision
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: clements: Added numpy to mock modules list. Readthedocs of galaxy-central is broken, and it's complaining about numpy. So, try adding it to mock modules list and see if that helps.
by Bitbucket 07 Dec '12
by Bitbucket 07 Dec '12
07 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6f3266a589e3/
changeset: 6f3266a589e3
user: clements
date: 2012-11-06 18:31:28
summary: Added numpy to mock modules list. Readthedocs of galaxy-central is broken, and it's complaining about numpy. So, try adding it to mock modules list and see if that helps.
affected #: 1 file
diff -r 6d86fa9ca5977060372704346a1e9a2b6308a292 -r 6f3266a589e397cbdbe8efe4f4d26b7dcdc8924c doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -274,6 +274,6 @@
return Mock()
# adding pbs_python, DRMAA_python, markupsafe, and drmaa here had no effect.
-MOCK_MODULES = ['tables', 'decorator']
+MOCK_MODULES = ['tables', 'decorator', 'numpy']
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = Mock()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Handle some cases of circular dependencies and other corner case situations with repository dependency definitions.
by Bitbucket 07 Dec '12
by Bitbucket 07 Dec '12
07 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6d86fa9ca597/
changeset: 6d86fa9ca597
user: greg
date: 2012-12-07 22:47:24
summary: Handle some cases of circular dependencies and other corner case situations with repository dependency definitions.
affected #: 3 files
diff -r facdd387b85e814df7428ca8cbb71828d0ec48a2 -r 6d86fa9ca5977060372704346a1e9a2b6308a292 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -2,12 +2,12 @@
from galaxy import util
from galaxy.tools import parameters
from galaxy.util import inflector
-from galaxy.util.json import *
+from galaxy.util import json
from galaxy.web import url_for
from galaxy.web.form_builder import SelectField
-from galaxy.webapps.community.util.container_util import *
-from galaxy.datatypes.checkers import *
-from galaxy.model.orm import *
+from galaxy.webapps.community.util import container_util
+from galaxy.datatypes import checkers
+from galaxy.model.orm import and_
from galaxy.tools.parameters import dynamic_options
from galaxy import eggs
@@ -124,45 +124,49 @@
# Datatypes container.
if metadata and 'datatypes' in metadata:
datatypes = metadata[ 'datatypes' ]
- folder_id, datatypes_root_folder = build_datatypes_folder( folder_id, datatypes )
+ folder_id, datatypes_root_folder = container_util.build_datatypes_folder( folder_id, datatypes )
containers_dict[ 'datatypes' ] = datatypes_root_folder
# Invalid tools container.
if metadata and 'invalid_tools' in metadata:
invalid_tool_configs = metadata[ 'invalid_tools' ]
- folder_id, invalid_tools_root_folder = build_invalid_tools_folder( folder_id,
- invalid_tool_configs,
- changeset_revision,
- repository=repository,
- label='Invalid tools' )
+ folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( folder_id,
+ invalid_tool_configs,
+ changeset_revision,
+ repository=repository,
+ label='Invalid tools' )
containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
# Readme files container.
readme_files_dict = build_readme_files_dict( repository_metadata )
- folder_id, readme_files_root_folder = build_readme_files_folder( folder_id, readme_files_dict )
+ folder_id, readme_files_root_folder = container_util.build_readme_files_folder( folder_id, readme_files_dict )
containers_dict[ 'readme_files' ] = readme_files_root_folder
# Repository dependencies container.
toolshed_base_url = str( url_for( '/', qualified=True ) ).rstrip( '/' )
- folder_id, repository_dependencies_root_folder = build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
- repository_name=repository.name,
- repository_owner=repository.user.username,
- changeset_revision=changeset_revision,
- folder_id=folder_id,
- repository_dependencies=repository_dependencies )
+ folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
+ repository_name=repository.name,
+ repository_owner=repository.user.username,
+ changeset_revision=changeset_revision,
+ folder_id=folder_id,
+ repository_dependencies=repository_dependencies )
if repository_dependencies_root_folder:
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
# Tool dependencies container.
if metadata and 'tool_dependencies' in metadata:
tool_dependencies = metadata[ 'tool_dependencies' ]
- folder_id, tool_dependencies_root_folder = build_tool_dependencies_folder( folder_id, tool_dependencies, for_galaxy=False )
+ folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id, tool_dependencies, for_galaxy=False )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Valid tools container.
if metadata and 'tools' in metadata:
valid_tools = metadata[ 'tools' ]
- folder_id, valid_tools_root_folder = build_tools_folder( folder_id, valid_tools, repository, changeset_revision, label='Valid tools' )
+ folder_id, valid_tools_root_folder = container_util.build_tools_folder( folder_id,
+ valid_tools,
+ repository,
+ changeset_revision,
+ label='Valid tools' )
containers_dict[ 'valid_tools' ] = valid_tools_root_folder
# Workflows container.
if metadata and 'workflows' in metadata:
workflows = metadata[ 'workflows' ]
- folder_id, workflows_root_folder = build_workflows_folder( folder_id, workflows, repository_metadata, label='Workflows' )
+ folder_id, workflows_root_folder = container_util.build_workflows_folder( folder_id, workflows, repository_metadata, label='Workflows' )
containers_dict[ 'workflows' ] = workflows_root_folder
except Exception, e:
log.debug( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) )
@@ -190,6 +194,29 @@
option_value = trans.security.encode_id( repository.id )
repositories_select_field.add_option( option_label, option_value )
return repositories_select_field
+def can_add_entry_to_all_repository_dependencies( current_repository_key, repository_dependency, all_repository_dependencies ):
+ """
+ Handle circular repository dependencies that could result in an infinite loop by determining if it is safe to add an entry to the
+ repository dependencies container.
+ """
+ # First check for an exact match - if this is true, the changeset revision was not updated.
+ repository_dependency_as_key = container_util.generate_repository_dependencies_key_for_repository( repository_dependency[ 0 ],
+ repository_dependency[ 1 ],
+ repository_dependency[ 2 ],
+ repository_dependency[ 3] )
+ current_repository_key_as_repository_dependency = current_repository_key.split( container_util.STRSEP )
+ if repository_dependency_as_key in all_repository_dependencies:
+ val = all_repository_dependencies[ repository_dependency_as_key ]
+ if current_repository_key_as_repository_dependency in val:
+ return False
+ # Now handle the case where an update to the changeset revision was done, so everything will match except the changeset_revision.
+ repository_dependency_as_partial_key = container_util.STRSEP.join( [ repository_dependency[ 0 ], repository_dependency[ 1 ], repository_dependency[ 2 ] ] )
+ for key in all_repository_dependencies:
+ if key.startswith( repository_dependency_as_partial_key ):
+ val = all_repository_dependencies[ key ]
+ if current_repository_key_as_repository_dependency in val:
+ return False
+ return True
def can_generate_tool_dependency_metadata( root, metadata_dict ):
"""
Make sure the combination of name, version and type (the type will be the value of elem.tag) of each root element tag in the tool_dependencies.xml
@@ -736,8 +763,8 @@
elif name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
full_path = str( os.path.abspath( os.path.join( root, name ) ) )
if os.path.getsize( full_path ) > 0:
- if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
- or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
+ if not ( checkers.check_binary( full_path ) or checkers.check_image( full_path ) or checkers.check_gzip( full_path )[ 0 ]
+ or checkers.check_bz2( full_path )[ 0 ] or checkers.check_zip( full_path ) ):
try:
# Make sure we're looking at a tool config and not a display application config or something else.
element_tree = util.parse_xml( full_path )
@@ -779,7 +806,7 @@
fp = open( relative_path, 'rb' )
workflow_text = fp.read()
fp.close()
- exported_workflow_dict = from_json_string( workflow_text )
+ exported_workflow_dict = json.from_json_string( workflow_text )
if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
if readme_files:
@@ -1189,106 +1216,112 @@
repository_dependencies_dict = metadata[ 'repository_dependencies' ]
# The repository_dependencies entry in the metadata is a dictionary that may have a value for a 'description' key. We want to
# store the value of this key only once, the first time through this recursive method.
- repository_dependencies_root_key = generate_repository_dependencies_key_for_repository( toolshed_base_url=toolshed_base_url,
- repository_name=repository.name,
- repository_owner=repository.user.username,
- changeset_revision=repository_metadata.changeset_revision )
+ current_repository_key = container_util.generate_repository_dependencies_key_for_repository( toolshed_base_url=toolshed_base_url,
+ repository_name=repository.name,
+ repository_owner=repository.user.username,
+ changeset_revision=repository_metadata.changeset_revision )
if not all_repository_dependencies:
- # Initialize the all_repository_dependencies dictionary.
- all_repository_dependencies[ 'root_key' ] = repository_dependencies_root_key
- all_repository_dependencies[ repository_dependencies_root_key ] = []
+ # Initialize the all_repository_dependencies dictionary. It's safe to assume that current_repository_key in this case will have a value.
+ all_repository_dependencies[ 'root_key' ] = current_repository_key
+ all_repository_dependencies[ current_repository_key ] = []
if 'description' not in all_repository_dependencies:
description = repository_dependencies_dict.get( 'description', None )
all_repository_dependencies[ 'description' ] = description
# The next key of interest in repository_dependencies_dict is 'repository_dependencies', which is a list of tuples.
repository_dependencies_tups = repository_dependencies_dict[ 'repository_dependencies' ]
+ if repository_dependencies_tups and current_repository_key:
+ # Remove all repository dependencies that point to a revision within its own repository.
+ repository_dependencies_tups = remove_ropository_dependency_reference_to_self( repository_dependencies_tups, current_repository_key )
for repository_dependency in repository_dependencies_tups:
- # Skip repository dependencies that point to the root repository.
- check_key = generate_repository_dependencies_key_for_repository( toolshed_base_url=repository_dependency[ 0 ],
- repository_name=repository_dependency[ 1 ],
- repository_owner=repository_dependency[ 2 ],
- changeset_revision=repository_dependency[ 3 ] )
- if check_key == repository_dependencies_root_key:
- handled.append( repository_dependency )
- elif repository_dependency not in handled and repository_dependency not in repository_dependencies:
+ if repository_dependency not in handled and repository_dependency not in repository_dependencies:
+ # The following if statement handles repositories dependencies that are circular in nature.
+ if current_repository_key:
+ if current_repository_key in all_repository_dependencies:
+ # Add all repository dependencies for the current repository into it's entry in all_repository_dependencies.
+ all_repository_dependencies_val = all_repository_dependencies[ current_repository_key ]
+ if repository_dependency not in all_repository_dependencies_val:
+ all_repository_dependencies_val.append( repository_dependency )
+ all_repository_dependencies[ current_repository_key ] = all_repository_dependencies_val
+ elif can_add_entry_to_all_repository_dependencies( current_repository_key, repository_dependency, all_repository_dependencies ):
+ # We don't have a circular dependency that could result in an infinite loop.
+ all_repository_dependencies[ current_repository_key ] = [ repository_dependency ]
repository_dependencies.append( repository_dependency )
else:
- repository_dependencies_root_key = None
- if repository_dependencies:
- repository_dependency = repository_dependencies.pop( 0 )
- # Cast unicode to string.
- repository_dependency = [ str( item ) for item in repository_dependency ]
- tool_shed, name, owner, changeset_revision = repository_dependency
- if repository_dependencies_root_key:
- if repository_dependencies_root_key in all_repository_dependencies:
- # See if this repository_dependency is contained in the list associated with the repository_dependencies_root_key.
- all_repository_dependencies_val = all_repository_dependencies[ repository_dependencies_root_key ]
- if repository_dependency not in all_repository_dependencies_val:
- all_repository_dependencies_val.append( repository_dependency )
- all_repository_dependencies[ repository_dependencies_root_key ] = all_repository_dependencies_val
- handled.append( repository_dependency )
- else:
- # Insert this repository_dependency.
- all_repository_dependencies[ repository_dependencies_root_key ] = [ repository_dependency ]
+ # The current repository does not have repository dependencies defined for it.
+ current_repository_key = None
+ # The following if statement handles repositories dependencies that are circular in nature.
+ if current_repository_key and current_repository_key in all_repository_dependencies:
+ repository_dependencies_tups = [ rd for rd in all_repository_dependencies[ current_repository_key ] ]
+ if repository_dependencies_tups:
+ repository_dependency = repository_dependencies_tups.pop( 0 )
+ if repository_dependency not in handled:
handled.append( repository_dependency )
- if tool_shed_is_this_tool_shed( tool_shed ):
- # The repository is in the current tool shed.
- required_repository = get_repository_by_name_and_owner( trans, name, owner )
- required_repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
- trans.security.encode_id( required_repository.id ),
- changeset_revision )
- if required_repository_metadata:
- required_repo_dir = required_repository.repo_path( trans.app )
- required_repo = hg.repository( get_configured_ui(), required_repo_dir )
- else:
- # The repository changeset_revision is no longer installable, so see if there's been an update.
- required_repo_dir = required_repository.repo_path( trans.app )
- required_repo = hg.repository( get_configured_ui(), required_repo_dir )
- required_changeset_revision = get_next_downloadable_changeset_revision( required_repository, required_repo, changeset_revision )
+ if repository_dependency in repository_dependencies:
+ repository_dependencies.remove( repository_dependency )
+ toolshed, name, owner, changeset_revision = repository_dependency
+ if tool_shed_is_this_tool_shed( toolshed ):
+ required_repository = get_repository_by_name_and_owner( trans, name, owner )
required_repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
trans.security.encode_id( required_repository.id ),
- required_changeset_revision )
+ changeset_revision )
if required_repository_metadata:
- # The changeset_revision defined in a repository_dependencies.xml file is outdated, so we need to fix appropriate
- # entries in our all_repository_dependencies dictionary.
- updated_repository_dependency = [ tool_shed, name, owner, required_changeset_revision ]
- for k, v in all_repository_dependencies.items():
- if k in [ 'root_key', 'description' ]:
- continue
- for i, current_repository_dependency in enumerate( v ):
- current_tool_shed, current_name, current_owner, current_changeset_revision = current_repository_dependency
- if tool_shed == current_tool_shed and name == current_name and owner == current_owner and changeset_revision == current_changeset_revision:
- if updated_repository_dependency in v:
- # We've already stored the updated repository_dependency, so remove the outdated one.
- v = v.remove( repository_dependency )
- else:
- # Store the updated repository_dependency.
- v[ i ] = updated_repository_dependency
- all_repository_dependencies[ k ] = v
- if required_repository_metadata:
- # The required_repository_metadata changeset_revision is installable.
- required_metadata = required_repository_metadata.metadata
- if required_metadata:
- return get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=required_repo,
- repository=required_repository,
- repository_metadata=required_repository_metadata,
- toolshed_base_url=tool_shed,
- repository_dependencies=repository_dependencies,
- all_repository_dependencies=all_repository_dependencies,
- handled=handled )
- else:
- # The repository is in a different tool shed, so build an url and send a request.
- raise Exception( "Repository dependencies that refer to repositories in other tool sheds is not yet supported." )
+ required_repo_dir = required_repository.repo_path( trans.app )
+ required_repo = hg.repository( get_configured_ui(), required_repo_dir )
+ else:
+ # The repository changeset_revision is no longer installable, so see if there's been an update.
+ required_repo_dir = required_repository.repo_path( trans.app )
+ required_repo = hg.repository( get_configured_ui(), required_repo_dir )
+ required_changeset_revision = get_next_downloadable_changeset_revision( required_repository, required_repo, changeset_revision )
+ required_repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( required_repository.id ),
+ required_changeset_revision )
+ if required_repository_metadata:
+ # The changeset_revision defined in a repository_dependencies.xml file is outdated, so we need to fix appropriate
+ # entries in our all_repository_dependencies dictionary.
+ updated_repository_dependency = [ toolshed, name, owner, required_changeset_revision ]
+ for k, v in all_repository_dependencies.items():
+ if k in [ 'root_key', 'description' ]:
+ continue
+ for i, current_repository_dependency in enumerate( v ):
+ cts, cn, co, ccr = current_repository_dependency
+ if toolshed == cts and name == cn and owner == co and changeset_revision == ccr:
+ if updated_repository_dependency in v:
+ # We've already stored the updated repository_dependency, so remove the outdated one.
+ v = v.remove( repository_dependency )
+ all_repository_dependencies[ k ] = v
+ else:
+ # Store the updated repository_dependency.
+ v[ i ] = updated_repository_dependency
+ all_repository_dependencies[ k ] = v
+ if required_repository_metadata:
+ # The required_repository_metadata changeset_revision is installable.
+ required_metadata = required_repository_metadata.metadata
+ if required_metadata:
+ for repository_dependency in repository_dependencies_tups:
+ if repository_dependency not in repository_dependencies:
+ repository_dependencies.append( repository_dependency )
+ return get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=required_repo,
+ repository=required_repository,
+ repository_metadata=required_repository_metadata,
+ toolshed_base_url=toolshed,
+ repository_dependencies=repository_dependencies,
+ all_repository_dependencies=all_repository_dependencies,
+ handled=handled )
+ else:
+ # The repository is in a different tool shed, so build an url and send a request.
+ error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring repository dependency definition "
+ error_message += "for tool shed %s, name %s, owner %s, changeset revision %s" % ( toolshed, name, owner, changeset_revision )
+ log.debug( error_message )
return all_repository_dependencies
def get_repository_file_contents( file_path ):
- if is_gzip( file_path ):
+ if checkers.is_gzip( file_path ):
safe_str = to_safe_string( '\ngzip compressed file\n' )
- elif is_bz2( file_path ):
+ elif checkers.is_bz2( file_path ):
safe_str = to_safe_string( '\nbz2 compressed file\n' )
- elif check_zip( file_path ):
+ elif checkers.check_zip( file_path ):
safe_str = to_safe_string( '\nzip compressed file\n' )
- elif check_binary( file_path ):
+ elif checkers.check_binary( file_path ):
safe_str = to_safe_string( '\nBinary file\n' )
else:
safe_str = ''
@@ -1526,6 +1559,18 @@
shutil.rmtree( dir )
except:
pass
+def remove_ropository_dependency_reference_to_self( repository_dependencies, repository_key ):
+ """Remove all repository dependencies that point to a revision within its own repository."""
+ clean_repository_dependencies = []
+ repository_tup = repository_key.split( container_util.STRSEP )
+ rd_toolshed, rd_name, rd_owner, rd_changeset_revision = repository_tup
+ for repository_dependency in repository_dependencies:
+ toolshed, name, owner, changeset_revision = repository_dependency
+ if rd_toolshed == toolshed and rd_name == name and rd_owner == owner:
+ log.debug( "Removing repository dependency for repository %s owned by %s since it refers to a revision within itself." % ( name, owner ) )
+ else:
+ clean_repository_dependencies.append( repository_dependency )
+ return clean_repository_dependencies
def remove_tool_dependency_installation_directory( dependency_install_dir ):
if os.path.exists( dependency_install_dir ):
try:
diff -r facdd387b85e814df7428ca8cbb71828d0ec48a2 -r 6d86fa9ca5977060372704346a1e9a2b6308a292 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -188,7 +188,7 @@
else:
metadata_dict = {}
if 'tool_dependencies' not in metadata_dict:
- message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml". '
+ message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml file". '
status = 'warning'
log.debug( 'Error in tool dependencies for repository %s: %s.' % ( repository.id, repository.name ) )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
diff -r facdd387b85e814df7428ca8cbb71828d0ec48a2 -r 6d86fa9ca5977060372704346a1e9a2b6308a292 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -212,7 +212,7 @@
repository_dependencies_folder.description = repository_dependencies.get( 'description', None )
repository_dependencies_root_folder.folders.append( repository_dependencies_folder )
del repository_dependencies[ 'description' ]
- # The remaining keys in repository_dependencies should all be folders.
+ # The current keys in repository_dependencies should all be folders.
folder_keys = repository_dependencies.keys()
# If repository_dependencies_folder_key is an entry in repository_dependencies, process it first.
if repository_dependencies_folder_key in repository_dependencies:
@@ -404,7 +404,7 @@
STRSEP,
str( changeset_revision ) )
def get_folder( folder, key ):
- if folder and folder.key == key:
+ if folder.key == key:
return folder
for sub_folder in folder.folders:
return get_folder( sub_folder, key )
@@ -421,25 +421,30 @@
folder_keys, folder_id, repository_dependency_id, repository_name, repository_owner, changeset_revision,
key, val ):
# Only create a new folder object if necessary.
- folder = get_folder( repository_dependencies_root_folder, key )
+ folder = get_folder( repository_dependencies_folder, key )
if not folder:
folder_id += 1
label = generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, changeset_revision, key )
folder = Folder( id=folder_id, key=key, label=label, parent=repository_dependencies_folder )
for repository_dependency_tup in val:
toolshed, name, owner, changeset_revision = repository_dependency_tup
- if is_root_repository( repository_dependencies_folder_key, toolshed, name, owner ):
- # Do not include repository dependencies that point to a revision within the same repository.
- continue
if is_or_should_be_folder( folder_keys, toolshed, name, owner, changeset_revision ):
check_folder_key = generate_repository_dependencies_key_for_repository( toolshed, name, owner, changeset_revision )
- if get_folder( repository_dependencies_root_folder, check_folder_key ):
- continue
+ check_folder = get_folder( repository_dependencies_folder, check_folder_key )
+ if check_folder:
+ repository_dependency_id += 1
+ repository_dependency = RepositoryDependency( id=repository_dependency_id,
+ toolshed=toolshed,
+ repository_name=name,
+ repository_owner=owner,
+ changeset_revision=changeset_revision )
+ if not check_folder.contains_repository_dependency( repository_dependency ):
+ check_folder.repository_dependencies.append( repository_dependency )
else:
# Create a new folder, which may be populated later.
folder_id += 1
label = generate_repository_dependencies_folder_label_from_key( name, owner, changeset_revision, key )
- sub_folder = Folder( id=folder_id, key=check_folder_key, label=label, parent=repository_dependencies_folder )
+ sub_folder = Folder( id=folder_id, key=check_folder_key, label=label, parent=folder )
folder.folders.append( sub_folder )
else:
repository_dependency_id += 1
@@ -458,11 +463,6 @@
def is_or_should_be_folder( folder_keys, toolshed, repository_name, repository_owner, changeset_revision ):
key = '%s%s%s%s%s%s%s' % ( toolshed, STRSEP, repository_name, STRSEP, repository_owner, STRSEP, changeset_revision )
return key in folder_keys
-def is_root_repository( repository_dependencies_folder_key, toolshed, repository_name, repository_owner ):
- # Return True if a repository dependency points to a revision within it's own repository.
- repository_dependencies_folder_tup = repository_dependencies_folder_key.split( STRSEP )
- rdf_toolshed, rdf_repository_name, rdf_repository_owner, rdf_changeset_revision = repository_dependencies_folder_tup
- return rdf_toolshed == toolshed and rdf_repository_name == repository_name and rdf_repository_owner == repository_owner
def key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, key ):
toolshed_base_url, key_name, key_owner, key_changeset_revision = get_components_from_key( key )
return repository_name == key_name and repository_owner == key_owner and changeset_revision == key_changeset_revision
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/659392eae2a8/
changeset: 659392eae2a8
user: inithello
date: 2012-12-07 22:38:26
summary: Revert tool migration.
affected #: 35 files
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 lib/galaxy/tool_shed/migrate/versions/0008_tools.py
--- a/lib/galaxy/tool_shed/migrate/versions/0008_tools.py
+++ /dev/null
@@ -1,21 +0,0 @@
-"""
-The following tools have been eliminated from the distribution:
-Add column to an existing dataset, Change Case of selected columns,
-Condense consecutive characters, Convert delimiters to TAB,
-Cut columns from a table, Merge Columns together, Remove beginning of a file,
-Select first lines from a dataset, Select last lines from a dataset,
-and Trim leading or trailing characters. The tools are now available in the
-repositories named add_value, change_case, condense_characters,
-convert_characters, cut_columns, merge_cols, remove_beginning,
-show_beginning, show_tail, and trimmer from the main Galaxy tool shed at
-http://toolshed.g2.bx.psu.edu, and will be installed into your
-local Galaxy instance at the location discussed above by running
-the following command.
-"""
-
-import sys
-
-def upgrade():
- print __doc__
-def downgrade():
- pass
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 scripts/migrate_tools/0008_tools.sh
--- a/scripts/migrate_tools/0008_tools.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-
-cd `dirname $0`/../..
-python ./scripts/migrate_tools/migrate_tools.py 0008_tools.xml $@
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 scripts/migrate_tools/0008_tools.xml
--- a/scripts/migrate_tools/0008_tools.xml
+++ /dev/null
@@ -1,33 +0,0 @@
-<?xml version="1.0"?>
-<toolshed name="toolshed.g2.bx.psu.edu">
- <repository name="add_value" description="Add a value as a new column." changeset_revision="181dd378275c">
- <tool id="addValue" version="1.0.0" file="fixedValueColumn.xml" />
- </repository>
- <repository name="change_case" description="Change the case of a column." changeset_revision="e6f966602870">
- <tool id="ChangeCase" version="1.0.0" file="changeCase.xml" />
- </repository>
- <repository name="condense_characters" description="Condense repeated characters." changeset_revision="2c08781560de">
- <tool id="Condense characters1" version="1.0.0" file="condense_characters.xml" />
- </repository>
- <repository name="convert_characters" description="Convert delimiters to TAB." changeset_revision="64d46676a13e">
- <tool id="Convert characters1" version="1.0.0" file="convert_characters.xml" />
- </repository>
- <repository name="cut_columns" description="Remove or rearrange columns." changeset_revision="34c29e183ef7">
- <tool id="Cut1" version="1.0.1" file="cutWrapper.xml" />
- </repository>
- <repository name="merge_cols" description="Merge columns together." changeset_revision="28ca7552e884">
- <tool id="mergeCols1" version="1.0.1" file="mergeCols.xml" />
- </repository>
- <repository name="remove_beginning" description="Remove lines from the beginning of a file." changeset_revision="d9b82504a321">
- <tool id="Remove beginning1" version="1.0.0" file="remove_beginning.xml" />
- </repository>
- <repository name="show_beginning" description="Select lines from the beginning of a file." changeset_revision="ecca14446e6a">
- <tool id="Show beginning1" version="1.0.0" file="headWrapper.xml" />
- </repository>
- <repository name="show_tail" description="Select lines from the end of a file." changeset_revision="8bb4d908a523">
- <tool id="Show tail1" version="1.0.0" file="tailWrapper.xml" />
- </repository>
- <repository name="trimmer" description="Trim trailing characters from each line or column." changeset_revision="f862a6e4d096">
- <tool id="trimmer" version="0.0.1" file="trimmer.xml" />
- </repository>
-</toolshed>
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 test-data/a.txt
--- /dev/null
+++ b/test-data/a.txt
@@ -0,0 +1,15 @@
+ CHR SNP BP A1 TEST NMISS BETA STAT P
+ 1 rs1181876 3671541 T DOMDEV 958 -1.415 -3.326 0.0009161
+ 1 rs10492923 5092886 C ADD 1007 5.105 4.368 1.382e-05
+ 1 rs10492923 5092886 C DOMDEV 1007 -5.612 -4.249 2.35e-05
+ 1 rs10492923 5092886 C GENO_2DF 1007 NA 19.9 4.775e-05
+ 1 rs1801133 11778965 T ADD 1022 1.23 3.97 7.682e-05
+ 1 rs1801133 11778965 T GENO_2DF 1022 NA 16.07 0.0003233
+ 1 rs1361912 12663121 A ADD 1021 12.69 4.093 4.596e-05
+ 1 rs1361912 12663121 A DOMDEV 1021 -12.37 -3.945 8.533e-05
+ 1 rs1361912 12663121 A GENO_2DF 1021 NA 17.05 0.0001982
+ 1 rs1009806 19373138 G ADD 1021 -1.334 -3.756 0.0001826
+ 1 rs1009806 19373138 G GENO_2DF 1021 NA 19.36 6.244e-05
+ 1 rs873654 29550948 A DOMDEV 1012 1.526 3.6 0.0003339
+ 1 rs10489527 36800027 C ADD 1016 12.67 4.114 4.211e-05
+ 1 rs10489527 36800027 C DOMDEV 1016 -13.05 -4.02 6.249e-05
\ No newline at end of file
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 test-data/changeCase_out1.tabular
--- /dev/null
+++ b/test-data/changeCase_out1.tabular
@@ -0,0 +1,10 @@
+CHR1 4225 19670
+CHR10 6 8
+CHR1 24417 24420
+CHR6_HLA_HAP2 0 150
+CHR2 1 5
+CHR10 2 10
+CHR1 30 55
+CHRY 1 20
+CHR1 1225979 42287290
+CHR10 7 8
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 test-data/changeCase_out2.tabular
--- /dev/null
+++ b/test-data/changeCase_out2.tabular
@@ -0,0 +1,65 @@
+CHR1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 -
+CHR1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 +
+CHR1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 -
+CHR1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 +
+CHR10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 -
+CHR11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 -
+CHR11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 +
+CHR11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 -
+CHR11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 +
+CHR12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 -
+CHR13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 +
+CHR14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 -
+CHR15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 -
+CHR15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 +
+CHR15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 -
+CHR15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 +
+CHR16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 +
+CHR16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 -
+CHR16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 +
+CHR16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 -
+CHR18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 -
+CHR18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 +
+CHR18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 -
+CHR18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 +
+CHR19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 +
+CHR19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 -
+CHR19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 +
+CHR19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 -
+CHR2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 +
+CHR2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 -
+CHR2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 +
+CHR2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 -
+CHR20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 -
+CHR20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 +
+CHR20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 -
+CHR20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 +
+CHR21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 +
+CHR21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 -
+CHR21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 +
+CHR21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 -
+CHR22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 +
+CHR22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 -
+CHR22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 +
+CHR22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 -
+CHR5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 +
+CHR5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 -
+CHR5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 +
+CHR5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 -
+CHR6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 -
+CHR6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 +
+CHR6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 -
+CHR6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 +
+CHR7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 +
+CHR7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 -
+CHR7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 +
+CHR7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 -
+CHR8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 -
+CHR9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 +
+CHR9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 -
+CHR9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 +
+CHR9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 -
+CHRX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 +
+CHRX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 -
+CHRX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 +
+CHRX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 -
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 test-data/eq-addvalue.dat
--- /dev/null
+++ b/test-data/eq-addvalue.dat
@@ -0,0 +1,65 @@
+chr1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 - 1
+chr1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 + 1
+chr1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 - 1
+chr1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 + 1
+chr10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 - 1
+chr11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 - 1
+chr11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 + 1
+chr11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 - 1
+chr11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 + 1
+chr12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 - 1
+chr13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 + 1
+chr14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 - 1
+chr15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 - 1
+chr15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 + 1
+chr15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 - 1
+chr15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 + 1
+chr16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 + 1
+chr16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 - 1
+chr16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 + 1
+chr16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 - 1
+chr18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 - 1
+chr18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 + 1
+chr18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 - 1
+chr18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 + 1
+chr19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 + 1
+chr19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 - 1
+chr19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 + 1
+chr19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 - 1
+chr2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 + 1
+chr2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 - 1
+chr2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 + 1
+chr2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 - 1
+chr20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 - 1
+chr20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 + 1
+chr20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 - 1
+chr20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 + 1
+chr21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 + 1
+chr21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 - 1
+chr21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 + 1
+chr21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 - 1
+chr22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 + 1
+chr22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 - 1
+chr22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 + 1
+chr22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 - 1
+chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 + 1
+chr5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 - 1
+chr5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 + 1
+chr5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 - 1
+chr6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 - 1
+chr6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 + 1
+chr6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 - 1
+chr6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 + 1
+chr7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 + 1
+chr7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 - 1
+chr7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 + 1
+chr7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 - 1
+chr8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 - 1
+chr9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 + 1
+chr9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 - 1
+chr9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 + 1
+chr9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 - 1
+chrX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 + 1
+chrX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 - 1
+chrX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 + 1
+chrX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 - 1
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 test-data/eq-condense.dat
--- /dev/null
+++ b/test-data/eq-condense.dat
@@ -0,0 +1,65 @@
+chr1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 -
+chr1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 +
+chr1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 -
+chr1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 +
+chr10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 -
+chr11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 -
+chr11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 +
+chr11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 -
+chr11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 +
+chr12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 -
+chr13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 +
+chr14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 -
+chr15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 -
+chr15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 +
+chr15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 -
+chr15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 +
+chr16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 +
+chr16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 -
+chr16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 +
+chr16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 -
+chr18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 -
+chr18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 +
+chr18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 -
+chr18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 +
+chr19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 +
+chr19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 -
+chr19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 +
+chr19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 -
+chr2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 +
+chr2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 -
+chr2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 +
+chr2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 -
+chr20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 -
+chr20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 +
+chr20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 -
+chr20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 +
+chr21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 +
+chr21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 -
+chr21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 +
+chr21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 -
+chr22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 +
+chr22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 -
+chr22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 +
+chr22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 -
+chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 +
+chr5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 -
+chr5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 +
+chr5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 -
+chr6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 -
+chr6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 +
+chr6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 -
+chr6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 +
+chr7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 +
+chr7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 -
+chr7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 +
+chr7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 -
+chr8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 -
+chr9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 +
+chr9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 -
+chr9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 +
+chr9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 -
+chrX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 +
+chrX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 -
+chrX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 +
+chrX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 -
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 test-data/eq-convert.dat
--- /dev/null
+++ b/test-data/eq-convert.dat
@@ -0,0 +1,65 @@
+chr1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 -
+chr1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 +
+chr1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 -
+chr1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 +
+chr10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 -
+chr11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 -
+chr11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 +
+chr11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 -
+chr11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 +
+chr12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 -
+chr13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 +
+chr14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 -
+chr15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 -
+chr15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 +
+chr15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 -
+chr15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 +
+chr16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 +
+chr16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 -
+chr16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 +
+chr16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 -
+chr18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 -
+chr18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 +
+chr18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 -
+chr18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 +
+chr19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 +
+chr19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 -
+chr19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 +
+chr19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 -
+chr2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 +
+chr2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 -
+chr2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 +
+chr2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 -
+chr20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 -
+chr20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 +
+chr20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 -
+chr20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 +
+chr21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 +
+chr21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 -
+chr21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 +
+chr21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 -
+chr22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 +
+chr22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 -
+chr22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 +
+chr22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 -
+chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 +
+chr5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 -
+chr5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 +
+chr5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 -
+chr6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 -
+chr6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 +
+chr6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 -
+chr6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 +
+chr7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 +
+chr7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 -
+chr7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 +
+chr7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 -
+chr8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 -
+chr9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 +
+chr9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 -
+chr9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 +
+chr9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 -
+chrX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 +
+chrX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 -
+chrX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 +
+chrX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 -
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 test-data/eq-cut.dat
--- /dev/null
+++ b/test-data/eq-cut.dat
@@ -0,0 +1,65 @@
+chr1 CCDS989.1_cds_0_0_chr1_147962193_r 147962192 147962580
+chr1 CCDS990.1_cds_0_0_chr1_147984546_f 147984545 147984630
+chr1 CCDS993.1_cds_0_0_chr1_148078401_r 148078400 148078582
+chr1 CCDS996.1_cds_0_0_chr1_148185137_f 148185136 148185276
+chr10 CCDS7248.1_cds_0_0_chr10_55251624_r 55251623 55253124
+chr11 CCDS8374.1_cds_0_0_chr11_116124408_r 116124407 116124501
+chr11 CCDS8377.1_cds_0_0_chr11_116206509_f 116206508 116206563
+chr11 CCDS8378.1_cds_0_0_chr11_116211734_r 116211733 116212337
+chr11 CCDS7726.1_cds_0_0_chr11_1812378_f 1812377 1812407
+chr12 CCDS8736.1_cds_0_0_chr12_38440095_r 38440094 38440321
+chr13 CCDS9526.1_cds_0_0_chr13_112381695_f 112381694 112381953
+chr14 CCDS9949.1_cds_0_0_chr14_98710241_r 98710240 98712285
+chr15 CCDS10096.1_cds_0_0_chr15_41486873_r 41486872 41487060
+chr15 CCDS10097.1_cds_0_0_chr15_41673709_f 41673708 41673857
+chr15 CCDS10098.1_cds_0_0_chr15_41679162_r 41679161 41679250
+chr15 CCDS10101.1_cds_0_0_chr15_41826030_f 41826029 41826196
+chr16 CCDS10397.1_cds_0_0_chr16_142909_f 142908 143003
+chr16 CCDS10401.1_cds_0_0_chr16_179964_r 179963 180135
+chr16 CCDS10402.1_cds_0_0_chr16_244414_f 244413 244681
+chr16 CCDS10403.1_cds_0_0_chr16_259269_r 259268 259383
+chr18 CCDS11891.1_cds_0_0_chr18_23786115_r 23786114 23786321
+chr18 CCDS11985.1_cds_0_0_chr18_59406882_f 59406881 59407046
+chr18 CCDS11986.1_cds_0_0_chr18_59455933_r 59455932 59456337
+chr18 CCDS11988.1_cds_0_0_chr18_59600587_f 59600586 59600754
+chr19 CCDS12866.1_cds_0_0_chr19_59068596_f 59068595 59069564
+chr19 CCDS12872.1_cds_0_0_chr19_59236027_r 59236026 59236146
+chr19 CCDS12877.1_cds_0_0_chr19_59297999_f 59297998 59298008
+chr19 CCDS12878.1_cds_0_0_chr19_59302169_r 59302168 59302288
+chr2 CCDS2120.1_cds_0_0_chr2_118288584_f 118288583 118288668
+chr2 CCDS2121.1_cds_0_0_chr2_118394149_r 118394148 118394202
+chr2 CCDS2441.1_cds_0_0_chr2_220190203_f 220190202 220190242
+chr2 CCDS2443.1_cds_0_0_chr2_220229610_r 220229609 220230869
+chr20 CCDS13249.1_cds_0_0_chr20_33330414_r 33330413 33330423
+chr20 CCDS13255.1_cds_0_0_chr20_33513607_f 33513606 33513792
+chr20 CCDS13256.1_cds_0_0_chr20_33579501_r 33579500 33579527
+chr20 CCDS13257.1_cds_0_0_chr20_33593261_f 33593260 33593348
+chr21 CCDS13614.1_cds_0_0_chr21_32707033_f 32707032 32707192
+chr21 CCDS13615.1_cds_0_0_chr21_32869642_r 32869641 32870022
+chr21 CCDS13620.1_cds_0_0_chr21_33321041_f 33321040 33322012
+chr21 CCDS13625.1_cds_0_0_chr21_33744995_r 33744994 33745040
+chr22 CCDS13897.1_cds_0_0_chr22_30120224_f 30120223 30120265
+chr22 CCDS13898.1_cds_0_0_chr22_30160420_r 30160419 30160661
+chr22 CCDS13901.1_cds_0_0_chr22_30665274_f 30665273 30665360
+chr22 CCDS13903.1_cds_0_0_chr22_30939055_r 30939054 30939266
+chr5 CCDS4149.1_cds_0_0_chr5_131424299_f 131424298 131424460
+chr5 CCDS4151.1_cds_0_0_chr5_131556602_r 131556601 131556672
+chr5 CCDS4152.1_cds_0_0_chr5_131621327_f 131621326 131621419
+chr5 CCDS4155.1_cds_0_0_chr5_131847542_r 131847541 131847666
+chr6 CCDS5061.1_cds_0_0_chr6_108299601_r 108299600 108299744
+chr6 CCDS5063.1_cds_0_0_chr6_108594663_f 108594662 108594687
+chr6 CCDS5064.1_cds_0_0_chr6_108640046_r 108640045 108640151
+chr6 CCDS5067.1_cds_0_0_chr6_108722977_f 108722976 108723115
+chr7 CCDS5760.1_cds_0_0_chr7_113660518_f 113660517 113660685
+chr7 CCDS5771.1_cds_0_0_chr7_116512160_r 116512159 116512389
+chr7 CCDS5773.1_cds_0_0_chr7_116714100_f 116714099 116714152
+chr7 CCDS5774.1_cds_0_0_chr7_116945542_r 116945541 116945787
+chr8 CCDS6324.1_cds_0_0_chr8_118881132_r 118881131 118881317
+chr9 CCDS6914.1_cds_0_0_chr9_128764157_f 128764156 128764189
+chr9 CCDS6915.1_cds_0_0_chr9_128787520_r 128787519 128789136
+chr9 CCDS6917.1_cds_0_0_chr9_128882428_f 128882427 128882523
+chr9 CCDS6919.1_cds_0_0_chr9_128937230_r 128937229 128937445
+chrX CCDS14606.1_cds_0_0_chrX_122745048_f 122745047 122745924
+chrX CCDS14733.1_cds_0_0_chrX_152648965_r 152648964 152649196
+chrX CCDS14735.1_cds_0_0_chrX_152691447_f 152691446 152691471
+chrX CCDS14736.1_cds_0_0_chrX_152694030_r 152694029 152694263
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 test-data/mergeCols.dat
--- /dev/null
+++ b/test-data/mergeCols.dat
@@ -0,0 +1,65 @@
+chr1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 - CCDS989.1_cds_0_0_chr1_147962193_rchr1-
+chr1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 + CCDS990.1_cds_0_0_chr1_147984546_fchr1+
+chr1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 - CCDS993.1_cds_0_0_chr1_148078401_rchr1-
+chr1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 + CCDS996.1_cds_0_0_chr1_148185137_fchr1+
+chr10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 - CCDS7248.1_cds_0_0_chr10_55251624_rchr10-
+chr11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 - CCDS8374.1_cds_0_0_chr11_116124408_rchr11-
+chr11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 + CCDS8377.1_cds_0_0_chr11_116206509_fchr11+
+chr11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 - CCDS8378.1_cds_0_0_chr11_116211734_rchr11-
+chr11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 + CCDS7726.1_cds_0_0_chr11_1812378_fchr11+
+chr12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 - CCDS8736.1_cds_0_0_chr12_38440095_rchr12-
+chr13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 + CCDS9526.1_cds_0_0_chr13_112381695_fchr13+
+chr14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 - CCDS9949.1_cds_0_0_chr14_98710241_rchr14-
+chr15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 - CCDS10096.1_cds_0_0_chr15_41486873_rchr15-
+chr15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 + CCDS10097.1_cds_0_0_chr15_41673709_fchr15+
+chr15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 - CCDS10098.1_cds_0_0_chr15_41679162_rchr15-
+chr15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 + CCDS10101.1_cds_0_0_chr15_41826030_fchr15+
+chr16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 + CCDS10397.1_cds_0_0_chr16_142909_fchr16+
+chr16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 - CCDS10401.1_cds_0_0_chr16_179964_rchr16-
+chr16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 + CCDS10402.1_cds_0_0_chr16_244414_fchr16+
+chr16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 - CCDS10403.1_cds_0_0_chr16_259269_rchr16-
+chr18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 - CCDS11891.1_cds_0_0_chr18_23786115_rchr18-
+chr18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 + CCDS11985.1_cds_0_0_chr18_59406882_fchr18+
+chr18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 - CCDS11986.1_cds_0_0_chr18_59455933_rchr18-
+chr18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 + CCDS11988.1_cds_0_0_chr18_59600587_fchr18+
+chr19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 + CCDS12866.1_cds_0_0_chr19_59068596_fchr19+
+chr19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 - CCDS12872.1_cds_0_0_chr19_59236027_rchr19-
+chr19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 + CCDS12877.1_cds_0_0_chr19_59297999_fchr19+
+chr19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 - CCDS12878.1_cds_0_0_chr19_59302169_rchr19-
+chr2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 + CCDS2120.1_cds_0_0_chr2_118288584_fchr2+
+chr2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 - CCDS2121.1_cds_0_0_chr2_118394149_rchr2-
+chr2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 + CCDS2441.1_cds_0_0_chr2_220190203_fchr2+
+chr2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 - CCDS2443.1_cds_0_0_chr2_220229610_rchr2-
+chr20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 - CCDS13249.1_cds_0_0_chr20_33330414_rchr20-
+chr20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 + CCDS13255.1_cds_0_0_chr20_33513607_fchr20+
+chr20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 - CCDS13256.1_cds_0_0_chr20_33579501_rchr20-
+chr20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 + CCDS13257.1_cds_0_0_chr20_33593261_fchr20+
+chr21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 + CCDS13614.1_cds_0_0_chr21_32707033_fchr21+
+chr21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 - CCDS13615.1_cds_0_0_chr21_32869642_rchr21-
+chr21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 + CCDS13620.1_cds_0_0_chr21_33321041_fchr21+
+chr21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 - CCDS13625.1_cds_0_0_chr21_33744995_rchr21-
+chr22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 + CCDS13897.1_cds_0_0_chr22_30120224_fchr22+
+chr22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 - CCDS13898.1_cds_0_0_chr22_30160420_rchr22-
+chr22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 + CCDS13901.1_cds_0_0_chr22_30665274_fchr22+
+chr22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 - CCDS13903.1_cds_0_0_chr22_30939055_rchr22-
+chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 + CCDS4149.1_cds_0_0_chr5_131424299_fchr5+
+chr5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 - CCDS4151.1_cds_0_0_chr5_131556602_rchr5-
+chr5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 + CCDS4152.1_cds_0_0_chr5_131621327_fchr5+
+chr5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 - CCDS4155.1_cds_0_0_chr5_131847542_rchr5-
+chr6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 - CCDS5061.1_cds_0_0_chr6_108299601_rchr6-
+chr6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 + CCDS5063.1_cds_0_0_chr6_108594663_fchr6+
+chr6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 - CCDS5064.1_cds_0_0_chr6_108640046_rchr6-
+chr6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 + CCDS5067.1_cds_0_0_chr6_108722977_fchr6+
+chr7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 + CCDS5760.1_cds_0_0_chr7_113660518_fchr7+
+chr7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 - CCDS5771.1_cds_0_0_chr7_116512160_rchr7-
+chr7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 + CCDS5773.1_cds_0_0_chr7_116714100_fchr7+
+chr7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 - CCDS5774.1_cds_0_0_chr7_116945542_rchr7-
+chr8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 - CCDS6324.1_cds_0_0_chr8_118881132_rchr8-
+chr9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 + CCDS6914.1_cds_0_0_chr9_128764157_fchr9+
+chr9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 - CCDS6915.1_cds_0_0_chr9_128787520_rchr9-
+chr9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 + CCDS6917.1_cds_0_0_chr9_128882428_fchr9+
+chr9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 - CCDS6919.1_cds_0_0_chr9_128937230_rchr9-
+chrX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 + CCDS14606.1_cds_0_0_chrX_122745048_fchrX+
+chrX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 - CCDS14733.1_cds_0_0_chrX_152648965_rchrX-
+chrX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 + CCDS14735.1_cds_0_0_chrX_152691447_fchrX+
+chrX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 - CCDS14736.1_cds_0_0_chrX_152694030_rchrX-
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 test-data/trimmer_a_f_c0_s1_e13_i62.dat
--- /dev/null
+++ b/test-data/trimmer_a_f_c0_s1_e13_i62.dat
@@ -0,0 +1,5 @@
+12345 abcdef
+67890 ghjkl g
+>assa lljlj ljlj
+sasas hghg hg
+@dgf gfgf gfg
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 test-data/trimmer_a_f_c2_s1_e2_i62.dat
--- /dev/null
+++ b/test-data/trimmer_a_f_c2_s1_e2_i62.dat
@@ -0,0 +1,5 @@
+12345 ab xyz
+67890 gh ghjt
+>assa lljlj ljlj
+sasas hg hghg
+@dgf gf gfgf
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 test-data/trimmer_tab_delimited.dat
--- /dev/null
+++ b/test-data/trimmer_tab_delimited.dat
@@ -0,0 +1,5 @@
+12345 abcdef xyz
+67890 ghjkl ghjt
+>assa lljlj ljlj
+sasas hghg hghg
+@dgf gfgf gfgf
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tool_conf.xml.sample
--- a/tool_conf.xml.sample
+++ b/tool_conf.xml.sample
@@ -44,12 +44,21 @@
<tool file="extract/liftOver_wrapper.xml" /></section><section name="Text Manipulation" id="textutil">
+ <tool file="filters/fixedValueColumn.xml" /><tool file="stats/column_maker.xml" /><tool file="filters/catWrapper.xml" /><tool file="filters/cutWrapper.xml" />
+ <tool file="filters/mergeCols.xml" />
+ <tool file="filters/convert_characters.xml" /><tool file="filters/CreateInterval.xml" />
+ <tool file="filters/cutWrapper.xml" />
+ <tool file="filters/changeCase.xml" /><tool file="filters/pasteWrapper.xml" />
+ <tool file="filters/remove_beginning.xml" /><tool file="filters/randomlines.xml" />
+ <tool file="filters/headWrapper.xml" />
+ <tool file="filters/tailWrapper.xml" />
+ <tool file="filters/trimmer.xml" /><tool file="filters/wc_gnu.xml" /><tool file="filters/secure_hash_message_digest.xml" /><tool file="stats/dna_filtering.xml" />
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/changeCase.pl
--- /dev/null
+++ b/tools/filters/changeCase.pl
@@ -0,0 +1,58 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+my $columns = {};
+my $del = "";
+my @in = ();
+my @out = ();
+my $command = "";
+my $field = 0;
+
+# a wrapper for changing the case of columns from within galaxy
+# isaChangeCase.pl [filename] [columns] [delim] [casing] [output]
+
+die "Check arguments: $0 [filename] [columns] [delim] [casing] [output]\n" unless @ARGV == 5;
+
+# process column input
+$ARGV[1] =~ s/\s+//g;
+foreach ( split /,/, $ARGV[1] ) {
+ if (m/^c\d{1,}$/i) {
+ s/c//ig;
+ $columns->{$_} = --$_;
+ }
+}
+
+die "No columns specified, columns are not preceded with 'c', or commas are not used to separate column numbers: $ARGV[1]\n" if keys %$columns == 0;
+
+my $column_delimiters_href = {
+ 'TAB' => q{\t},
+ 'COMMA' => ",",
+ 'DASH' => "-",
+ 'UNDERSCORE' => "_",
+ 'PIPE' => q{\|},
+ 'DOT' => q{\.},
+ 'SPACE' => q{\s+}
+};
+
+$del = $column_delimiters_href->{$ARGV[2]};
+
+open (OUT, ">$ARGV[4]") or die "Cannot create $ARGV[4]:$!\n";
+open (IN, "<$ARGV[0]") or die "Cannot open $ARGV[0]:$!\n";
+while (<IN>) {
+ chop;
+ @in = split /$del/;
+ for ( my $i = 0; $i <= $#in; ++$i) {
+ if (exists $columns->{$i}) {
+ push(@out, $ARGV[3] eq 'up' ? uc($in[$i]) : lc($in[$i]));
+ } else {
+ push(@out, $in[$i]);
+ }
+ }
+ print OUT join("\t",@out), "\n";
+ @out = ();
+}
+close IN;
+
+close OUT;
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/changeCase.xml
--- /dev/null
+++ b/tools/filters/changeCase.xml
@@ -0,0 +1,77 @@
+<tool id="ChangeCase" name="Change Case">
+ <description> of selected columns</description>
+ <stdio>
+ <exit_code range="1:" err_level="fatal" />
+ </stdio>
+ <command interpreter="perl">changeCase.pl $input "$cols" $delimiter $casing $out_file1</command>
+ <inputs>
+ <param name="input" format="txt" type="data" label="From"/>
+ <param name="cols" size="10" type="text" value="c1,c2" label="Change case of columns"/>
+ <param name="delimiter" type="select" label="Delimited by">
+ <option value="TAB">Tab</option>
+ <option value="SPACE">Whitespace</option>
+ <option value="DOT">Dot</option>
+ <option value="COMMA">Comma</option>
+ <option value="DASH">Dash</option>
+ <option value="UNDERSCORE">Underscore</option>
+ <option value="PIPE">Pipe</option>
+ </param>
+ <param name="casing" type="select" label="To">
+ <option value="up">Upper case</option>
+ <option value="lo">Lower case</option>
+ </param>
+ </inputs>
+ <outputs>
+ <data format="tabular" name="out_file1" />
+ </outputs>
+ <tests>
+ <test>
+ <param name="input" value="1.txt" ftype="txt"/>
+ <param name="cols" value="c1"/>
+ <param name="delimiter" value="SPACE"/>
+ <param name="casing" value="up"/>
+ <output name="out_file1" file="changeCase_out1.tabular"/>
+ </test>
+ <test>
+ <param name="input" value="1.bed" ftype="bed"/>
+ <param name="cols" value="c1"/>
+ <param name="delimiter" value="TAB"/>
+ <param name="casing" value="up"/>
+ <output name="out_file1" file="changeCase_out2.tabular"/>
+ </test>
+ </tests>
+ <help>
+
+.. class:: warningmark
+
+**This tool breaks column assignments.** To re-establish column assignments run the tool and click on the pencil icon in the resulting history item.
+
+.. class:: warningmark
+
+The format of the resulting dataset from this tool is always tabular.
+
+-----
+
+**What it does**
+
+This tool selects specified columns from a dataset and converts the values of those columns to upper or lower case.
+
+- Columns are specified as **c1**, **c2**, and so on.
+- Columns can be specified in any order (e.g., **c2,c1,c6**)
+
+-----
+
+**Example**
+
+Changing columns 1 and 3 ( delimited by Comma ) to upper case in::
+
+ apple,is,good
+ windows,is,bad
+
+will result in::
+
+ APPLE is GOOD
+ WINDOWS is BAD
+
+ </help>
+</tool>
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/condense_characters.pl
--- /dev/null
+++ b/tools/filters/condense_characters.pl
@@ -0,0 +1,105 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+# condenses all consecutive characters of one type
+# convert_characters.pl [input] [character] [output]
+
+die "Check arguments" unless @ARGV == 3;
+
+my $inputfile = $ARGV[0];
+my $character = $ARGV[1];
+my $outputfile = $ARGV[2];
+
+
+my $convert_from;
+my $convert_to;
+
+
+if ($character eq "s")
+{
+ $convert_from = '\s';
+}
+elsif ($character eq "T")
+{
+ $convert_from = '\t';
+}
+elsif ($character eq "Sp")
+{
+ $convert_from = " ";
+}
+elsif ($character eq "Dt")
+{
+ $convert_from = '\.';
+}
+elsif ($character eq "C")
+{
+ $convert_from = ",";
+}
+elsif ($character eq "D")
+{
+ $convert_from = "-";
+}
+elsif ($character eq "U")
+{
+ $convert_from = "_";
+}
+elsif ($character eq "P")
+{
+ $convert_from = '\|';
+}
+else
+{
+ die "Invalid value specified for convert from\n";
+}
+
+
+if ($character eq "T")
+{
+ $convert_to = "\t";
+}
+elsif ($character eq "Sp")
+{
+ $convert_to = " ";
+}
+elsif ($character eq "Dt")
+{
+ $convert_to = "\.";
+}
+elsif ($character eq "C")
+{
+ $convert_to = ",";
+}
+elsif ($character eq "D")
+{
+ $convert_to = "-";
+}
+elsif ($character eq "U")
+{
+ $convert_to = "_";
+}
+elsif ($character eq "P")
+{
+ $convert_to = "|";
+}
+else
+{
+ die "Invalid value specified for Convert to\n";
+}
+
+my $fhIn;
+open ($fhIn, "< $inputfile") or die "Cannot open source file";
+
+my $fhOut;
+open ($fhOut, "> $outputfile");
+
+while (<$fhIn>)
+{
+ my $thisLine = $_;
+ chomp $thisLine;
+ $thisLine =~ s/${convert_from}+/$convert_to/g;
+ print $fhOut $thisLine,"\n";
+}
+close ($fhIn) or die "Cannot close source file";
+close ($fhOut) or die "Cannot close output file";
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/condense_characters.xml
--- /dev/null
+++ b/tools/filters/condense_characters.xml
@@ -0,0 +1,48 @@
+<tool id="Condense characters1" name="Condense">
+ <description>consecutive characters</description>
+ <command interpreter="perl">condense_characters.pl $input $character $out_file1</command>
+ <inputs>
+<!-- <display>condense all consecutive $character from $input</display> -->
+ <param name="character" type="select" label="Condense all consecutive">
+ <option value="T">Tabs</option>
+ <option value="Sp">Spaces</option>
+ <option value="Dt">Dots</option>
+ <option value="C">Commas</option>
+ <option value="D">Dashes</option>
+ <option value="U">Underscores</option>
+ <option value="P">Pipes</option>
+ </param>
+ <param format="txt" name="input" type="data" label="in this Query"/>
+ </inputs>
+ <outputs>
+ <data format="input" name="out_file1" metadata_source="input" />
+ </outputs>
+ <tests>
+ <test>
+ <param name="character" value="T"/>
+ <param name="input" value="1.bed"/>
+ <output name="out_file1" file="eq-condense.dat"/>
+ </test>
+ </tests>
+ <help>
+
+**What it does**
+
+This tool condenses all consecutive characters of a specified type.
+
+-----
+
+**Example**
+
+- Input file::
+
+ geneX,,,10,,,,,20
+ geneY,,5,,,,,12,15,9,
+
+- Condense all consecutive commas. The above file will be converted into::
+
+ geneX,10,20
+ geneY,5,12,15,9
+
+</help>
+</tool>
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/convert_characters.py
--- /dev/null
+++ b/tools/filters/convert_characters.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+#By, Guruprasad Ananda.
+
+from galaxy import eggs
+import sys, re
+
+def stop_err(msg):
+ sys.stderr.write(msg)
+ sys.exit()
+
+def main():
+ if len(sys.argv) != 4:
+ stop_err("usage: convert_characters infile from_char outfile")
+
+ try:
+ fin = open(sys.argv[1],'r')
+ except:
+ stop_err("Input file cannot be opened for reading.")
+
+ from_char = sys.argv[2]
+
+ try:
+ fout = open(sys.argv[3],'w')
+ except:
+ stop_err("Output file cannot be opened for writing.")
+
+ char_dict = {'T':'\t','s':'\s','Dt':'\.','C':',','D':'-','U':'_','P':'\|','Co':':'}
+    from_ch = char_dict[from_char] + '+' #making an RE to match 1 or more occurrences.
+ skipped = 0
+
+ for line in fin:
+ line = line.strip()
+ try:
+ fout.write("%s\n" %(re.sub(from_ch,'\t',line)))
+ except:
+ skipped += 1
+
+ if skipped:
+ print "Skipped %d lines as invalid." %skipped
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/convert_characters.xml
--- /dev/null
+++ b/tools/filters/convert_characters.xml
@@ -0,0 +1,58 @@
+<tool id="Convert characters1" name="Convert">
+ <description>delimiters to TAB</description>
+ <command interpreter="python">convert_characters.py $input $convert_from $out_file1</command>
+ <inputs>
+ <param name="convert_from" type="select" label="Convert all">
+ <option value="s">Whitespaces</option>
+ <option value="T">Tabs</option>
+ <!--<option value="Sp">Spaces</option>-->
+ <option value="Dt">Dots</option>
+ <option value="C">Commas</option>
+ <option value="D">Dashes</option>
+ <option value="U">Underscores</option>
+ <option value="P">Pipes</option>
+ <option value="Co">Colons</option>
+ </param>
+ <param format="txt" name="input" type="data" label="in Query"/>
+ </inputs>
+ <outputs>
+ <data format="tabular" name="out_file1" />
+ </outputs>
+ <tests>
+ <test>
+ <param name="convert_from" value="s"/>
+ <param name="input" value="1.bed"/>
+ <output name="out_file1" file="eq-convert.dat"/>
+ </test>
+ <test>
+ <param name="convert_from" value="s"/>
+ <param name="input" value="a.txt"/>
+ <output name="out_file1" file="a.tab"/>
+ </test>
+ </tests>
+ <help>
+
+**What it does**
+
+Converts all delimiters of a specified type into TABs. Consecutive characters are condensed. For example, if columns are separated by 5 spaces they will be converted into 1 tab.
+
+-----
+
+**Example**
+
+- Input file::
+
+ chrX||151283558|151283724|NM_000808_exon_8_0_chrX_151283559_r|0|-
+ chrX|151370273|151370486|NM_000808_exon_9_0_chrX_151370274_r|0|-
+ chrX|151559494|151559583|NM_018558_exon_1_0_chrX_151559495_f|0|+
+ chrX|151564643|151564711|NM_018558_exon_2_0_chrX_151564644_f||||0|+
+
+- Converting all pipe delimiters of the above file to TABs will get::
+
+ chrX 151283558 151283724 NM_000808_exon_8_0_chrX_151283559_r 0 -
+ chrX 151370273 151370486 NM_000808_exon_9_0_chrX_151370274_r 0 -
+ chrX 151559494 151559583 NM_018558_exon_1_0_chrX_151559495_f 0 +
+ chrX 151564643 151564711 NM_018558_exon_2_0_chrX_151564644_f 0 +
+
+</help>
+</tool>
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/cutWrapper.pl
--- /dev/null
+++ b/tools/filters/cutWrapper.pl
@@ -0,0 +1,77 @@
+#!/usr/bin/perl -w
+
+use strict;
+use warnings;
+
+my @columns = ();
+my $del = "";
+my @in = ();
+my @out = ();
+my $command = "";
+my $field = 0;
+
+# a wrapper for cut for use in galaxy
+# cutWrapper.pl [filename] [columns] [delim] [output]
+
+die "Check arguments\n" unless @ARGV == 4;
+
+$ARGV[1] =~ s/\s+//g;
+foreach ( split /,/, $ARGV[1] ) {
+ if (m/^c\d{1,}$/i) {
+ push (@columns, $_);
+ $columns[@columns-1] =~s/c//ig;
+ }
+}
+
+die "No columns specified, columns are not preceded with 'c', or commas are not used to separate column numbers: $ARGV[1]\n" if @columns == 0;
+
+my $column_delimiters_href = {
+ 'T' => q{\t},
+ 'C' => ",",
+ 'D' => "-",
+ 'U' => "_",
+ 'P' => q{\|},
+ 'Dt' => q{\.},
+ 'Sp' => q{\s+}
+};
+
+$del = $column_delimiters_href->{$ARGV[2]};
+
+open (OUT, ">$ARGV[3]") or die "Cannot create $ARGV[2]:$!\n";
+open (IN, "<$ARGV[0]") or die "Cannot open $ARGV[0]:$!\n";
+
+while (my $line=<IN>) {
+ if ($line =~ /^#/) {
+ #Ignore comment lines
+ } else {
+ chop($line);
+ @in = split(/$del/, $line);
+ foreach $field (@columns) {
+ if (defined($in[$field-1])) {
+ push(@out, $in[$field-1]);
+ } else {
+ push(@out, ".");
+ }
+ }
+ print OUT join("\t",@out), "\n";
+ @out = ();
+ }
+}
+
+#while (<IN>) {
+# chop;
+# @in = split /$del/;
+# foreach $field (@columns) {
+# if (defined($in[$field-1])) {
+# push(@out, $in[$field-1]);
+# } else {
+# push(@out, ".");
+# }
+# }
+# print OUT join("\t",@out), "\n";
+# @out = ();
+#}
+close IN;
+
+close OUT;
+
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/cutWrapper.xml
--- /dev/null
+++ b/tools/filters/cutWrapper.xml
@@ -0,0 +1,202 @@
+<tool id="Cut1" name="Cut" version="1.0.1">
+ <description>columns from a table</description>
+ <command interpreter="perl">cutWrapper.pl $input "$columnList" $delimiter $out_file1</command>
+ <inputs>
+ <param name="columnList" size="10" type="text" value="c1,c2" label="Cut columns"/>
+ <param name="delimiter" type="select" label="Delimited by">
+ <option value="T">Tab</option>
+ <option value="Sp">Whitespace</option>
+ <option value="Dt">Dot</option>
+ <option value="C">Comma</option>
+ <option value="D">Dash</option>
+ <option value="U">Underscore</option>
+ <option value="P">Pipe</option>
+ </param>
+ <param format="txt" name="input" type="data" label="From"/>
+ </inputs>
+ <outputs>
+ <data format="tabular" name="out_file1" >
+ <actions>
+ <conditional name="delimiter">
+ <when value="T">
+ <conditional name="input">
+ <when datatype_isinstance="interval">
+ <action type="format" default="tabular">
+ <option type="from_param" name="columnList" column="0" offset="0"><!-- chromCol is 1-->
+
+ <filter type="insert_column" column="0" value="interval"/>
+
+ <filter type="insert_column" ref="columnList" /><!-- startCol -->
+
+ <filter type="insert_column" ref="columnList" /><!-- endCol -->
+
+ <filter type="multiple_splitter" column="1" separator=","/>
+ <filter type="column_strip" column="1"/><!-- get rid of all external whitespace -->
+ <filter type="string_function" column="1" name="lower" />
+ <filter type="param_value" column="1" value="^c\d{1,}$" compare="re_search" keep="True"/>
+ <filter type="column_strip" column="1" strip="c"/><!-- get rid of c's -->
+ <filter type="boolean" column="1" cast="int" />
+
+ <filter type="multiple_splitter" column="2" separator=","/>
+ <filter type="column_strip" column="2"/><!-- get rid of all external whitespace -->
+ <filter type="string_function" column="2" name="lower" />
+ <filter type="param_value" column="2" value="^c\d{1,}$" compare="re_search" keep="True"/>
+ <filter type="column_strip" column="2" strip="c"/><!-- get rid of c's -->
+ <filter type="boolean" column="2" cast="int" />
+
+ <filter type="multiple_splitter" column="3" separator=","/>
+ <filter type="column_strip" column="3"/><!-- get rid of all external whitespace -->
+ <filter type="string_function" column="3" name="lower" />
+ <filter type="param_value" column="3" value="^c\d{1,}$" compare="re_search" keep="True"/>
+ <filter type="column_strip" column="3" strip="c"/><!-- get rid of c's -->
+ <filter type="boolean" column="3" cast="int" />
+
+ <filter type="metadata_value" ref="input" name="chromCol" column="1" />
+ <filter type="metadata_value" ref="input" name="startCol" column="2" />
+ <filter type="metadata_value" ref="input" name="endCol" column="3" />
+
+ </option>
+ </action>
+
+ <conditional name="out_file1">
+ <when datatype_isinstance="interval">
+ <action type="metadata" name="chromCol">
+ <option type="from_param" name="columnList" column="0" offset="0"><!-- chromCol is 0-->
+ <filter type="multiple_splitter" column="0" separator=","/>
+ <filter type="column_strip" column="0"/><!-- get rid of all external whitespace -->
+ <filter type="string_function" column="0" name="lower" />
+ <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
+ <filter type="column_strip" column="0" strip="c"/><!-- get rid of c's -->
+ <filter type="insert_column" value="1" iterate="True" column="0"/>
+ <filter type="boolean" column="1" cast="int" />
+ <filter type="metadata_value" ref="input" name="chromCol" column="1" />
+ </option>
+ </action>
+
+ <action type="metadata" name="startCol">
+ <option type="from_param" name="columnList" column="0" offset="0"><!-- startCol is 0-->
+ <filter type="multiple_splitter" column="0" separator=","/>
+ <filter type="column_strip" column="0"/><!-- get rid of all external whitespace -->
+ <filter type="string_function" column="0" name="lower" />
+ <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
+ <filter type="column_strip" column="0" strip="c"/><!-- get rid of c's -->
+ <filter type="insert_column" value="1" iterate="True" column="0"/>
+ <filter type="boolean" column="1" cast="int" />
+ <filter type="metadata_value" ref="input" name="startCol" column="1" />
+ </option>
+ </action>
+
+ <action type="metadata" name="endCol">
+ <option type="from_param" name="columnList" column="0" offset="0"><!-- endCol is 0-->
+ <filter type="multiple_splitter" column="0" separator=","/>
+ <filter type="column_strip" column="0"/><!-- get rid of all external whitespace -->
+ <filter type="string_function" column="0" name="lower" />
+ <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
+ <filter type="column_strip" column="0" strip="c"/><!-- get rid of c's -->
+ <filter type="insert_column" value="1" iterate="True" column="0"/>
+ <filter type="boolean" column="1" cast="int" />
+ <filter type="metadata_value" ref="input" name="endCol" column="1" />
+ </option>
+ </action>
+
+ <action type="metadata" name="nameCol" default="0">
+ <option type="from_param" name="columnList" column="0" offset="0"><!-- nameCol is 0-->
+ <filter type="multiple_splitter" column="0" separator=","/>
+ <filter type="column_strip" column="0"/><!-- get rid of all external whitespace -->
+ <filter type="string_function" column="0" name="lower" />
+ <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
+ <filter type="column_strip" column="0" strip="c"/><!-- get rid of c's -->
+ <filter type="insert_column" value="1" iterate="True" column="0"/>
+ <filter type="boolean" column="1" cast="int" />
+ <filter type="metadata_value" ref="input" name="nameCol" column="1" />
+ </option>
+ </action>
+
+ <action type="metadata" name="strandCol" default="0">
+ <option type="from_param" name="columnList" column="0" offset="0"><!-- strandCol is 0-->
+ <filter type="multiple_splitter" column="0" separator=","/>
+ <filter type="column_strip" column="0"/><!-- get rid of all external whitespace -->
+ <filter type="string_function" column="0" name="lower" />
+ <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
+ <filter type="column_strip" column="0" strip="c"/><!-- get rid of c's -->
+ <filter type="insert_column" value="1" iterate="True" column="0"/>
+ <filter type="boolean" column="1" cast="int" />
+ <filter type="metadata_value" ref="input" name="strandCol" column="1" />
+ </option>
+ </action>
+ </when>
+ </conditional>
+
+ </when>
+ </conditional>
+ </when>
+ </conditional>
+ </actions>
+ </data>
+ </outputs>
+ <tests>
+ <test>
+ <param name="columnList" value="c1,c4,c2,c3"/>
+ <param name="delimiter" value="T"/>
+ <param name="input" value="1.bed"/>
+ <output name="out_file1" file="eq-cut.dat"/>
+ </test>
+ </tests>
+ <help>
+
+.. class:: warningmark
+
+**WARNING: This tool breaks column assignments.** To re-establish column assignments run the tools and click on the pencil icon in the latest history item.
+
+.. class:: infomark
+
+The output of this tool is always in tabular format (e.g., if your original delimiters are commas, they will be replaced with tabs). For example:
+
+ Cutting columns 1 and 3 from::
+
+ apple,is,good
+ windows,is,bad
+
+ will give::
+
+ apple good
+ windows bad
+
+-----
+
+**What it does**
+
+This tool selects (cuts out) specified columns from the dataset.
+
+- Columns are specified as **c1**, **c2**, and so on. Column count begins with **1**
+- Columns can be specified in any order (e.g., **c2,c1,c6**)
+- If you specify more columns than actually present - empty spaces will be filled with dots
+
+-----
+
+**Example**
+
+Input dataset (six columns: c1, c2, c3, c4, c5, and c6)::
+
+ chr1 10 1000 gene1 0 +
+ chr2 100 1500 gene2 0 +
+
+**cut** on columns "**c1,c4,c6**" will return::
+
+ chr1 gene1 +
+ chr2 gene2 +
+
+**cut** on columns "**c6,c5,c4,c1**" will return::
+
+ + 0 gene1 chr1
+ + 0 gene2 chr2
+
+
+**cut** on columns "**c8,c7,c4**" will return::
+
+ . . gene1
+ . . gene2
+
+
+</help>
+</tool>
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/fixedValueColumn.pl
--- /dev/null
+++ b/tools/filters/fixedValueColumn.pl
@@ -0,0 +1,34 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+# fixedValueColumn.pl $input $out_file1 "expression" "iterate [yes|no]"
+
+my ($input, $out_file1, $expression, $iterate) = @ARGV;
+my $i = 0;
+my $numeric = 0;
+
+die "Check arguments\n" unless @ARGV == 4;
+
+open (DATA, "<$input") or die "Cannot open $input:$!\n";
+open (OUT, ">$out_file1") or die "Cannot create $out_file1:$!\n";
+
+if ($expression =~ m/^\d+$/) {
+ $numeric = 1;
+ $i = $expression;
+}
+
+while (<DATA>) {
+ chop;
+ if ($iterate eq "no") {
+ print OUT "$_\t$expression\n";
+ } else {
+ print OUT "$_\t$i\n" if $numeric == 1;
+ print OUT "$_\t$expression-$i\n" if $numeric == 0;
+ ++$i;
+ }
+}
+
+close DATA;
+close OUT;
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/fixedValueColumn.xml
--- /dev/null
+++ b/tools/filters/fixedValueColumn.xml
@@ -0,0 +1,61 @@
+<tool id="addValue" name="Add column">
+ <description>to an existing dataset</description>
+ <command interpreter="perl">fixedValueColumn.pl $input $out_file1 "$exp" $iterate</command>
+ <inputs>
+ <param name="exp" size="20" type="text" value="1" label="Add this value"/>
+ <param format="tabular" name="input" type="data" label="to Dataset" help="Dataset missing? See TIP below" />
+ <param name="iterate" type="select" label="Iterate?">
+ <option value="no">NO</option>
+ <option value="yes">YES</option>
+ </param>
+ </inputs>
+ <outputs>
+ <data format="input" name="out_file1" metadata_source="input"/>
+ </outputs>
+ <tests>
+ <test>
+ <param name="exp" value="1"/>
+ <param name="input" value="1.bed"/>
+ <param name="iterate" value="no"/>
+ <output name="out_file1" file="eq-addvalue.dat"/>
+ </test>
+ </tests>
+ <help>
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+
+-----
+
+**What it does**
+
+You can enter any value and it will be added as a new column to your dataset
+
+-----
+
+**Example**
+
+If your original data looks like this::
+
+ chr1 10 100 geneA
+ chr2 200 300 geneB
+ chr2 400 500 geneC
+
+Typing **+** in the text box will generate::
+
+ chr1 10 100 geneA +
+ chr2 200 300 geneB +
+ chr2 400 500 geneC +
+
+
+You can also add line numbers by selecting **Iterate: YES**. In this case if you enter **1** in the text box you will get::
+
+ chr1 10 100 geneA 1
+ chr2 200 300 geneB 2
+ chr2 400 500 geneC 3
+
+
+
+</help>
+</tool>
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/headWrapper.pl
--- /dev/null
+++ b/tools/filters/headWrapper.pl
@@ -0,0 +1,19 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+# a wrapper for head for use in galaxy
+# headWrapper.pl [filename] [# lines to show] [output]
+
+die "Check arguments" unless @ARGV == 3;
+die "Line number must be an integer\n" unless $ARGV[1]=~ m/^\d+$/;
+
+open (OUT, ">$ARGV[2]") or die "Cannot create $ARGV[2]:$!\n";
+open (HEAD, "head -n $ARGV[1] $ARGV[0]|") or die "Cannot run head:$!\n";
+while (<HEAD>) {
+ print OUT;
+}
+close OUT;
+close HEAD;
+
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/headWrapper.xml
--- /dev/null
+++ b/tools/filters/headWrapper.xml
@@ -0,0 +1,42 @@
+<tool id="Show beginning1" name="Select first">
+ <description>lines from a dataset</description>
+ <command interpreter="perl">headWrapper.pl $input $lineNum $out_file1</command>
+ <inputs>
+ <param name="lineNum" size="5" type="integer" value="10" label="Select first" help="lines"/>
+ <param format="txt" name="input" type="data" label="from"/>
+ </inputs>
+ <outputs>
+ <data format="input" name="out_file1" metadata_source="input"/>
+ </outputs>
+ <tests>
+ <test>
+ <param name="lineNum" value="10"/>
+ <param name="input" value="1.bed"/>
+ <output name="out_file1" file="eq-showbeginning.dat"/>
+ </test>
+ </tests>
+ <help>
+
+**What it does**
+
+This tool outputs specified number of lines from the **beginning** of a dataset
+
+-----
+
+**Example**
+
+Selecting 2 lines from this::
+
+ chr7 56632 56652 D17003_CTCF_R6 310 +
+ chr7 56736 56756 D17003_CTCF_R7 354 +
+ chr7 56761 56781 D17003_CTCF_R4 220 +
+ chr7 56772 56792 D17003_CTCF_R7 372 +
+ chr7 56775 56795 D17003_CTCF_R4 207 +
+
+will produce::
+
+ chr7 56632 56652 D17003_CTCF_R6 310 +
+ chr7 56736 56756 D17003_CTCF_R7 354 +
+
+ </help>
+</tool>
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/mergeCols.py
--- /dev/null
+++ b/tools/filters/mergeCols.py
@@ -0,0 +1,37 @@
+import sys, re
+
+def stop_err( msg ):
+ sys.stderr.write( msg )
+ sys.exit()
+
+def __main__():
+ try:
+ infile = open ( sys.argv[1], 'r')
+ outfile = open ( sys.argv[2], 'w')
+ except:
+ stop_err( 'Cannot open or create a file\n' )
+
+ if len( sys.argv ) < 4:
+ stop_err( 'No columns to merge' )
+ else:
+ cols = sys.argv[3:]
+
+ skipped_lines = 0
+
+ for line in infile:
+ line = line.rstrip( '\r\n' )
+ if line and not line.startswith( '#' ):
+ fields = line.split( '\t' )
+ line += '\t'
+ for col in cols:
+ try:
+ line += fields[ int( col ) -1 ]
+ except:
+ skipped_lines += 1
+
+ print >>outfile, line
+
+ if skipped_lines > 0:
+ print 'Skipped %d invalid lines' % skipped_lines
+
+if __name__ == "__main__" : __main__()
\ No newline at end of file
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/mergeCols.xml
--- /dev/null
+++ b/tools/filters/mergeCols.xml
@@ -0,0 +1,63 @@
+<tool id="mergeCols1" name="Merge Columns" version="1.0.1">
+ <description>together</description>
+ <command interpreter="python">
+ mergeCols.py
+ $input1
+ $out_file1
+ $col1
+ $col2
+ #for $col in $columns
+ ${col.datacol}
+ #end for
+ </command>
+ <inputs>
+ <param format="tabular" name="input1" type="data" label="Select data" help="Dataset missing? See TIP below."/>
+ <param name="col1" label="Merge column" type="data_column" data_ref="input1" />
+ <param name="col2" label="with column" type="data_column" data_ref="input1" help="Need to add more columns? Use controls below."/>
+ <repeat name="columns" title="Columns">
+ <param name="datacol" label="Add column" type="data_column" data_ref="input1" />
+ </repeat>
+ </inputs>
+ <outputs>
+ <data format="tabular" name="out_file1" />
+ </outputs>
+ <tests>
+ <test>
+ <param name="input1" value="1.bed"/>
+ <param name="col1" value="4" />
+ <param name="col2" value="1" />
+ <param name="datacol" value="6" />
+ <output name="out_file1" file="mergeCols.dat"/>
+ </test>
+ </tests>
+<help>
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+
+-----
+
+**What it does**
+
+This tool merges columns together. Any number of valid columns can be merged in any order.
+
+-----
+
+**Example**
+
+Input dataset (five columns: c1, c2, c3, c4, and c5)::
+
+ 1 10 1000 gene1 chr
+ 2 100 1500 gene2 chr
+
+merging columns "**c5,c1**" will return::
+
+ 1 10 1000 gene1 chr chr1
+ 2 100 1500 gene2 chr chr2
+
+.. class:: warningmark
+
+Note that all original columns are preserved and the result of merge is added as the rightmost column.
+ </help>
+</tool>
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/remove_beginning.pl
--- /dev/null
+++ b/tools/filters/remove_beginning.pl
@@ -0,0 +1,33 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+# Removes the specified number of lines from the beginning of the file.
+# remove_beginning.pl [input] [num_lines] [output]
+
+die "Check arguments" unless @ARGV == 3;
+
+my $inputfile = $ARGV[0];
+my $num_lines = $ARGV[1];
+my $outputfile = $ARGV[2];
+
+my $curCount=0;
+
+my $fhIn;
+open ($fhIn, "< $inputfile") or die "Cannot open source file";
+
+my $fhOut;
+open ($fhOut, "> $outputfile");
+
+while (<$fhIn>)
+{
+ $curCount++;
+ if ($curCount<=$num_lines)
+ {
+ next;
+ }
+ print $fhOut $_;
+}
+close ($fhIn) or die "Cannot close source file";
+close ($fhOut) or die "Cannot close output file";
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/remove_beginning.xml
--- /dev/null
+++ b/tools/filters/remove_beginning.xml
@@ -0,0 +1,42 @@
+<tool id="Remove beginning1" name="Remove beginning">
+ <description>of a file</description>
+ <command interpreter="perl">remove_beginning.pl $input $num_lines $out_file1</command>
+ <inputs>
+ <param name="num_lines" size="5" type="integer" value="1" label="Remove first" help="lines"/>
+ <param format="txt" name="input" type="data" label="from"/>
+ </inputs>
+ <outputs>
+ <data format="input" name="out_file1" metadata_source="input"/>
+ </outputs>
+ <tests>
+ <test>
+ <param name="num_lines" value="5"/>
+ <param name="input" value="1.bed"/>
+ <output name="out_file1" file="eq-removebeginning.dat"/>
+ </test>
+ </tests>
+ <help>
+
+**What it does**
+
+This tool removes a specified number of lines from the beginning of a dataset.
+
+-----
+
+**Example**
+
+Input File::
+
+ chr7 56632 56652 D17003_CTCF_R6 310 +
+ chr7 56736 56756 D17003_CTCF_R7 354 +
+ chr7 56761 56781 D17003_CTCF_R4 220 +
+ chr7 56772 56792 D17003_CTCF_R7 372 +
+ chr7 56775 56795 D17003_CTCF_R4 207 +
+
+After removing the first 3 lines the dataset will look like this::
+
+ chr7 56772 56792 D17003_CTCF_R7 372 +
+ chr7 56775 56795 D17003_CTCF_R4 207 +
+
+</help>
+</tool>
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/tailWrapper.pl
--- /dev/null
+++ b/tools/filters/tailWrapper.pl
@@ -0,0 +1,19 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+# a wrapper for tail for use in galaxy
+# tailWrapper.pl [filename] [# lines to show] [output]
+
+die "Check arguments" unless @ARGV == 3;
+die "Line number should be an integer\n" unless $ARGV[1]=~ m/^\d+$/;
+
+open (OUT, ">$ARGV[2]") or die "Cannot create $ARGV[2]:$!\n";
+open (TAIL, "tail -n $ARGV[1] $ARGV[0]|") or die "Cannot run tail:$!\n";
+while (<TAIL>) {
+ print OUT;
+}
+close OUT;
+close TAIL;
+
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/tailWrapper.xml
--- /dev/null
+++ b/tools/filters/tailWrapper.xml
@@ -0,0 +1,42 @@
+<tool id="Show tail1" name="Select last">
+ <description>lines from a dataset</description>
+ <command interpreter="perl">tailWrapper.pl $input $lineNum $out_file1</command>
+ <inputs>
+ <param name="lineNum" size="5" type="integer" value="10" label="Select last" help="lines"/>
+ <param format="txt" name="input" type="data" label="from"/>
+ </inputs>
+ <outputs>
+ <data format="input" name="out_file1" metadata_source="input"/>
+ </outputs>
+ <tests>
+ <test>
+ <param name="lineNum" value="10"/>
+ <param name="input" value="1.bed"/>
+ <output name="out_file1" file="eq-showtail.dat"/>
+ </test>
+ </tests>
+ <help>
+
+**What it does**
+
+This tool outputs specified number of lines from the **end** of a dataset
+
+-----
+
+**Example**
+
+- Input File::
+
+ chr7 57134 57154 D17003_CTCF_R7 356 -
+ chr7 57247 57267 D17003_CTCF_R4 207 +
+ chr7 57314 57334 D17003_CTCF_R5 269 +
+ chr7 57341 57361 D17003_CTCF_R7 375 +
+ chr7 57457 57477 D17003_CTCF_R3 188 +
+
+- Show last two lines of above file. The result is::
+
+ chr7 57341 57361 D17003_CTCF_R7 375 +
+ chr7 57457 57477 D17003_CTCF_R3 188 +
+
+ </help>
+</tool>
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/trimmer.py
--- /dev/null
+++ b/tools/filters/trimmer.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+
+import sys
+import optparse
+
+def stop_err( msg ):
+ sys.stderr.write( msg )
+ sys.exit()
+
+def main():
+ usage = """%prog [options]
+
+options (listed below) default to 'None' if omitted
+ """
+ parser = optparse.OptionParser(usage=usage)
+
+ parser.add_option(
+ '-a','--ascii',
+ dest='ascii',
+ action='store_true',
+ default = False,
+ help='Use ascii codes to defined ignored beginnings instead of raw characters')
+
+ parser.add_option(
+ '-q','--fastq',
+ dest='fastq',
+ action='store_true',
+ default = False,
+ help='The input data is in fastq format. If selected, the script skips every even line since they contain sequence ids')
+
+ parser.add_option(
+ '-i','--ignore',
+ dest='ignore',
+ help='A comma separated list of ignored beginnings (e.g., ">,@"), or their ascii codes (e.g., "62,64") if option -a is enabled')
+
+ parser.add_option(
+ '-s','--start',
+ dest='start',
+ default = '0',
+ help='Trim from beginning to here (1-based)')
+
+ parser.add_option(
+ '-e','--end',
+ dest='end',
+ default = '0',
+ help='Trim from here to the end (1-based)')
+
+ parser.add_option(
+ '-f','--file',
+ dest='input_txt',
+ default = False,
+ help='Name of file to be chopped. STDIN is default')
+
+ parser.add_option(
+ '-c','--column',
+ dest='col',
+ default = '0',
+ help='Column to chop. If 0 = chop the whole line')
+
+
+ options, args = parser.parse_args()
+ invalid_starts = []
+
+ if options.input_txt:
+ infile = open ( options.input_txt, 'r')
+ else:
+ infile = sys.stdin
+
+ if options.ignore and options.ignore != "None":
+ invalid_starts = options.ignore.split(',')
+
+ if options.ascii and options.ignore and options.ignore != "None":
+ for i, item in enumerate( invalid_starts ):
+ invalid_starts[i] = chr( int( item ) )
+
+ col = int( options.col )
+
+ for i, line in enumerate( infile ):
+ line = line.rstrip( '\r\n' )
+ if line:
+
+ if options.fastq and i % 2 == 0:
+ print line
+ continue
+
+
+ if line[0] not in invalid_starts:
+ if col == 0:
+ if int( options.end ) > 0:
+ line = line[ int( options.start )-1 : int( options.end ) ]
+ else:
+ line = line[ int( options.start )-1 : ]
+ else:
+ fields = line.split( '\t' )
+ if col-1 > len( fields ):
+ stop_err('Column %d does not exist. Check input parameters\n' % col)
+
+ if int( options.end ) > 0:
+ fields[col - 1] = fields[col - 1][ int( options.start )-1 : int( options.end ) ]
+ else:
+ fields[col - 1] = fields[col - 1][ int( options.start )-1 : ]
+ line = '\t'.join(fields)
+ print line
+
+if __name__ == "__main__": main()
+
diff -r 2f45da781f926f667dd90ad23d23fd123716a535 -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 tools/filters/trimmer.xml
--- /dev/null
+++ b/tools/filters/trimmer.xml
@@ -0,0 +1,120 @@
+<tool id="trimmer" name="Trim" version="0.0.1">
+ <description>leading or trailing characters</description>
+ <command interpreter="python">
+ trimmer.py -a -f $input1 -c $col -s $start -e $end -i $ignore $fastq > $out_file1
+ </command>
+ <inputs>
+ <param format="tabular,txt" name="input1" type="data" label="this dataset"/>
+ <param name="col" type="integer" value="0" label="Trim this column only" help="0 = process entire line" />
+ <param name="start" type="integer" size="10" value="1" label="Trim from the beginning to this position" help="1 = do not trim the beginning"/>
+ <param name="end" type="integer" size="10" value="0" label="Remove everything from this position to the end" help="0 = do not trim the end"/>
+ <param name="fastq" type="select" label="Is input dataset in fastq format?" help="If set to YES, the tool will not trim evenly numbered lines (0, 2, 4, etc...)">
+ <option selected="true" value="">No</option>
+ <option value="-q">Yes</option>
+ </param>
+ <param name="ignore" type="select" display="checkboxes" multiple="True" label="Ignore lines beginning with these characters" help="lines beginning with these are not trimmed">
+ <option value="62">></option>
+ <option value="64">@</option>
+ <option value="43">+</option>
+ <option value="60"><</option>
+ <option value="42">*</option>
+ <option value="45">-</option>
+ <option value="61">=</option>
+ <option value="124">|</option>
+ <option value="63">?</option>
+ <option value="36">$</option>
+ <option value="46">.</option>
+ <option value="58">:</option>
+ <option value="38">&</option>
+ <option value="37">%</option>
+ <option value="94">^</option>
+ <option value="35">#</option>
+ </param>
+ </inputs>
+ <outputs>
+ <data name="out_file1" format="input" metadata_source="input1"/>
+ </outputs>
+ <tests>
+ <test>
+ <param name="input1" value="trimmer_tab_delimited.dat"/>
+ <param name="col" value="0"/>
+ <param name="start" value="1"/>
+ <param name="end" value="13"/>
+ <param name="ignore" value="62"/>
+ <param name="fastq" value="No"/>
+ <output name="out_file1" file="trimmer_a_f_c0_s1_e13_i62.dat"/>
+ </test>
+ <test>
+ <param name="input1" value="trimmer_tab_delimited.dat"/>
+ <param name="col" value="2"/>
+ <param name="start" value="1"/>
+ <param name="end" value="2"/>
+ <param name="ignore" value="62"/>
+ <param name="fastq" value="No"/>
+ <output name="out_file1" file="trimmer_a_f_c2_s1_e2_i62.dat"/>
+ </test>
+
+ </tests>
+
+ <help>
+
+
+**What it does**
+
+Trims specified number of characters from a dataset or its field (if dataset is tab-delimited).
+
+-----
+
+**Example 1**
+
+Trimming this dataset::
+
+ 1234567890
+ abcdefghijk
+
+by setting **Trim from the beginning to this position** to *2* and **Remove everything from this position to the end** to *6* will produce::
+
+ 23456
+ bcdef
+
+-----
+
+**Example 2**
+
+Trimming column 2 of this dataset::
+
+ abcde 12345 fghij 67890
+ fghij 67890 abcde 12345
+
+by setting **Trim content of this column only** to *2*, **Trim from the beginning to this position** to *2*, and **Remove everything from this position to the end** to *4* will produce::
+
+ abcde 234 fghij 67890
+ fghij 789 abcde 12345
+
+-----
+
+**Trimming FASTQ datasets**
+
+This tool can be used to trim sequences and quality strings in fastq datasets. This is done by selecting *Yes* from the **Is input dataset in fastq format?** dropdown. If set to *Yes*, the tool will skip all even numbered lines (see warning below). For example, trimming last 5 bases of this dataset::
+
+ @081017-and-081020:1:1:1715:1759
+ GGACTCAGATAGTAATCCACGCTCCTTTAAAATATC
+ +
+ II#IIIIIII$5+.(9IIIIIII$%*$G$A31I&&B
+
+can be done by setting **Remove everything from this position to the end** to 31::
+
+ @081017-and-081020:1:1:1715:1759
+ GGACTCAGATAGTAATCCACGCTCCTTTAAA
+ +
+ II#IIIIIII$5+.(9IIIIIII$%*$G$A3
+
+**Note** that headers are skipped.
+
+.. class:: warningmark
+
+**WARNING:** This tool will only work on properly formatted fastq datasets where (1) each read and quality string occupy one line and (2) '@' (read header) and "+" (quality header) lines are evenly numbered like in the above example.
+
+
+ </help>
+</tool>
https://bitbucket.org/galaxy/galaxy-central/changeset/facdd387b85e/
changeset: facdd387b85e
user: inithello
date: 2012-12-07 22:39:17
summary: Updated tool_conf.xml.main with reverted migration.
affected #: 1 file
diff -r 659392eae2a8946ea6d2f2c5534d5c51c1dc28f3 -r facdd387b85e814df7428ca8cbb71828d0ec48a2 tool_conf.xml.main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -33,11 +33,21 @@
<tool file="extract/liftOver_wrapper.xml" /></section><section name="Text Manipulation" id="textutil">
+ <tool file="filters/fixedValueColumn.xml" /><tool file="stats/column_maker.xml" /><tool file="filters/catWrapper.xml" />
+ <tool file="filters/condense_characters.xml" />
+ <tool file="filters/convert_characters.xml" />
+ <tool file="filters/mergeCols.xml" /><tool file="filters/CreateInterval.xml" />
+ <tool file="filters/cutWrapper.xml" />
+ <tool file="filters/changeCase.xml" /><tool file="filters/pasteWrapper.xml" />
+ <tool file="filters/remove_beginning.xml" /><tool file="filters/randomlines.xml" />
+ <tool file="filters/headWrapper.xml" />
+ <tool file="filters/tailWrapper.xml" />
+ <tool file="filters/trimmer.xml" /><tool file="filters/wc_gnu.xml" /><tool file="filters/secure_hash_message_digest.xml" /></section>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c68762fcb5a5/
changeset: c68762fcb5a5
user: inithello
date: 2012-12-07 22:08:40
summary: Migrate tools to tool shed: addValue, ChangeCase, Condense characters1, Convert characters1, Cut1, mergeCols1, Remove beginning1, Show beginning1, Show tail1, trimmer
affected #: 35 files
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 lib/galaxy/tool_shed/migrate/versions/0008_tools.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/migrate/versions/0008_tools.py
@@ -0,0 +1,21 @@
+"""
+The following tools have been eliminated from the distribution:
+Add column to an existing dataset, Change Case of selected columns,
+Condense consecutive characters, Convert delimiters to TAB,
+Cut columns from a table, Merge Columns together, Remove beginning of a file,
+Select first lines from a dataset, Select last lines from a dataset,
+and Trim leading or trailing characters. The tools are now available in the
+repositories named add_value, change_case, condense_characters,
+convert_characters, cut_columns, merge_cols, remove_beginning,
+show_beginning, show_tail, and trimmer from the main Galaxy tool shed at
+http://toolshed.g2.bx.psu.edu, and will be installed into your
+local Galaxy instance at the location discussed above by running
+the following command.
+"""
+
+import sys
+
+def upgrade():
+ print __doc__
+def downgrade():
+ pass
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 scripts/migrate_tools/0008_tools.sh
--- /dev/null
+++ b/scripts/migrate_tools/0008_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0008_tools.xml $@
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 scripts/migrate_tools/0008_tools.xml
--- /dev/null
+++ b/scripts/migrate_tools/0008_tools.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+ <repository name="add_value" description="Add a value as a new column." changeset_revision="181dd378275c">
+ <tool id="addValue" version="1.0.0" file="fixedValueColumn.xml" />
+ </repository>
+ <repository name="change_case" description="Change the case of a column." changeset_revision="e6f966602870">
+ <tool id="ChangeCase" version="1.0.0" file="changeCase.xml" />
+ </repository>
+ <repository name="condense_characters" description="Condense repeated characters." changeset_revision="2c08781560de">
+ <tool id="Condense characters1" version="1.0.0" file="condense_characters.xml" />
+ </repository>
+ <repository name="convert_characters" description="Convert delimiters to TAB." changeset_revision="64d46676a13e">
+ <tool id="Convert characters1" version="1.0.0" file="convert_characters.xml" />
+ </repository>
+ <repository name="cut_columns" description="Remove or rearrange columns." changeset_revision="34c29e183ef7">
+ <tool id="Cut1" version="1.0.1" file="cutWrapper.xml" />
+ </repository>
+ <repository name="merge_cols" description="Merge columns together." changeset_revision="28ca7552e884">
+ <tool id="mergeCols1" version="1.0.1" file="mergeCols.xml" />
+ </repository>
+ <repository name="remove_beginning" description="Remove lines from the beginning of a file." changeset_revision="d9b82504a321">
+ <tool id="Remove beginning1" version="1.0.0" file="remove_beginning.xml" />
+ </repository>
+ <repository name="show_beginning" description="Select lines from the beginning of a file." changeset_revision="ecca14446e6a">
+ <tool id="Show beginning1" version="1.0.0" file="headWrapper.xml" />
+ </repository>
+ <repository name="show_tail" description="Select lines from the end of a file." changeset_revision="8bb4d908a523">
+ <tool id="Show tail1" version="1.0.0" file="tailWrapper.xml" />
+ </repository>
+ <repository name="trimmer" description="Trim trailing characters from each line or column." changeset_revision="f862a6e4d096">
+ <tool id="trimmer" version="0.0.1" file="trimmer.xml" />
+ </repository>
+</toolshed>
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 test-data/a.txt
--- a/test-data/a.txt
+++ /dev/null
@@ -1,15 +0,0 @@
- CHR SNP BP A1 TEST NMISS BETA STAT P
- 1 rs1181876 3671541 T DOMDEV 958 -1.415 -3.326 0.0009161
- 1 rs10492923 5092886 C ADD 1007 5.105 4.368 1.382e-05
- 1 rs10492923 5092886 C DOMDEV 1007 -5.612 -4.249 2.35e-05
- 1 rs10492923 5092886 C GENO_2DF 1007 NA 19.9 4.775e-05
- 1 rs1801133 11778965 T ADD 1022 1.23 3.97 7.682e-05
- 1 rs1801133 11778965 T GENO_2DF 1022 NA 16.07 0.0003233
- 1 rs1361912 12663121 A ADD 1021 12.69 4.093 4.596e-05
- 1 rs1361912 12663121 A DOMDEV 1021 -12.37 -3.945 8.533e-05
- 1 rs1361912 12663121 A GENO_2DF 1021 NA 17.05 0.0001982
- 1 rs1009806 19373138 G ADD 1021 -1.334 -3.756 0.0001826
- 1 rs1009806 19373138 G GENO_2DF 1021 NA 19.36 6.244e-05
- 1 rs873654 29550948 A DOMDEV 1012 1.526 3.6 0.0003339
- 1 rs10489527 36800027 C ADD 1016 12.67 4.114 4.211e-05
- 1 rs10489527 36800027 C DOMDEV 1016 -13.05 -4.02 6.249e-05
\ No newline at end of file
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 test-data/changeCase_out1.tabular
--- a/test-data/changeCase_out1.tabular
+++ /dev/null
@@ -1,10 +0,0 @@
-CHR1 4225 19670
-CHR10 6 8
-CHR1 24417 24420
-CHR6_HLA_HAP2 0 150
-CHR2 1 5
-CHR10 2 10
-CHR1 30 55
-CHRY 1 20
-CHR1 1225979 42287290
-CHR10 7 8
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 test-data/changeCase_out2.tabular
--- a/test-data/changeCase_out2.tabular
+++ /dev/null
@@ -1,65 +0,0 @@
-CHR1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 -
-CHR1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 +
-CHR1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 -
-CHR1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 +
-CHR10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 -
-CHR11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 -
-CHR11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 +
-CHR11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 -
-CHR11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 +
-CHR12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 -
-CHR13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 +
-CHR14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 -
-CHR15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 -
-CHR15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 +
-CHR15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 -
-CHR15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 +
-CHR16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 +
-CHR16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 -
-CHR16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 +
-CHR16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 -
-CHR18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 -
-CHR18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 +
-CHR18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 -
-CHR18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 +
-CHR19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 +
-CHR19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 -
-CHR19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 +
-CHR19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 -
-CHR2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 +
-CHR2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 -
-CHR2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 +
-CHR2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 -
-CHR20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 -
-CHR20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 +
-CHR20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 -
-CHR20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 +
-CHR21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 +
-CHR21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 -
-CHR21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 +
-CHR21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 -
-CHR22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 +
-CHR22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 -
-CHR22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 +
-CHR22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 -
-CHR5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 +
-CHR5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 -
-CHR5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 +
-CHR5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 -
-CHR6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 -
-CHR6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 +
-CHR6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 -
-CHR6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 +
-CHR7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 +
-CHR7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 -
-CHR7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 +
-CHR7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 -
-CHR8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 -
-CHR9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 +
-CHR9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 -
-CHR9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 +
-CHR9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 -
-CHRX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 +
-CHRX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 -
-CHRX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 +
-CHRX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 -
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 test-data/eq-addvalue.dat
--- a/test-data/eq-addvalue.dat
+++ /dev/null
@@ -1,65 +0,0 @@
-chr1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 - 1
-chr1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 + 1
-chr1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 - 1
-chr1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 + 1
-chr10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 - 1
-chr11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 - 1
-chr11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 + 1
-chr11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 - 1
-chr11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 + 1
-chr12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 - 1
-chr13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 + 1
-chr14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 - 1
-chr15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 - 1
-chr15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 + 1
-chr15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 - 1
-chr15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 + 1
-chr16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 + 1
-chr16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 - 1
-chr16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 + 1
-chr16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 - 1
-chr18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 - 1
-chr18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 + 1
-chr18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 - 1
-chr18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 + 1
-chr19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 + 1
-chr19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 - 1
-chr19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 + 1
-chr19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 - 1
-chr2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 + 1
-chr2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 - 1
-chr2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 + 1
-chr2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 - 1
-chr20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 - 1
-chr20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 + 1
-chr20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 - 1
-chr20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 + 1
-chr21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 + 1
-chr21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 - 1
-chr21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 + 1
-chr21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 - 1
-chr22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 + 1
-chr22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 - 1
-chr22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 + 1
-chr22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 - 1
-chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 + 1
-chr5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 - 1
-chr5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 + 1
-chr5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 - 1
-chr6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 - 1
-chr6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 + 1
-chr6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 - 1
-chr6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 + 1
-chr7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 + 1
-chr7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 - 1
-chr7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 + 1
-chr7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 - 1
-chr8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 - 1
-chr9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 + 1
-chr9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 - 1
-chr9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 + 1
-chr9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 - 1
-chrX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 + 1
-chrX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 - 1
-chrX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 + 1
-chrX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 - 1
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 test-data/eq-condense.dat
--- a/test-data/eq-condense.dat
+++ /dev/null
@@ -1,65 +0,0 @@
-chr1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 -
-chr1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 +
-chr1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 -
-chr1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 +
-chr10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 -
-chr11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 -
-chr11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 +
-chr11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 -
-chr11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 +
-chr12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 -
-chr13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 +
-chr14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 -
-chr15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 -
-chr15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 +
-chr15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 -
-chr15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 +
-chr16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 +
-chr16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 -
-chr16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 +
-chr16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 -
-chr18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 -
-chr18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 +
-chr18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 -
-chr18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 +
-chr19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 +
-chr19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 -
-chr19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 +
-chr19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 -
-chr2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 +
-chr2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 -
-chr2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 +
-chr2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 -
-chr20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 -
-chr20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 +
-chr20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 -
-chr20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 +
-chr21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 +
-chr21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 -
-chr21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 +
-chr21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 -
-chr22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 +
-chr22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 -
-chr22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 +
-chr22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 -
-chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 +
-chr5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 -
-chr5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 +
-chr5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 -
-chr6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 -
-chr6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 +
-chr6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 -
-chr6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 +
-chr7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 +
-chr7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 -
-chr7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 +
-chr7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 -
-chr8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 -
-chr9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 +
-chr9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 -
-chr9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 +
-chr9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 -
-chrX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 +
-chrX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 -
-chrX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 +
-chrX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 -
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 test-data/eq-convert.dat
--- a/test-data/eq-convert.dat
+++ /dev/null
@@ -1,65 +0,0 @@
-chr1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 -
-chr1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 +
-chr1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 -
-chr1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 +
-chr10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 -
-chr11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 -
-chr11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 +
-chr11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 -
-chr11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 +
-chr12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 -
-chr13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 +
-chr14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 -
-chr15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 -
-chr15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 +
-chr15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 -
-chr15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 +
-chr16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 +
-chr16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 -
-chr16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 +
-chr16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 -
-chr18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 -
-chr18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 +
-chr18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 -
-chr18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 +
-chr19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 +
-chr19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 -
-chr19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 +
-chr19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 -
-chr2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 +
-chr2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 -
-chr2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 +
-chr2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 -
-chr20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 -
-chr20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 +
-chr20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 -
-chr20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 +
-chr21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 +
-chr21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 -
-chr21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 +
-chr21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 -
-chr22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 +
-chr22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 -
-chr22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 +
-chr22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 -
-chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 +
-chr5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 -
-chr5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 +
-chr5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 -
-chr6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 -
-chr6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 +
-chr6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 -
-chr6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 +
-chr7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 +
-chr7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 -
-chr7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 +
-chr7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 -
-chr8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 -
-chr9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 +
-chr9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 -
-chr9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 +
-chr9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 -
-chrX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 +
-chrX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 -
-chrX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 +
-chrX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 -
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 test-data/eq-cut.dat
--- a/test-data/eq-cut.dat
+++ /dev/null
@@ -1,65 +0,0 @@
-chr1 CCDS989.1_cds_0_0_chr1_147962193_r 147962192 147962580
-chr1 CCDS990.1_cds_0_0_chr1_147984546_f 147984545 147984630
-chr1 CCDS993.1_cds_0_0_chr1_148078401_r 148078400 148078582
-chr1 CCDS996.1_cds_0_0_chr1_148185137_f 148185136 148185276
-chr10 CCDS7248.1_cds_0_0_chr10_55251624_r 55251623 55253124
-chr11 CCDS8374.1_cds_0_0_chr11_116124408_r 116124407 116124501
-chr11 CCDS8377.1_cds_0_0_chr11_116206509_f 116206508 116206563
-chr11 CCDS8378.1_cds_0_0_chr11_116211734_r 116211733 116212337
-chr11 CCDS7726.1_cds_0_0_chr11_1812378_f 1812377 1812407
-chr12 CCDS8736.1_cds_0_0_chr12_38440095_r 38440094 38440321
-chr13 CCDS9526.1_cds_0_0_chr13_112381695_f 112381694 112381953
-chr14 CCDS9949.1_cds_0_0_chr14_98710241_r 98710240 98712285
-chr15 CCDS10096.1_cds_0_0_chr15_41486873_r 41486872 41487060
-chr15 CCDS10097.1_cds_0_0_chr15_41673709_f 41673708 41673857
-chr15 CCDS10098.1_cds_0_0_chr15_41679162_r 41679161 41679250
-chr15 CCDS10101.1_cds_0_0_chr15_41826030_f 41826029 41826196
-chr16 CCDS10397.1_cds_0_0_chr16_142909_f 142908 143003
-chr16 CCDS10401.1_cds_0_0_chr16_179964_r 179963 180135
-chr16 CCDS10402.1_cds_0_0_chr16_244414_f 244413 244681
-chr16 CCDS10403.1_cds_0_0_chr16_259269_r 259268 259383
-chr18 CCDS11891.1_cds_0_0_chr18_23786115_r 23786114 23786321
-chr18 CCDS11985.1_cds_0_0_chr18_59406882_f 59406881 59407046
-chr18 CCDS11986.1_cds_0_0_chr18_59455933_r 59455932 59456337
-chr18 CCDS11988.1_cds_0_0_chr18_59600587_f 59600586 59600754
-chr19 CCDS12866.1_cds_0_0_chr19_59068596_f 59068595 59069564
-chr19 CCDS12872.1_cds_0_0_chr19_59236027_r 59236026 59236146
-chr19 CCDS12877.1_cds_0_0_chr19_59297999_f 59297998 59298008
-chr19 CCDS12878.1_cds_0_0_chr19_59302169_r 59302168 59302288
-chr2 CCDS2120.1_cds_0_0_chr2_118288584_f 118288583 118288668
-chr2 CCDS2121.1_cds_0_0_chr2_118394149_r 118394148 118394202
-chr2 CCDS2441.1_cds_0_0_chr2_220190203_f 220190202 220190242
-chr2 CCDS2443.1_cds_0_0_chr2_220229610_r 220229609 220230869
-chr20 CCDS13249.1_cds_0_0_chr20_33330414_r 33330413 33330423
-chr20 CCDS13255.1_cds_0_0_chr20_33513607_f 33513606 33513792
-chr20 CCDS13256.1_cds_0_0_chr20_33579501_r 33579500 33579527
-chr20 CCDS13257.1_cds_0_0_chr20_33593261_f 33593260 33593348
-chr21 CCDS13614.1_cds_0_0_chr21_32707033_f 32707032 32707192
-chr21 CCDS13615.1_cds_0_0_chr21_32869642_r 32869641 32870022
-chr21 CCDS13620.1_cds_0_0_chr21_33321041_f 33321040 33322012
-chr21 CCDS13625.1_cds_0_0_chr21_33744995_r 33744994 33745040
-chr22 CCDS13897.1_cds_0_0_chr22_30120224_f 30120223 30120265
-chr22 CCDS13898.1_cds_0_0_chr22_30160420_r 30160419 30160661
-chr22 CCDS13901.1_cds_0_0_chr22_30665274_f 30665273 30665360
-chr22 CCDS13903.1_cds_0_0_chr22_30939055_r 30939054 30939266
-chr5 CCDS4149.1_cds_0_0_chr5_131424299_f 131424298 131424460
-chr5 CCDS4151.1_cds_0_0_chr5_131556602_r 131556601 131556672
-chr5 CCDS4152.1_cds_0_0_chr5_131621327_f 131621326 131621419
-chr5 CCDS4155.1_cds_0_0_chr5_131847542_r 131847541 131847666
-chr6 CCDS5061.1_cds_0_0_chr6_108299601_r 108299600 108299744
-chr6 CCDS5063.1_cds_0_0_chr6_108594663_f 108594662 108594687
-chr6 CCDS5064.1_cds_0_0_chr6_108640046_r 108640045 108640151
-chr6 CCDS5067.1_cds_0_0_chr6_108722977_f 108722976 108723115
-chr7 CCDS5760.1_cds_0_0_chr7_113660518_f 113660517 113660685
-chr7 CCDS5771.1_cds_0_0_chr7_116512160_r 116512159 116512389
-chr7 CCDS5773.1_cds_0_0_chr7_116714100_f 116714099 116714152
-chr7 CCDS5774.1_cds_0_0_chr7_116945542_r 116945541 116945787
-chr8 CCDS6324.1_cds_0_0_chr8_118881132_r 118881131 118881317
-chr9 CCDS6914.1_cds_0_0_chr9_128764157_f 128764156 128764189
-chr9 CCDS6915.1_cds_0_0_chr9_128787520_r 128787519 128789136
-chr9 CCDS6917.1_cds_0_0_chr9_128882428_f 128882427 128882523
-chr9 CCDS6919.1_cds_0_0_chr9_128937230_r 128937229 128937445
-chrX CCDS14606.1_cds_0_0_chrX_122745048_f 122745047 122745924
-chrX CCDS14733.1_cds_0_0_chrX_152648965_r 152648964 152649196
-chrX CCDS14735.1_cds_0_0_chrX_152691447_f 152691446 152691471
-chrX CCDS14736.1_cds_0_0_chrX_152694030_r 152694029 152694263
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 test-data/mergeCols.dat
--- a/test-data/mergeCols.dat
+++ /dev/null
@@ -1,65 +0,0 @@
-chr1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 - CCDS989.1_cds_0_0_chr1_147962193_rchr1-
-chr1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 + CCDS990.1_cds_0_0_chr1_147984546_fchr1+
-chr1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 - CCDS993.1_cds_0_0_chr1_148078401_rchr1-
-chr1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 + CCDS996.1_cds_0_0_chr1_148185137_fchr1+
-chr10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 - CCDS7248.1_cds_0_0_chr10_55251624_rchr10-
-chr11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 - CCDS8374.1_cds_0_0_chr11_116124408_rchr11-
-chr11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 + CCDS8377.1_cds_0_0_chr11_116206509_fchr11+
-chr11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 - CCDS8378.1_cds_0_0_chr11_116211734_rchr11-
-chr11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 + CCDS7726.1_cds_0_0_chr11_1812378_fchr11+
-chr12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 - CCDS8736.1_cds_0_0_chr12_38440095_rchr12-
-chr13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 + CCDS9526.1_cds_0_0_chr13_112381695_fchr13+
-chr14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 - CCDS9949.1_cds_0_0_chr14_98710241_rchr14-
-chr15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 - CCDS10096.1_cds_0_0_chr15_41486873_rchr15-
-chr15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 + CCDS10097.1_cds_0_0_chr15_41673709_fchr15+
-chr15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 - CCDS10098.1_cds_0_0_chr15_41679162_rchr15-
-chr15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 + CCDS10101.1_cds_0_0_chr15_41826030_fchr15+
-chr16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 + CCDS10397.1_cds_0_0_chr16_142909_fchr16+
-chr16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 - CCDS10401.1_cds_0_0_chr16_179964_rchr16-
-chr16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 + CCDS10402.1_cds_0_0_chr16_244414_fchr16+
-chr16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 - CCDS10403.1_cds_0_0_chr16_259269_rchr16-
-chr18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 - CCDS11891.1_cds_0_0_chr18_23786115_rchr18-
-chr18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 + CCDS11985.1_cds_0_0_chr18_59406882_fchr18+
-chr18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 - CCDS11986.1_cds_0_0_chr18_59455933_rchr18-
-chr18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 + CCDS11988.1_cds_0_0_chr18_59600587_fchr18+
-chr19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 + CCDS12866.1_cds_0_0_chr19_59068596_fchr19+
-chr19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 - CCDS12872.1_cds_0_0_chr19_59236027_rchr19-
-chr19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 + CCDS12877.1_cds_0_0_chr19_59297999_fchr19+
-chr19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 - CCDS12878.1_cds_0_0_chr19_59302169_rchr19-
-chr2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 + CCDS2120.1_cds_0_0_chr2_118288584_fchr2+
-chr2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 - CCDS2121.1_cds_0_0_chr2_118394149_rchr2-
-chr2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 + CCDS2441.1_cds_0_0_chr2_220190203_fchr2+
-chr2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 - CCDS2443.1_cds_0_0_chr2_220229610_rchr2-
-chr20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 - CCDS13249.1_cds_0_0_chr20_33330414_rchr20-
-chr20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 + CCDS13255.1_cds_0_0_chr20_33513607_fchr20+
-chr20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 - CCDS13256.1_cds_0_0_chr20_33579501_rchr20-
-chr20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 + CCDS13257.1_cds_0_0_chr20_33593261_fchr20+
-chr21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 + CCDS13614.1_cds_0_0_chr21_32707033_fchr21+
-chr21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 - CCDS13615.1_cds_0_0_chr21_32869642_rchr21-
-chr21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 + CCDS13620.1_cds_0_0_chr21_33321041_fchr21+
-chr21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 - CCDS13625.1_cds_0_0_chr21_33744995_rchr21-
-chr22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 + CCDS13897.1_cds_0_0_chr22_30120224_fchr22+
-chr22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 - CCDS13898.1_cds_0_0_chr22_30160420_rchr22-
-chr22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 + CCDS13901.1_cds_0_0_chr22_30665274_fchr22+
-chr22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 - CCDS13903.1_cds_0_0_chr22_30939055_rchr22-
-chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 + CCDS4149.1_cds_0_0_chr5_131424299_fchr5+
-chr5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 - CCDS4151.1_cds_0_0_chr5_131556602_rchr5-
-chr5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 + CCDS4152.1_cds_0_0_chr5_131621327_fchr5+
-chr5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 - CCDS4155.1_cds_0_0_chr5_131847542_rchr5-
-chr6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 - CCDS5061.1_cds_0_0_chr6_108299601_rchr6-
-chr6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 + CCDS5063.1_cds_0_0_chr6_108594663_fchr6+
-chr6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 - CCDS5064.1_cds_0_0_chr6_108640046_rchr6-
-chr6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 + CCDS5067.1_cds_0_0_chr6_108722977_fchr6+
-chr7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 + CCDS5760.1_cds_0_0_chr7_113660518_fchr7+
-chr7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 - CCDS5771.1_cds_0_0_chr7_116512160_rchr7-
-chr7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 + CCDS5773.1_cds_0_0_chr7_116714100_fchr7+
-chr7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 - CCDS5774.1_cds_0_0_chr7_116945542_rchr7-
-chr8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 - CCDS6324.1_cds_0_0_chr8_118881132_rchr8-
-chr9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 + CCDS6914.1_cds_0_0_chr9_128764157_fchr9+
-chr9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 - CCDS6915.1_cds_0_0_chr9_128787520_rchr9-
-chr9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 + CCDS6917.1_cds_0_0_chr9_128882428_fchr9+
-chr9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 - CCDS6919.1_cds_0_0_chr9_128937230_rchr9-
-chrX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 + CCDS14606.1_cds_0_0_chrX_122745048_fchrX+
-chrX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 - CCDS14733.1_cds_0_0_chrX_152648965_rchrX-
-chrX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 + CCDS14735.1_cds_0_0_chrX_152691447_fchrX+
-chrX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 - CCDS14736.1_cds_0_0_chrX_152694030_rchrX-
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 test-data/trimmer_a_f_c0_s1_e13_i62.dat
--- a/test-data/trimmer_a_f_c0_s1_e13_i62.dat
+++ /dev/null
@@ -1,5 +0,0 @@
-12345 abcdef
-67890 ghjkl g
->assa lljlj ljlj
-sasas hghg hg
-@dgf gfgf gfg
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 test-data/trimmer_a_f_c2_s1_e2_i62.dat
--- a/test-data/trimmer_a_f_c2_s1_e2_i62.dat
+++ /dev/null
@@ -1,5 +0,0 @@
-12345 ab xyz
-67890 gh ghjt
->assa lljlj ljlj
-sasas hg hghg
-@dgf gf gfgf
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 test-data/trimmer_tab_delimited.dat
--- a/test-data/trimmer_tab_delimited.dat
+++ /dev/null
@@ -1,5 +0,0 @@
-12345 abcdef xyz
-67890 ghjkl ghjt
->assa lljlj ljlj
-sasas hghg hghg
-@dgf gfgf gfgf
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tool_conf.xml.sample
--- a/tool_conf.xml.sample
+++ b/tool_conf.xml.sample
@@ -44,21 +44,12 @@
<tool file="extract/liftOver_wrapper.xml" /></section><section name="Text Manipulation" id="textutil">
- <tool file="filters/fixedValueColumn.xml" /><tool file="stats/column_maker.xml" /><tool file="filters/catWrapper.xml" /><tool file="filters/cutWrapper.xml" />
- <tool file="filters/mergeCols.xml" />
- <tool file="filters/convert_characters.xml" /><tool file="filters/CreateInterval.xml" />
- <tool file="filters/cutWrapper.xml" />
- <tool file="filters/changeCase.xml" /><tool file="filters/pasteWrapper.xml" />
- <tool file="filters/remove_beginning.xml" /><tool file="filters/randomlines.xml" />
- <tool file="filters/headWrapper.xml" />
- <tool file="filters/tailWrapper.xml" />
- <tool file="filters/trimmer.xml" /><tool file="filters/wc_gnu.xml" /><tool file="filters/secure_hash_message_digest.xml" /><tool file="stats/dna_filtering.xml" />
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/changeCase.pl
--- a/tools/filters/changeCase.pl
+++ /dev/null
@@ -1,58 +0,0 @@
-#! /usr/bin/perl -w
-
-use strict;
-use warnings;
-
-my $columns = {};
-my $del = "";
-my @in = ();
-my @out = ();
-my $command = "";
-my $field = 0;
-
-# a wrapper for changing the case of columns from within galaxy
-# isaChangeCase.pl [filename] [columns] [delim] [casing] [output]
-
-die "Check arguments: $0 [filename] [columns] [delim] [casing] [output]\n" unless @ARGV == 5;
-
-# process column input
-$ARGV[1] =~ s/\s+//g;
-foreach ( split /,/, $ARGV[1] ) {
- if (m/^c\d{1,}$/i) {
- s/c//ig;
- $columns->{$_} = --$_;
- }
-}
-
-die "No columns specified, columns are not preceeded with 'c', or commas are not used to separate column numbers: $ARGV[1]\n" if keys %$columns == 0;
-
-my $column_delimiters_href = {
- 'TAB' => q{\t},
- 'COMMA' => ",",
- 'DASH' => "-",
- 'UNDERSCORE' => "_",
- 'PIPE' => q{\|},
- 'DOT' => q{\.},
- 'SPACE' => q{\s+}
-};
-
-$del = $column_delimiters_href->{$ARGV[2]};
-
-open (OUT, ">$ARGV[4]") or die "Cannot create $ARGV[4]:$!\n";
-open (IN, "<$ARGV[0]") or die "Cannot open $ARGV[0]:$!\n";
-while (<IN>) {
- chop;
- @in = split /$del/;
- for ( my $i = 0; $i <= $#in; ++$i) {
- if (exists $columns->{$i}) {
- push(@out, $ARGV[3] eq 'up' ? uc($in[$i]) : lc($in[$i]));
- } else {
- push(@out, $in[$i]);
- }
- }
- print OUT join("\t",@out), "\n";
- @out = ();
-}
-close IN;
-
-close OUT;
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/changeCase.xml
--- a/tools/filters/changeCase.xml
+++ /dev/null
@@ -1,77 +0,0 @@
-<tool id="ChangeCase" name="Change Case">
- <description> of selected columns</description>
- <stdio>
- <exit_code range="1:" err_level="fatal" />
- </stdio>
- <command interpreter="perl">changeCase.pl $input "$cols" $delimiter $casing $out_file1</command>
- <inputs>
- <param name="input" format="txt" type="data" label="From"/>
- <param name="cols" size="10" type="text" value="c1,c2" label="Change case of columns"/>
- <param name="delimiter" type="select" label="Delimited by">
- <option value="TAB">Tab</option>
- <option value="SPACE">Whitespace</option>
- <option value="DOT">Dot</option>
- <option value="COMMA">Comma</option>
- <option value="DASH">Dash</option>
- <option value="UNDERSCORE">Underscore</option>
- <option value="PIPE">Pipe</option>
- </param>
- <param name="casing" type="select" label="To">
- <option value="up">Upper case</option>
- <option value="lo">Lower case</option>
- </param>
- </inputs>
- <outputs>
- <data format="tabular" name="out_file1" />
- </outputs>
- <tests>
- <test>
- <param name="input" value="1.txt" ftype="txt"/>
- <param name="cols" value="c1"/>
- <param name="delimiter" value="SPACE"/>
- <param name="casing" value="up"/>
- <output name="out_file1" file="changeCase_out1.tabular"/>
- </test>
- <test>
- <param name="input" value="1.bed" ftype="bed"/>
- <param name="cols" value="c1"/>
- <param name="delimiter" value="TAB"/>
- <param name="casing" value="up"/>
- <output name="out_file1" file="changeCase_out2.tabular"/>
- </test>
- </tests>
- <help>
-
-.. class:: warningmark
-
-**This tool breaks column assignments.** To re-establish column assignments run the tool and click on the pencil icon in the resulting history item.
-
-.. class:: warningmark
-
-The format of the resulting dataset from this tool is always tabular.
-
------
-
-**What it does**
-
-This tool selects specified columns from a dataset and converts the values of those columns to upper or lower case.
-
-- Columns are specified as **c1**, **c2**, and so on.
-- Columns can be specified in any order (e.g., **c2,c1,c6**)
-
------
-
-**Example**
-
-Changing columns 1 and 3 ( delimited by Comma ) to upper case in::
-
- apple,is,good
- windows,is,bad
-
-will result in::
-
- APPLE is GOOD
- WINDOWS is BAD
-
- </help>
-</tool>
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/condense_characters.pl
--- a/tools/filters/condense_characters.pl
+++ /dev/null
@@ -1,105 +0,0 @@
-#! /usr/bin/perl -w
-
-use strict;
-use warnings;
-
-# condenses all consecutive characters of one type
-# convert_characters.pl [input] [character] [output]
-
-die "Check arguments" unless @ARGV == 3;
-
-my $inputfile = $ARGV[0];
-my $character = $ARGV[1];
-my $outputfile = $ARGV[2];
-
-
-my $convert_from;
-my $convert_to;
-
-
-if ($character eq "s")
-{
- $convert_from = '\s';
-}
-elsif ($character eq "T")
-{
- $convert_from = '\t';
-}
-elsif ($character eq "Sp")
-{
- $convert_from = " ";
-}
-elsif ($character eq "Dt")
-{
- $convert_from = '\.';
-}
-elsif ($character eq "C")
-{
- $convert_from = ",";
-}
-elsif ($character eq "D")
-{
- $convert_from = "-";
-}
-elsif ($character eq "U")
-{
- $convert_from = "_";
-}
-elsif ($character eq "P")
-{
- $convert_from = '\|';
-}
-else
-{
- die "Invalid value specified for convert from\n";
-}
-
-
-if ($character eq "T")
-{
- $convert_to = "\t";
-}
-elsif ($character eq "Sp")
-{
- $convert_to = " ";
-}
-elsif ($character eq "Dt")
-{
- $convert_to = "\.";
-}
-elsif ($character eq "C")
-{
- $convert_to = ",";
-}
-elsif ($character eq "D")
-{
- $convert_to = "-";
-}
-elsif ($character eq "U")
-{
- $convert_to = "_";
-}
-elsif ($character eq "P")
-{
- $convert_to = "|";
-}
-else
-{
- die "Invalid value specified for Convert to\n";
-}
-
-my $fhIn;
-open ($fhIn, "< $inputfile") or die "Cannot open source file";
-
-my $fhOut;
-open ($fhOut, "> $outputfile");
-
-while (<$fhIn>)
-{
- my $thisLine = $_;
- chomp $thisLine;
- $thisLine =~ s/${convert_from}+/$convert_to/g;
- print $fhOut $thisLine,"\n";
-}
-close ($fhIn) or die "Cannot close source file";
-close ($fhOut) or die "Cannot close output file";
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/condense_characters.xml
--- a/tools/filters/condense_characters.xml
+++ /dev/null
@@ -1,48 +0,0 @@
-<tool id="Condense characters1" name="Condense">
- <description>consecutive characters</description>
- <command interpreter="perl">condense_characters.pl $input $character $out_file1</command>
- <inputs>
-<!-- <display>condense all consecutive $character from $input</display> -->
- <param name="character" type="select" label="Condense all consecutive">
- <option value="T">Tabs</option>
- <option value="Sp">Spaces</option>
- <option value="Dt">Dots</option>
- <option value="C">Commas</option>
- <option value="D">Dashes</option>
- <option value="U">Underscores</option>
- <option value="P">Pipes</option>
- </param>
- <param format="txt" name="input" type="data" label="in this Query"/>
- </inputs>
- <outputs>
- <data format="input" name="out_file1" metadata_source="input" />
- </outputs>
- <tests>
- <test>
- <param name="character" value="T"/>
- <param name="input" value="1.bed"/>
- <output name="out_file1" file="eq-condense.dat"/>
- </test>
- </tests>
- <help>
-
-**What it does**
-
-This tool condenses all consecutive characters of a specified type.
-
------
-
-**Example**
-
-- Input file::
-
- geneX,,,10,,,,,20
- geneY,,5,,,,,12,15,9,
-
-- Condense all consecutive commas. The above file will be converted into::
-
- geneX,10,20
- geneY,5,12,15,9
-
-</help>
-</tool>
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/convert_characters.py
--- a/tools/filters/convert_characters.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-#By, Guruprasad Ananda.
-
-from galaxy import eggs
-import sys, re
-
-def stop_err(msg):
- sys.stderr.write(msg)
- sys.exit()
-
-def main():
- if len(sys.argv) != 4:
- stop_err("usage: convert_characters infile from_char outfile")
-
- try:
- fin = open(sys.argv[1],'r')
- except:
- stop_err("Input file cannot be opened for reading.")
-
- from_char = sys.argv[2]
-
- try:
- fout = open(sys.argv[3],'w')
- except:
- stop_err("Output file cannot be opened for writing.")
-
- char_dict = {'T':'\t','s':'\s','Dt':'\.','C':',','D':'-','U':'_','P':'\|','Co':':'}
- from_ch = char_dict[from_char] + '+' #making an RE to match 1 or more occurences.
- skipped = 0
-
- for line in fin:
- line = line.strip()
- try:
- fout.write("%s\n" %(re.sub(from_ch,'\t',line)))
- except:
- skipped += 1
-
- if skipped:
- print "Skipped %d lines as invalid." %skipped
-
-if __name__ == "__main__":
- main()
\ No newline at end of file
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/convert_characters.xml
--- a/tools/filters/convert_characters.xml
+++ /dev/null
@@ -1,58 +0,0 @@
-<tool id="Convert characters1" name="Convert">
- <description>delimiters to TAB</description>
- <command interpreter="python">convert_characters.py $input $convert_from $out_file1</command>
- <inputs>
- <param name="convert_from" type="select" label="Convert all">
- <option value="s">Whitespaces</option>
- <option value="T">Tabs</option>
- <!--<option value="Sp">Spaces</option>-->
- <option value="Dt">Dots</option>
- <option value="C">Commas</option>
- <option value="D">Dashes</option>
- <option value="U">Underscores</option>
- <option value="P">Pipes</option>
- <option value="Co">Colons</option>
- </param>
- <param format="txt" name="input" type="data" label="in Query"/>
- </inputs>
- <outputs>
- <data format="tabular" name="out_file1" />
- </outputs>
- <tests>
- <test>
- <param name="convert_from" value="s"/>
- <param name="input" value="1.bed"/>
- <output name="out_file1" file="eq-convert.dat"/>
- </test>
- <test>
- <param name="convert_from" value="s"/>
- <param name="input" value="a.txt"/>
- <output name="out_file1" file="a.tab"/>
- </test>
- </tests>
- <help>
-
-**What it does**
-
-Converts all delimiters of a specified type into TABs. Consecutive characters are condensed. For example, if columns are separated by 5 spaces they will converted into 1 tab.
-
------
-
-**Example**
-
-- Input file::
-
- chrX||151283558|151283724|NM_000808_exon_8_0_chrX_151283559_r|0|-
- chrX|151370273|151370486|NM_000808_exon_9_0_chrX_151370274_r|0|-
- chrX|151559494|151559583|NM_018558_exon_1_0_chrX_151559495_f|0|+
- chrX|151564643|151564711|NM_018558_exon_2_0_chrX_151564644_f||||0|+
-
-- Converting all pipe delimiters of the above file to TABs will get::
-
- chrX 151283558 151283724 NM_000808_exon_8_0_chrX_151283559_r 0 -
- chrX 151370273 151370486 NM_000808_exon_9_0_chrX_151370274_r 0 -
- chrX 151559494 151559583 NM_018558_exon_1_0_chrX_151559495_f 0 +
- chrX 151564643 151564711 NM_018558_exon_2_0_chrX_151564644_f 0 +
-
-</help>
-</tool>
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/cutWrapper.pl
--- a/tools/filters/cutWrapper.pl
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/perl -w
-
-use strict;
-use warnings;
-
-my @columns = ();
-my $del = "";
-my @in = ();
-my @out = ();
-my $command = "";
-my $field = 0;
-
-# a wrapper for cut for use in galaxy
-# cutWrapper.pl [filename] [columns] [delim] [output]
-
-die "Check arguments\n" unless @ARGV == 4;
-
-$ARGV[1] =~ s/\s+//g;
-foreach ( split /,/, $ARGV[1] ) {
- if (m/^c\d{1,}$/i) {
- push (@columns, $_);
- $columns[@columns-1] =~s/c//ig;
- }
-}
-
-die "No columns specified, columns are not preceded with 'c', or commas are not used to separate column numbers: $ARGV[1]\n" if @columns == 0;
-
-my $column_delimiters_href = {
- 'T' => q{\t},
- 'C' => ",",
- 'D' => "-",
- 'U' => "_",
- 'P' => q{\|},
- 'Dt' => q{\.},
- 'Sp' => q{\s+}
-};
-
-$del = $column_delimiters_href->{$ARGV[2]};
-
-open (OUT, ">$ARGV[3]") or die "Cannot create $ARGV[2]:$!\n";
-open (IN, "<$ARGV[0]") or die "Cannot open $ARGV[0]:$!\n";
-
-while (my $line=<IN>) {
- if ($line =~ /^#/) {
- #Ignore comment lines
- } else {
- chop($line);
- @in = split(/$del/, $line);
- foreach $field (@columns) {
- if (defined($in[$field-1])) {
- push(@out, $in[$field-1]);
- } else {
- push(@out, ".");
- }
- }
- print OUT join("\t",@out), "\n";
- @out = ();
- }
-}
-
-#while (<IN>) {
-# chop;
-# @in = split /$del/;
-# foreach $field (@columns) {
-# if (defined($in[$field-1])) {
-# push(@out, $in[$field-1]);
-# } else {
-# push(@out, ".");
-# }
-# }
-# print OUT join("\t",@out), "\n";
-# @out = ();
-#}
-close IN;
-
-close OUT;
-
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/cutWrapper.xml
--- a/tools/filters/cutWrapper.xml
+++ /dev/null
@@ -1,202 +0,0 @@
-<tool id="Cut1" name="Cut" version="1.0.1">
- <description>columns from a table</description>
- <command interpreter="perl">cutWrapper.pl $input "$columnList" $delimiter $out_file1</command>
- <inputs>
- <param name="columnList" size="10" type="text" value="c1,c2" label="Cut columns"/>
- <param name="delimiter" type="select" label="Delimited by">
- <option value="T">Tab</option>
- <option value="Sp">Whitespace</option>
- <option value="Dt">Dot</option>
- <option value="C">Comma</option>
- <option value="D">Dash</option>
- <option value="U">Underscore</option>
- <option value="P">Pipe</option>
- </param>
- <param format="txt" name="input" type="data" label="From"/>
- </inputs>
- <outputs>
- <data format="tabular" name="out_file1" >
- <actions>
- <conditional name="delimiter">
- <when value="T">
- <conditional name="input">
- <when datatype_isinstance="interval">
- <action type="format" default="tabular">
- <option type="from_param" name="columnList" column="0" offset="0"><!-- chromCol is 1-->
-
- <filter type="insert_column" column="0" value="interval"/>
-
- <filter type="insert_column" ref="columnList" /><!-- startCol -->
-
- <filter type="insert_column" ref="columnList" /><!-- endCol -->
-
- <filter type="multiple_splitter" column="1" separator=","/>
- <filter type="column_strip" column="1"/><!-- get rid of all external whitespace -->
- <filter type="string_function" column="1" name="lower" />
- <filter type="param_value" column="1" value="^c\d{1,}$" compare="re_search" keep="True"/>
- <filter type="column_strip" column="1" strip="c"/><!-- get rid of c's -->
- <filter type="boolean" column="1" cast="int" />
-
- <filter type="multiple_splitter" column="2" separator=","/>
- <filter type="column_strip" column="2"/><!-- get rid of all external whitespace -->
- <filter type="string_function" column="2" name="lower" />
- <filter type="param_value" column="2" value="^c\d{1,}$" compare="re_search" keep="True"/>
- <filter type="column_strip" column="2" strip="c"/><!-- get rid of c's -->
- <filter type="boolean" column="2" cast="int" />
-
- <filter type="multiple_splitter" column="3" separator=","/>
- <filter type="column_strip" column="3"/><!-- get rid of all external whitespace -->
- <filter type="string_function" column="3" name="lower" />
- <filter type="param_value" column="3" value="^c\d{1,}$" compare="re_search" keep="True"/>
- <filter type="column_strip" column="3" strip="c"/><!-- get rid of c's -->
- <filter type="boolean" column="3" cast="int" />
-
- <filter type="metadata_value" ref="input" name="chromCol" column="1" />
- <filter type="metadata_value" ref="input" name="startCol" column="2" />
- <filter type="metadata_value" ref="input" name="endCol" column="3" />
-
- </option>
- </action>
-
- <conditional name="out_file1">
- <when datatype_isinstance="interval">
- <action type="metadata" name="chromCol">
- <option type="from_param" name="columnList" column="0" offset="0"><!-- chromCol is 0-->
- <filter type="multiple_splitter" column="0" separator=","/>
- <filter type="column_strip" column="0"/><!-- get rid of all external whitespace -->
- <filter type="string_function" column="0" name="lower" />
- <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
- <filter type="column_strip" column="0" strip="c"/><!-- get rid of c's -->
- <filter type="insert_column" value="1" iterate="True" column="0"/>
- <filter type="boolean" column="1" cast="int" />
- <filter type="metadata_value" ref="input" name="chromCol" column="1" />
- </option>
- </action>
-
- <action type="metadata" name="startCol">
- <option type="from_param" name="columnList" column="0" offset="0"><!-- startCol is 0-->
- <filter type="multiple_splitter" column="0" separator=","/>
- <filter type="column_strip" column="0"/><!-- get rid of all external whitespace -->
- <filter type="string_function" column="0" name="lower" />
- <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
- <filter type="column_strip" column="0" strip="c"/><!-- get rid of c's -->
- <filter type="insert_column" value="1" iterate="True" column="0"/>
- <filter type="boolean" column="1" cast="int" />
- <filter type="metadata_value" ref="input" name="startCol" column="1" />
- </option>
- </action>
-
- <action type="metadata" name="endCol">
- <option type="from_param" name="columnList" column="0" offset="0"><!-- endCol is 0-->
- <filter type="multiple_splitter" column="0" separator=","/>
- <filter type="column_strip" column="0"/><!-- get rid of all external whitespace -->
- <filter type="string_function" column="0" name="lower" />
- <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
- <filter type="column_strip" column="0" strip="c"/><!-- get rid of c's -->
- <filter type="insert_column" value="1" iterate="True" column="0"/>
- <filter type="boolean" column="1" cast="int" />
- <filter type="metadata_value" ref="input" name="endCol" column="1" />
- </option>
- </action>
-
- <action type="metadata" name="nameCol" default="0">
- <option type="from_param" name="columnList" column="0" offset="0"><!-- nameCol is 0-->
- <filter type="multiple_splitter" column="0" separator=","/>
- <filter type="column_strip" column="0"/><!-- get rid of all external whitespace -->
- <filter type="string_function" column="0" name="lower" />
- <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
- <filter type="column_strip" column="0" strip="c"/><!-- get rid of c's -->
- <filter type="insert_column" value="1" iterate="True" column="0"/>
- <filter type="boolean" column="1" cast="int" />
- <filter type="metadata_value" ref="input" name="nameCol" column="1" />
- </option>
- </action>
-
- <action type="metadata" name="strandCol" default="0">
- <option type="from_param" name="columnList" column="0" offset="0"><!-- strandCol is 0-->
- <filter type="multiple_splitter" column="0" separator=","/>
- <filter type="column_strip" column="0"/><!-- get rid of all external whitespace -->
- <filter type="string_function" column="0" name="lower" />
- <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
- <filter type="column_strip" column="0" strip="c"/><!-- get rid of c's -->
- <filter type="insert_column" value="1" iterate="True" column="0"/>
- <filter type="boolean" column="1" cast="int" />
- <filter type="metadata_value" ref="input" name="strandCol" column="1" />
- </option>
- </action>
- </when>
- </conditional>
-
- </when>
- </conditional>
- </when>
- </conditional>
- </actions>
- </data>
- </outputs>
- <tests>
- <test>
- <param name="columnList" value="c1,c4,c2,c3"/>
- <param name="delimiter" value="T"/>
- <param name="input" value="1.bed"/>
- <output name="out_file1" file="eq-cut.dat"/>
- </test>
- </tests>
- <help>
-
-.. class:: warningmark
-
-**WARNING: This tool breaks column assignments.** To re-establish column assignments run the tools and click on the pencil icon in the latest history item.
-
-.. class:: infomark
-
-The output of this tool is always in tabular format (e.g., if your original delimiters are commas, they will be replaced with tabs). For example:
-
- Cutting columns 1 and 3 from::
-
- apple,is,good
- windows,is,bad
-
- will give::
-
- apple good
- windows bad
-
------
-
-**What it does**
-
-This tool selects (cuts out) specified columns from the dataset.
-
-- Columns are specified as **c1**, **c2**, and so on. Column count begins with **1**
-- Columns can be specified in any order (e.g., **c2,c1,c6**)
-- If you specify more columns than actually present - empty spaces will be filled with dots
-
------
-
-**Example**
-
-Input dataset (six columns: c1, c2, c3, c4, c5, and c6)::
-
- chr1 10 1000 gene1 0 +
- chr2 100 1500 gene2 0 +
-
-**cut** on columns "**c1,c4,c6**" will return::
-
- chr1 gene1 +
- chr2 gene2 +
-
-**cut** on columns "**c6,c5,c4,c1**" will return::
-
- + 0 gene1 chr1
- + 0 gene2 chr2
-
-
-**cut** on columns "**c8,c7,c4**" will return::
-
- . . gene1
- . . gene2
-
-
-</help>
-</tool>
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/fixedValueColumn.pl
--- a/tools/filters/fixedValueColumn.pl
+++ /dev/null
@@ -1,34 +0,0 @@
-#! /usr/bin/perl -w
-
-use strict;
-use warnings;
-
-# fixedValueColumn.pl $input $out_file1 "expression" "iterate [yes|no]"
-
-my ($input, $out_file1, $expression, $iterate) = @ARGV;
-my $i = 0;
-my $numeric = 0;
-
-die "Check arguments\n" unless @ARGV == 4;
-
-open (DATA, "<$input") or die "Cannot open $input:$!\n";
-open (OUT, ">$out_file1") or die "Cannot create $out_file1:$!\n";
-
-if ($expression =~ m/^\d+$/) {
- $numeric = 1;
- $i = $expression;
-}
-
-while (<DATA>) {
- chop;
- if ($iterate eq "no") {
- print OUT "$_\t$expression\n";
- } else {
- print OUT "$_\t$i\n" if $numeric == 1;
- print OUT "$_\t$expression-$i\n" if $numeric == 0;
- ++$i;
- }
-}
-
-close DATA;
-close OUT;
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/fixedValueColumn.xml
--- a/tools/filters/fixedValueColumn.xml
+++ /dev/null
@@ -1,61 +0,0 @@
-<tool id="addValue" name="Add column">
- <description>to an existing dataset</description>
- <command interpreter="perl">fixedValueColumn.pl $input $out_file1 "$exp" $iterate</command>
- <inputs>
- <param name="exp" size="20" type="text" value="1" label="Add this value"/>
- <param format="tabular" name="input" type="data" label="to Dataset" help="Dataset missing? See TIP below" />
- <param name="iterate" type="select" label="Iterate?">
- <option value="no">NO</option>
- <option value="yes">YES</option>
- </param>
- </inputs>
- <outputs>
- <data format="input" name="out_file1" metadata_source="input"/>
- </outputs>
- <tests>
- <test>
- <param name="exp" value="1"/>
- <param name="input" value="1.bed"/>
- <param name="iterate" value="no"/>
- <output name="out_file1" file="eq-addvalue.dat"/>
- </test>
- </tests>
- <help>
-
-.. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
-
------
-
-**What it does**
-
-You can enter any value and it will be added as a new column to your dataset
-
------
-
-**Example**
-
-If you original data looks like this::
-
- chr1 10 100 geneA
- chr2 200 300 geneB
- chr2 400 500 geneC
-
-Typing **+** in the text box will generate::
-
- chr1 10 100 geneA +
- chr2 200 300 geneB +
- chr2 400 500 geneC +
-
-
-You can also add line numbers by selecting **Iterate: YES**. In this case if you enter **1** in the text box you will get::
-
- chr1 10 100 geneA 1
- chr2 200 300 geneB 2
- chr2 400 500 geneC 3
-
-
-
-</help>
-</tool>
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/headWrapper.pl
--- a/tools/filters/headWrapper.pl
+++ /dev/null
@@ -1,19 +0,0 @@
-#! /usr/bin/perl -w
-
-use strict;
-use warnings;
-
-# a wrapper for head for use in galaxy
-# headWrapper.pl [filename] [# lines to show] [output]
-
-die "Check arguments" unless @ARGV == 3;
-die "Line number must be an integer\n" unless $ARGV[1]=~ m/^\d+$/;
-
-open (OUT, ">$ARGV[2]") or die "Cannot create $ARGV[2]:$!\n";
-open (HEAD, "head -n $ARGV[1] $ARGV[0]|") or die "Cannot run head:$!\n";
-while (<HEAD>) {
- print OUT;
-}
-close OUT;
-close HEAD;
-
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/headWrapper.xml
--- a/tools/filters/headWrapper.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<tool id="Show beginning1" name="Select first">
- <description>lines from a dataset</description>
- <command interpreter="perl">headWrapper.pl $input $lineNum $out_file1</command>
- <inputs>
- <param name="lineNum" size="5" type="integer" value="10" label="Select first" help="lines"/>
- <param format="txt" name="input" type="data" label="from"/>
- </inputs>
- <outputs>
- <data format="input" name="out_file1" metadata_source="input"/>
- </outputs>
- <tests>
- <test>
- <param name="lineNum" value="10"/>
- <param name="input" value="1.bed"/>
- <output name="out_file1" file="eq-showbeginning.dat"/>
- </test>
- </tests>
- <help>
-
-**What it does**
-
-This tool outputs specified number of lines from the **beginning** of a dataset
-
------
-
-**Example**
-
-Selecting 2 lines from this::
-
- chr7 56632 56652 D17003_CTCF_R6 310 +
- chr7 56736 56756 D17003_CTCF_R7 354 +
- chr7 56761 56781 D17003_CTCF_R4 220 +
- chr7 56772 56792 D17003_CTCF_R7 372 +
- chr7 56775 56795 D17003_CTCF_R4 207 +
-
-will produce::
-
- chr7 56632 56652 D17003_CTCF_R6 310 +
- chr7 56736 56756 D17003_CTCF_R7 354 +
-
- </help>
-</tool>
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/mergeCols.py
--- a/tools/filters/mergeCols.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import sys, re
-
-def stop_err( msg ):
- sys.stderr.write( msg )
- sys.exit()
-
-def __main__():
- try:
- infile = open ( sys.argv[1], 'r')
- outfile = open ( sys.argv[2], 'w')
- except:
- stop_err( 'Cannot open or create a file\n' )
-
- if len( sys.argv ) < 4:
- stop_err( 'No columns to merge' )
- else:
- cols = sys.argv[3:]
-
- skipped_lines = 0
-
- for line in infile:
- line = line.rstrip( '\r\n' )
- if line and not line.startswith( '#' ):
- fields = line.split( '\t' )
- line += '\t'
- for col in cols:
- try:
- line += fields[ int( col ) -1 ]
- except:
- skipped_lines += 1
-
- print >>outfile, line
-
- if skipped_lines > 0:
- print 'Skipped %d invalid lines' % skipped_lines
-
-if __name__ == "__main__" : __main__()
\ No newline at end of file
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/mergeCols.xml
--- a/tools/filters/mergeCols.xml
+++ /dev/null
@@ -1,63 +0,0 @@
-<tool id="mergeCols1" name="Merge Columns" version="1.0.1">
- <description>together</description>
- <command interpreter="python">
- mergeCols.py
- $input1
- $out_file1
- $col1
- $col2
- #for $col in $columns
- ${col.datacol}
- #end for
- </command>
- <inputs>
- <param format="tabular" name="input1" type="data" label="Select data" help="Dataset missing? See TIP below."/>
- <param name="col1" label="Merge column" type="data_column" data_ref="input1" />
- <param name="col2" label="with column" type="data_column" data_ref="input1" help="Need to add more columns? Use controls below."/>
- <repeat name="columns" title="Columns">
- <param name="datacol" label="Add column" type="data_column" data_ref="input1" />
- </repeat>
- </inputs>
- <outputs>
- <data format="tabular" name="out_file1" />
- </outputs>
- <tests>
- <test>
- <param name="input1" value="1.bed"/>
- <param name="col1" value="4" />
- <param name="col2" value="1" />
- <param name="datacol" value="6" />
- <output name="out_file1" file="mergeCols.dat"/>
- </test>
- </tests>
-<help>
-
-.. class:: infomark
-
-**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
-
------
-
-**What it does**
-
-This tool merges columns together. Any number of valid columns can be merged in any order.
-
------
-
-**Example**
-
-Input dataset (five columns: c1, c2, c3, c4, and c5)::
-
- 1 10 1000 gene1 chr
- 2 100 1500 gene2 chr
-
-merging columns "**c5,c1**" will return::
-
- 1 10 1000 gene1 chr chr1
- 2 100 1500 gene2 chr chr2
-
-.. class:: warningmark
-
-Note that all original columns are preserved and the result of merge is added as the rightmost column.
- </help>
-</tool>
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/remove_beginning.pl
--- a/tools/filters/remove_beginning.pl
+++ /dev/null
@@ -1,33 +0,0 @@
-#! /usr/bin/perl -w
-
-use strict;
-use warnings;
-
-# Removes the specified number of lines from the beginning of the file.
-# remove_beginning.pl [input] [num_lines] [output]
-
-die "Check arguments" unless @ARGV == 3;
-
-my $inputfile = $ARGV[0];
-my $num_lines = $ARGV[1];
-my $outputfile = $ARGV[2];
-
-my $curCount=0;
-
-my $fhIn;
-open ($fhIn, "< $inputfile") or die "Cannot open source file";
-
-my $fhOut;
-open ($fhOut, "> $outputfile");
-
-while (<$fhIn>)
-{
- $curCount++;
- if ($curCount<=$num_lines)
- {
- next;
- }
- print $fhOut $_;
-}
-close ($fhIn) or die "Cannot close source file";
-close ($fhOut) or die "Cannot close output file";
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/remove_beginning.xml
--- a/tools/filters/remove_beginning.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<tool id="Remove beginning1" name="Remove beginning">
- <description>of a file</description>
- <command interpreter="perl">remove_beginning.pl $input $num_lines $out_file1</command>
- <inputs>
- <param name="num_lines" size="5" type="integer" value="1" label="Remove first" help="lines"/>
- <param format="txt" name="input" type="data" label="from"/>
- </inputs>
- <outputs>
- <data format="input" name="out_file1" metadata_source="input"/>
- </outputs>
- <tests>
- <test>
- <param name="num_lines" value="5"/>
- <param name="input" value="1.bed"/>
- <output name="out_file1" file="eq-removebeginning.dat"/>
- </test>
- </tests>
- <help>
-
-**What it does**
-
-This tool removes a specified number of lines from the beginning of a dataset.
-
------
-
-**Example**
-
-Input File::
-
- chr7 56632 56652 D17003_CTCF_R6 310 +
- chr7 56736 56756 D17003_CTCF_R7 354 +
- chr7 56761 56781 D17003_CTCF_R4 220 +
- chr7 56772 56792 D17003_CTCF_R7 372 +
- chr7 56775 56795 D17003_CTCF_R4 207 +
-
-After removing the first 3 lines the dataset will look like this::
-
- chr7 56772 56792 D17003_CTCF_R7 372 +
- chr7 56775 56795 D17003_CTCF_R4 207 +
-
-</help>
-</tool>
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/tailWrapper.pl
--- a/tools/filters/tailWrapper.pl
+++ /dev/null
@@ -1,19 +0,0 @@
-#! /usr/bin/perl -w
-
-use strict;
-use warnings;
-
-# a wrapper for tail for use in galaxy
-# lessWrapper.pl [filename] [# lines to show] [output]
-
-die "Check arguments" unless @ARGV == 3;
-die "Line number should be an integer\n" unless $ARGV[1]=~ m/^\d+$/;
-
-open (OUT, ">$ARGV[2]") or die "Cannot create $ARGV[2]:$!\n";
-open (TAIL, "tail -n $ARGV[1] $ARGV[0]|") or die "Cannot run tail:$!\n";
-while (<TAIL>) {
- print OUT;
-}
-close OUT;
-close TAIL;
-
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/tailWrapper.xml
--- a/tools/filters/tailWrapper.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<tool id="Show tail1" name="Select last">
- <description>lines from a dataset</description>
- <command interpreter="perl">tailWrapper.pl $input $lineNum $out_file1</command>
- <inputs>
- <param name="lineNum" size="5" type="integer" value="10" label="Select last" help="lines"/>
- <param format="txt" name="input" type="data" label="from"/>
- </inputs>
- <outputs>
- <data format="input" name="out_file1" metadata_source="input"/>
- </outputs>
- <tests>
- <test>
- <param name="lineNum" value="10"/>
- <param name="input" value="1.bed"/>
- <output name="out_file1" file="eq-showtail.dat"/>
- </test>
- </tests>
- <help>
-
-**What it does**
-
-This tool outputs specified number of lines from the **end** of a dataset
-
------
-
-**Example**
-
-- Input File::
-
- chr7 57134 57154 D17003_CTCF_R7 356 -
- chr7 57247 57267 D17003_CTCF_R4 207 +
- chr7 57314 57334 D17003_CTCF_R5 269 +
- chr7 57341 57361 D17003_CTCF_R7 375 +
- chr7 57457 57477 D17003_CTCF_R3 188 +
-
-- Show last two lines of above file. The result is::
-
- chr7 57341 57361 D17003_CTCF_R7 375 +
- chr7 57457 57477 D17003_CTCF_R3 188 +
-
- </help>
-</tool>
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/trimmer.py
--- a/tools/filters/trimmer.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import optparse
-
-def stop_err( msg ):
- sys.stderr.write( msg )
- sys.exit()
-
-def main():
- usage = """%prog [options]
-
-options (listed below) default to 'None' if omitted
- """
- parser = optparse.OptionParser(usage=usage)
-
- parser.add_option(
- '-a','--ascii',
- dest='ascii',
- action='store_true',
- default = False,
-        help='Use ascii codes to define ignored beginnings instead of raw characters')
-
- parser.add_option(
- '-q','--fastq',
- dest='fastq',
- action='store_true',
- default = False,
-        help='The input data in fastq format. If selected, the script skips every even line since they contain sequence ids')
-
- parser.add_option(
- '-i','--ignore',
- dest='ignore',
-        help='A comma separated list of ignored beginnings (e.g., ">,@"), or their ascii codes (e.g., "60,42") if option -a is enabled')
-
- parser.add_option(
- '-s','--start',
- dest='start',
- default = '0',
- help='Trim from beginning to here (1-based)')
-
- parser.add_option(
- '-e','--end',
- dest='end',
- default = '0',
-        help='Trim from here to the end (1-based)')
-
- parser.add_option(
- '-f','--file',
- dest='input_txt',
- default = False,
- help='Name of file to be chopped. STDIN is default')
-
- parser.add_option(
- '-c','--column',
- dest='col',
- default = '0',
- help='Column to chop. If 0 = chop the whole line')
-
-
- options, args = parser.parse_args()
- invalid_starts = []
-
- if options.input_txt:
- infile = open ( options.input_txt, 'r')
- else:
- infile = sys.stdin
-
- if options.ignore and options.ignore != "None":
- invalid_starts = options.ignore.split(',')
-
- if options.ascii and options.ignore and options.ignore != "None":
- for i, item in enumerate( invalid_starts ):
- invalid_starts[i] = chr( int( item ) )
-
- col = int( options.col )
-
- for i, line in enumerate( infile ):
- line = line.rstrip( '\r\n' )
- if line:
-
- if options.fastq and i % 2 == 0:
- print line
- continue
-
-
- if line[0] not in invalid_starts:
- if col == 0:
- if int( options.end ) > 0:
- line = line[ int( options.start )-1 : int( options.end ) ]
- else:
- line = line[ int( options.start )-1 : ]
- else:
- fields = line.split( '\t' )
- if col-1 > len( fields ):
- stop_err('Column %d does not exist. Check input parameters\n' % col)
-
- if int( options.end ) > 0:
- fields[col - 1] = fields[col - 1][ int( options.start )-1 : int( options.end ) ]
- else:
- fields[col - 1] = fields[col - 1][ int( options.start )-1 : ]
- line = '\t'.join(fields)
- print line
-
-if __name__ == "__main__": main()
-
diff -r 63fc9cbe381d2719134f87c019911565e3a934a1 -r c68762fcb5a521007d89cb01455a0ad409011528 tools/filters/trimmer.xml
--- a/tools/filters/trimmer.xml
+++ /dev/null
@@ -1,120 +0,0 @@
-<tool id="trimmer" name="Trim" version="0.0.1">
- <description>leading or trailing characters</description>
- <command interpreter="python">
- trimmer.py -a -f $input1 -c $col -s $start -e $end -i $ignore $fastq > $out_file1
- </command>
- <inputs>
- <param format="tabular,txt" name="input1" type="data" label="this dataset"/>
- <param name="col" type="integer" value="0" label="Trim this column only" help="0 = process entire line" />
- <param name="start" type="integer" size="10" value="1" label="Trim from the beginning to this position" help="1 = do not trim the beginning"/>
- <param name="end" type="integer" size="10" value="0" label="Remove everything from this position to the end" help="0 = do not trim the end"/>
- <param name="fastq" type="select" label="Is input dataset in fastq format?" help="If set to YES, the tool will not trim evenly numbered lines (0, 2, 4, etc...)">
- <option selected="true" value="">No</option>
- <option value="-q">Yes</option>
- </param>
- <param name="ignore" type="select" display="checkboxes" multiple="True" label="Ignore lines beginning with these characters" help="lines beginning with these are not trimmed">
- <option value="62">></option>
- <option value="64">@</option>
- <option value="43">+</option>
- <option value="60"><</option>
- <option value="42">*</option>
- <option value="45">-</option>
- <option value="61">=</option>
- <option value="124">|</option>
- <option value="63">?</option>
- <option value="36">$</option>
- <option value="46">.</option>
- <option value="58">:</option>
- <option value="38">&</option>
- <option value="37">%</option>
- <option value="94">^</option>
- <option value="35">#</option>
- </param>
- </inputs>
- <outputs>
- <data name="out_file1" format="input" metadata_source="input1"/>
- </outputs>
- <tests>
- <test>
- <param name="input1" value="trimmer_tab_delimited.dat"/>
- <param name="col" value="0"/>
- <param name="start" value="1"/>
- <param name="end" value="13"/>
- <param name="ignore" value="62"/>
- <param name="fastq" value="No"/>
- <output name="out_file1" file="trimmer_a_f_c0_s1_e13_i62.dat"/>
- </test>
- <test>
- <param name="input1" value="trimmer_tab_delimited.dat"/>
- <param name="col" value="2"/>
- <param name="start" value="1"/>
- <param name="end" value="2"/>
- <param name="ignore" value="62"/>
- <param name="fastq" value="No"/>
- <output name="out_file1" file="trimmer_a_f_c2_s1_e2_i62.dat"/>
- </test>
-
- </tests>
-
- <help>
-
-
-**What it does**
-
-Trims specified number of characters from a dataset or its field (if dataset is tab-delimited).
-
------
-
-**Example 1**
-
-Trimming this dataset::
-
- 1234567890
- abcdefghijk
-
-by setting **Trim from the beginning to this position** to *2* and **Remove everything from this position to the end** to *6* will produce::
-
- 23456
- bcdef
-
------
-
-**Example 2**
-
-Trimming column 2 of this dataset::
-
- abcde 12345 fghij 67890
- fghij 67890 abcde 12345
-
-by setting **Trim content of this column only** to *2*, **Trim from the beginning to this position** to *2*, and **Remove everything from this position to the end** to *4* will produce::
-
- abcde 234 fghij 67890
- fghij 789 abcde 12345
-
------
-
-**Trimming FASTQ datasets**
-
-This tool can be used to trim sequences and quality strings in fastq datasets. This is done by selecting *Yes* from the **Is input dataset in fastq format?** dropdown. If set to *Yes*, the tool will skip all even numbered lines (see warning below). For example, trimming last 5 bases of this dataset::
-
- @081017-and-081020:1:1:1715:1759
- GGACTCAGATAGTAATCCACGCTCCTTTAAAATATC
- +
- II#IIIIIII$5+.(9IIIIIII$%*$G$A31I&&B
-
-can be done by setting **Remove everything from this position to the end** to 31::
-
- @081017-and-081020:1:1:1715:1759
- GGACTCAGATAGTAATCCACGCTCCTTTAAA
- +
- II#IIIIIII$5+.(9IIIIIII$%*$G$A3
-
-**Note** that headers are skipped.
-
-.. class:: warningmark
-
-**WARNING:** This tool will only work on properly formatted fastq datasets where (1) each read and quality string occupy one line and (2) '@' (read header) and "+" (quality header) lines are evenly numbered like in the above example.
-
-
- </help>
-</tool>
https://bitbucket.org/galaxy/galaxy-central/changeset/2f45da781f92/
changeset: 2f45da781f92
user: inithello
date: 2012-12-07 22:08:58
summary: Remove migrated tools from Galaxy main tool configuration.
affected #: 1 file
diff -r c68762fcb5a521007d89cb01455a0ad409011528 -r 2f45da781f926f667dd90ad23d23fd123716a535 tool_conf.xml.main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -33,21 +33,11 @@
<tool file="extract/liftOver_wrapper.xml" /></section><section name="Text Manipulation" id="textutil">
- <tool file="filters/fixedValueColumn.xml" /><tool file="stats/column_maker.xml" /><tool file="filters/catWrapper.xml" />
- <tool file="filters/condense_characters.xml" />
- <tool file="filters/convert_characters.xml" />
- <tool file="filters/mergeCols.xml" /><tool file="filters/CreateInterval.xml" />
- <tool file="filters/cutWrapper.xml" />
- <tool file="filters/changeCase.xml" /><tool file="filters/pasteWrapper.xml" />
- <tool file="filters/remove_beginning.xml" /><tool file="filters/randomlines.xml" />
- <tool file="filters/headWrapper.xml" />
- <tool file="filters/tailWrapper.xml" />
- <tool file="filters/trimmer.xml" /><tool file="filters/wc_gnu.xml" /><tool file="filters/secure_hash_message_digest.xml" /></section>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0