2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ef3816ca5e0c/
Changeset: ef3816ca5e0c
User: Dave Bouvier
Date: 2013-06-04 17:05:17
Summary: Fix server error when displaying a tool dependency installation status when the tool dependency contains certain unicode characters in the displayed error message.
Affected #: 1 file
diff -r 6ae2d6a466b8f721aa39d3e183b36594d7b4e235 -r ef3816ca5e0cc88616c34144af00b4fcce5bf0d0 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1240,7 +1240,7 @@
if text:
if to_html:
try:
- escaped_text = text.decode( 'utf-8' )
+ escaped_text = unicodify( text )
escaped_text = escaped_text.encode( 'ascii', 'ignore' )
escaped_text = str( markupsafe.escape( escaped_text ) )
except UnicodeDecodeError, e:
https://bitbucket.org/galaxy/galaxy-central/commits/2a116647d7d7/
Changeset: 2a116647d7d7
Branch: stable
User: Dave Bouvier
Date: 2013-06-04 17:05:17
Summary: Fix server error when displaying a tool dependency installation status when the tool dependency contains certain unicode characters in the displayed error message.
Affected #: 1 file
diff -r cea3ddf6cddaac2f8703598307449ffc13240efc -r 2a116647d7d75d5088c43fe3ac1d10bd9e66a08c lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1214,7 +1214,7 @@
if text:
if to_html:
try:
- escaped_text = text.decode( 'utf-8' )
+ escaped_text = unicodify( text )
escaped_text = escaped_text.encode( 'ascii', 'ignore' )
escaped_text = str( markupsafe.escape( escaped_text ) )
except UnicodeDecodeError, e:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/cea3ddf6cdda/
Changeset: cea3ddf6cdda
Branch: stable
User: natefoo
Date: 2013-06-03 22:17:31
Summary: Update tag for stable_2013.06.03
Affected #: 1 file
diff -r 524f246ca85395082719ae7a6ff72260d7ad5612 -r cea3ddf6cddaac2f8703598307449ffc13240efc .hgtags
--- a/.hgtags
+++ b/.hgtags
@@ -2,4 +2,4 @@
1c717491139269651bb59687563da9410b84c65d release_2013.02.08
75f09617abaadbc8cc732bb8ee519decaeb56ea7 release_2013.04.01
2cc8d10988e03257dc7b97f8bb332c7df745d1dd security_2013.04.08
-58811a78af8a09b77405dc343a5b2795f4cc6e88 release_2013.06.03
+524f246ca85395082719ae7a6ff72260d7ad5612 release_2013.06.03
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a243d2dbca9d/
Changeset: a243d2dbca9d
Branch: next-stable
User: greg
Date: 2013-06-03 21:40:37
Summary: Fix for rendering xml elements that were not loaded using normal parsing methods.
Affected #: 2 files
diff -r 09d45fa9a84c1a91e7b14b94186bd8128883ddc4 -r a243d2dbca9d88724d03848a4136d9ae88d0b174 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -193,7 +193,7 @@
os.write( fd, '<?xml version="1.0"?>\n' )
os.write( fd, '<toolbox tool_path="%s">\n' % str( tool_path ) )
for elem in config_elems:
- os.write( fd, '%s' % xml_util.xml_to_string( elem ) )
+ os.write( fd, '%s' % xml_util.xml_to_string( elem, use_indent=True ) )
os.write( fd, '</toolbox>\n' )
os.close( fd )
shutil.move( filename, os.path.abspath( config_filename ) )
diff -r 09d45fa9a84c1a91e7b14b94186bd8128883ddc4 -r a243d2dbca9d88724d03848a4136d9ae88d0b174 lib/tool_shed/util/xml_util.py
--- a/lib/tool_shed/util/xml_util.py
+++ b/lib/tool_shed/util/xml_util.py
@@ -41,6 +41,28 @@
fh.close()
return tmp_filename
+def indent( elem, level=0 ):
+ """
+ Prints an XML tree with each node indented according to its depth. This method is used to print the shed tool config (e.g., shed_tool_conf.xml
+ from the in-memory list of config_elems because each config_elem in the list may be a hierarchical structure that was not created using the
+ parse_xml() method below, and so will not be properly written with xml.etree.ElementTree.tostring() without manually indenting the tree first.
+ """
+ i = "\n" + level * " "
+ if len( elem ):
+ if not elem.text or not elem.text.strip():
+ elem.text = i + " "
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ for child in elem:
+ indent( child, level+1 )
+ if not child.tail or not child.tail.strip():
+ child.tail = i
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ else:
+ if level and ( not elem.tail or not elem.tail.strip() ):
+ elem.tail = i
+
def parse_xml( file_name ):
"""Returns a parsed xml tree with comments intact."""
error_message = ''
@@ -64,9 +86,15 @@
fobj.close()
return tree, error_message
-def xml_to_string( elem, encoding='utf-8' ):
- if using_python_27:
- xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
+def xml_to_string( elem, encoding='utf-8', use_indent=False ):
+ if elem:
+ if use_indent:
+ # We were called from suc.config_elems_to_xml_file(), so set the level to 1 since level 0 is the <toolbox> tag set.
+ indent( elem, level=1 )
+ if using_python_27:
+ xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
+ else:
+ xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
else:
- xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
+ xml_str = ''
return xml_str
https://bitbucket.org/galaxy/galaxy-central/commits/8cbb23ed305e/
Changeset: 8cbb23ed305e
User: greg
Date: 2013-06-03 21:40:58
Summary: Merged from next-stable
Affected #: 2 files
diff -r 75bea1afc2e3e3a49c95a3540a27be165766aaac -r 8cbb23ed305e6803da733eb7be74bab779ee9ea4 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -193,7 +193,7 @@
os.write( fd, '<?xml version="1.0"?>\n' )
os.write( fd, '<toolbox tool_path="%s">\n' % str( tool_path ) )
for elem in config_elems:
- os.write( fd, '%s' % xml_util.xml_to_string( elem ) )
+ os.write( fd, '%s' % xml_util.xml_to_string( elem, use_indent=True ) )
os.write( fd, '</toolbox>\n' )
os.close( fd )
shutil.move( filename, os.path.abspath( config_filename ) )
diff -r 75bea1afc2e3e3a49c95a3540a27be165766aaac -r 8cbb23ed305e6803da733eb7be74bab779ee9ea4 lib/tool_shed/util/xml_util.py
--- a/lib/tool_shed/util/xml_util.py
+++ b/lib/tool_shed/util/xml_util.py
@@ -41,6 +41,28 @@
fh.close()
return tmp_filename
+def indent( elem, level=0 ):
+ """
+ Prints an XML tree with each node indented according to its depth. This method is used to print the shed tool config (e.g., shed_tool_conf.xml
+ from the in-memory list of config_elems because each config_elem in the list may be a hierarchical structure that was not created using the
+ parse_xml() method below, and so will not be properly written with xml.etree.ElementTree.tostring() without manually indenting the tree first.
+ """
+ i = "\n" + level * " "
+ if len( elem ):
+ if not elem.text or not elem.text.strip():
+ elem.text = i + " "
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ for child in elem:
+ indent( child, level+1 )
+ if not child.tail or not child.tail.strip():
+ child.tail = i
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ else:
+ if level and ( not elem.tail or not elem.tail.strip() ):
+ elem.tail = i
+
def parse_xml( file_name ):
"""Returns a parsed xml tree with comments intact."""
error_message = ''
@@ -64,9 +86,15 @@
fobj.close()
return tree, error_message
-def xml_to_string( elem, encoding='utf-8' ):
- if using_python_27:
- xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
+def xml_to_string( elem, encoding='utf-8', use_indent=False ):
+ if elem:
+ if use_indent:
+ # We were called from suc.config_elems_to_xml_file(), so set the level to 1 since level 0 is the <toolbox> tag set.
+ indent( elem, level=1 )
+ if using_python_27:
+ xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
+ else:
+ xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
else:
- xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
+ xml_str = ''
return xml_str
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/09d45fa9a84c/
Changeset: 09d45fa9a84c
Branch: next-stable
User: Dave Bouvier
Date: 2013-06-03 18:33:02
Summary: Fix for installing tool dependencies when set_environment actions are distributed between multiple action tag groups.
Affected #: 1 file
diff -r 6a8584218f61c5cf7aa129677a7548c610cd0cca -r 09d45fa9a84c1a91e7b14b94186bd8128883ddc4 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -130,10 +130,10 @@
if package_install_version == '1.0':
# Since the required tool dependency is installed for a repository dependency, we first need to inspect the <actions> tag set to find
# the <action type="set_environment"> tag.
+ env_var_dicts = []
for actions_elem in package_elem:
for action_elem in actions_elem:
action_type = action_elem.get( 'type', 'shell_command' )
- env_var_dicts = []
if action_type == 'set_environment':
# <action type="set_environment">
# <environment_variable name="PYTHONPATH" action="append_to">$INSTALL_DIR/lib/python</environment_variable>
@@ -143,7 +143,8 @@
if env_elem.tag == 'environment_variable':
env_var_dict = common_util.create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir )
if env_var_dict:
- env_var_dicts.append( env_var_dict )
+ if env_var_dict not in env_var_dicts:
+ env_var_dicts.append( env_var_dict )
elif action_type == 'setup_virtualenv':
# Add the virtualenv's site-packages to PYTHONPATH and bin to PATH. This is a bit hackish.
site_packages_command = "%s -c 'import os, sys; print os.path.join(sys.prefix, \"lib\", \"python\" + sys.version[:3], \"site-packages\")'" % os.path.join( install_dir, "venv", "bin", "python" )
@@ -155,40 +156,40 @@
else:
env_var_dicts.append( dict( name="PYTHONPATH", action="prepend_to", value=output.stdout ) )
env_var_dicts.append( dict( name="PATH", action="prepend_to", value=os.path.join( install_dir, 'venv', 'bin' ) ) )
- if env_var_dicts:
- if required_repository.status in [ app.model.ToolShedRepository.installation_status.INSTALLED,
- app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
- # Handle the case where we have an installed required repository due to the prior_installation_required = True
- # setting in the received tool_shed_repository's tool_dependencies.xml file and the required repository's
- # tool_dependencies.xml file may include the use of the $ENV[] variable inheritance feature. To handle this,
- # we will replace the current "value" entries in each env_var_dict with the actual path taken from the env.sh
- # file generated for the installed required repository. Each env_var_dict currently looks something like this:
- # {'action': 'append_to', 'name': 'LD_LIBRARY_PATH', 'value': '$BOOST_ROOT_DIR/lib/'}
- # We'll read the contents of the received required_repository's env.sh file and replace the 'value' entry of each env_var_dict
- # with the associated value in the env.sh file.
- new_env_var_dicts = []
- env_sh_file_dir = get_tool_dependency_install_dir( app=app,
- repository_name=required_repository.name,
- repository_owner=required_repository.owner,
- repository_changeset_revision=required_repository.installed_changeset_revision,
- tool_dependency_type='package',
- tool_dependency_name=package_name,
- tool_dependency_version=package_version )
- env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
- for i, line in enumerate( open( env_sh_file_path, 'r' ) ):
- env_var_dict = env_var_dicts[ i ]
- action = env_var_dict.get( 'action', None )
- name = env_var_dict.get( 'name', None )
- value = env_var_dict.get( 'value', None )
- if action and name and value:
- new_value = parse_env_shell_entry( action, name, value, line )
- env_var_dict[ 'value' ] = new_value
- new_env_var_dicts.append( env_var_dict )
- action_dict[ 'environment_variable' ] = new_env_var_dicts
- else:
- action_dict[ 'environment_variable' ] = env_var_dicts
- actions.append( ( 'set_environment', action_dict ) )
- return tool_dependency, actions
+ if env_var_dicts:
+ if required_repository.status in [ app.model.ToolShedRepository.installation_status.INSTALLED,
+ app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ # Handle the case where we have an installed required repository due to the prior_installation_required = True
+ # setting in the received tool_shed_repository's tool_dependencies.xml file and the required repository's
+ # tool_dependencies.xml file may include the use of the $ENV[] variable inheritance feature. To handle this,
+ # we will replace the current "value" entries in each env_var_dict with the actual path taken from the env.sh
+ # file generated for the installed required repository. Each env_var_dict currently looks something like this:
+ # {'action': 'append_to', 'name': 'LD_LIBRARY_PATH', 'value': '$BOOST_ROOT_DIR/lib/'}
+ # We'll read the contents of the received required_repository's env.sh file and replace the 'value' entry of each env_var_dict
+ # with the associated value in the env.sh file.
+ new_env_var_dicts = []
+ env_sh_file_dir = get_tool_dependency_install_dir( app=app,
+ repository_name=required_repository.name,
+ repository_owner=required_repository.owner,
+ repository_changeset_revision=required_repository.installed_changeset_revision,
+ tool_dependency_type='package',
+ tool_dependency_name=package_name,
+ tool_dependency_version=package_version )
+ env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
+ for i, line in enumerate( open( env_sh_file_path, 'r' ) ):
+ env_var_dict = env_var_dicts[ i ]
+ action = env_var_dict.get( 'action', None )
+ name = env_var_dict.get( 'name', None )
+ value = env_var_dict.get( 'value', None )
+ if action and name and value:
+ new_value = parse_env_shell_entry( action, name, value, line )
+ env_var_dict[ 'value' ] = new_value
+ new_env_var_dicts.append( env_var_dict )
+ action_dict[ 'environment_variable' ] = new_env_var_dicts
+ else:
+ action_dict[ 'environment_variable' ] = env_var_dicts
+ actions.append( ( 'set_environment', action_dict ) )
+ return tool_dependency, actions
else:
raise NotImplementedError( 'Only install version 1.0 is currently supported (i.e., change your tag to be <install version="1.0">).' )
return None, actions
@@ -464,7 +465,12 @@
else:
log.debug( "Unsupported action type '%s'. Not proceeding." % str( action_type ) )
raise Exception( "Unsupported action type '%s' in tool dependency definition." % str( action_type ) )
- actions.append( ( action_type, action_dict ) )
+ action_tuple = ( action_type, action_dict )
+ if action_type == 'set_environment':
+ if action_tuple not in actions:
+ actions.append( action_tuple )
+ else:
+ actions.append( action_tuple )
if actions:
actions_dict[ 'actions' ] = actions
if proprietary_fabfile_path:
https://bitbucket.org/galaxy/galaxy-central/commits/75bea1afc2e3/
Changeset: 75bea1afc2e3
User: Dave Bouvier
Date: 2013-06-03 18:35:52
Summary: Merge in next-stable.
Affected #: 1 file
diff -r 83213accd759e752538c025275557d3dfc5d1433 -r 75bea1afc2e3e3a49c95a3540a27be165766aaac lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -130,10 +130,10 @@
if package_install_version == '1.0':
# Since the required tool dependency is installed for a repository dependency, we first need to inspect the <actions> tag set to find
# the <action type="set_environment"> tag.
+ env_var_dicts = []
for actions_elem in package_elem:
for action_elem in actions_elem:
action_type = action_elem.get( 'type', 'shell_command' )
- env_var_dicts = []
if action_type == 'set_environment':
# <action type="set_environment">
# <environment_variable name="PYTHONPATH" action="append_to">$INSTALL_DIR/lib/python</environment_variable>
@@ -143,7 +143,8 @@
if env_elem.tag == 'environment_variable':
env_var_dict = common_util.create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir )
if env_var_dict:
- env_var_dicts.append( env_var_dict )
+ if env_var_dict not in env_var_dicts:
+ env_var_dicts.append( env_var_dict )
elif action_type == 'setup_virtualenv':
# Add the virtualenv's site-packages to PYTHONPATH and bin to PATH. This is a bit hackish.
site_packages_command = "%s -c 'import os, sys; print os.path.join(sys.prefix, \"lib\", \"python\" + sys.version[:3], \"site-packages\")'" % os.path.join( install_dir, "venv", "bin", "python" )
@@ -155,40 +156,40 @@
else:
env_var_dicts.append( dict( name="PYTHONPATH", action="prepend_to", value=output.stdout ) )
env_var_dicts.append( dict( name="PATH", action="prepend_to", value=os.path.join( install_dir, 'venv', 'bin' ) ) )
- if env_var_dicts:
- if required_repository.status in [ app.model.ToolShedRepository.installation_status.INSTALLED,
- app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
- # Handle the case where we have an installed required repository due to the prior_installation_required = True
- # setting in the received tool_shed_repository's tool_dependencies.xml file and the required repository's
- # tool_dependencies.xml file may include the use of the $ENV[] variable inheritance feature. To handle this,
- # we will replace the current "value" entries in each env_var_dict with the actual path taken from the env.sh
- # file generated for the installed required repository. Each env_var_dict currently looks something like this:
- # {'action': 'append_to', 'name': 'LD_LIBRARY_PATH', 'value': '$BOOST_ROOT_DIR/lib/'}
- # We'll read the contents of the received required_repository's env.sh file and replace the 'value' entry of each env_var_dict
- # with the associated value in the env.sh file.
- new_env_var_dicts = []
- env_sh_file_dir = get_tool_dependency_install_dir( app=app,
- repository_name=required_repository.name,
- repository_owner=required_repository.owner,
- repository_changeset_revision=required_repository.installed_changeset_revision,
- tool_dependency_type='package',
- tool_dependency_name=package_name,
- tool_dependency_version=package_version )
- env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
- for i, line in enumerate( open( env_sh_file_path, 'r' ) ):
- env_var_dict = env_var_dicts[ i ]
- action = env_var_dict.get( 'action', None )
- name = env_var_dict.get( 'name', None )
- value = env_var_dict.get( 'value', None )
- if action and name and value:
- new_value = parse_env_shell_entry( action, name, value, line )
- env_var_dict[ 'value' ] = new_value
- new_env_var_dicts.append( env_var_dict )
- action_dict[ 'environment_variable' ] = new_env_var_dicts
- else:
- action_dict[ 'environment_variable' ] = env_var_dicts
- actions.append( ( 'set_environment', action_dict ) )
- return tool_dependency, actions
+ if env_var_dicts:
+ if required_repository.status in [ app.model.ToolShedRepository.installation_status.INSTALLED,
+ app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ # Handle the case where we have an installed required repository due to the prior_installation_required = True
+ # setting in the received tool_shed_repository's tool_dependencies.xml file and the required repository's
+ # tool_dependencies.xml file may include the use of the $ENV[] variable inheritance feature. To handle this,
+ # we will replace the current "value" entries in each env_var_dict with the actual path taken from the env.sh
+ # file generated for the installed required repository. Each env_var_dict currently looks something like this:
+ # {'action': 'append_to', 'name': 'LD_LIBRARY_PATH', 'value': '$BOOST_ROOT_DIR/lib/'}
+ # We'll read the contents of the received required_repository's env.sh file and replace the 'value' entry of each env_var_dict
+ # with the associated value in the env.sh file.
+ new_env_var_dicts = []
+ env_sh_file_dir = get_tool_dependency_install_dir( app=app,
+ repository_name=required_repository.name,
+ repository_owner=required_repository.owner,
+ repository_changeset_revision=required_repository.installed_changeset_revision,
+ tool_dependency_type='package',
+ tool_dependency_name=package_name,
+ tool_dependency_version=package_version )
+ env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
+ for i, line in enumerate( open( env_sh_file_path, 'r' ) ):
+ env_var_dict = env_var_dicts[ i ]
+ action = env_var_dict.get( 'action', None )
+ name = env_var_dict.get( 'name', None )
+ value = env_var_dict.get( 'value', None )
+ if action and name and value:
+ new_value = parse_env_shell_entry( action, name, value, line )
+ env_var_dict[ 'value' ] = new_value
+ new_env_var_dicts.append( env_var_dict )
+ action_dict[ 'environment_variable' ] = new_env_var_dicts
+ else:
+ action_dict[ 'environment_variable' ] = env_var_dicts
+ actions.append( ( 'set_environment', action_dict ) )
+ return tool_dependency, actions
else:
raise NotImplementedError( 'Only install version 1.0 is currently supported (i.e., change your tag to be <install version="1.0">).' )
return None, actions
@@ -464,7 +465,12 @@
else:
log.debug( "Unsupported action type '%s'. Not proceeding." % str( action_type ) )
raise Exception( "Unsupported action type '%s' in tool dependency definition." % str( action_type ) )
- actions.append( ( action_type, action_dict ) )
+ action_tuple = ( action_type, action_dict )
+ if action_type == 'set_environment':
+ if action_tuple not in actions:
+ actions.append( action_tuple )
+ else:
+ actions.append( action_tuple )
if actions:
actions_dict[ 'actions' ] = actions
if proprietary_fabfile_path:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3df028d4cab4/
Changeset: 3df028d4cab4
Branch: next-stable
User: jgoecks
Date: 2013-06-03 15:43:42
Summary: Remove documentation for Tophat -F option because it is no longer supported.
Affected #: 2 files
diff -r 6dbe92e40de6d352b4d54ec31c89e83fafcdd532 -r 3df028d4cab449f4ee2b5db771ec545d88e3440e tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -350,7 +350,7 @@
<test><!-- Tophat commands:
bowtie2-build -f test-data/tophat_in1.fasta tophat_in1
- tophat2 -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
+ tophat2 -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -400,7 +400,7 @@
<!-- Test base-space paired-end reads with user-supplied reference fasta and full parameters --><test><!-- TopHat commands:
- tophat2 -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search +report_discordant_pairs tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
+ tophat2 -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search +report_discordant_pairs tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -506,9 +506,6 @@
-m/--splice-mismatches INT The maximum number of mismatches that may appear in the "anchor" region of a spliced alignment. The default is 0.
-i/--min-intron-length INT The minimum intron length. TopHat will ignore donor/acceptor pairs closer than this many bases apart. The default is 70.
-I/--max-intron-length INT The maximum intron length. When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read. The default is 500000.
- -F/--min-isoform-fraction 0.0-1.0 TopHat filters out junctions supported by too few alignments. Suppose a junction spanning two exons, is supported by S reads. Let the average depth of coverage of
- exon A be D, and assume that it is higher than B. If S / D is less than the minimum isoform fraction, the junction is not reported. A value of zero disables the
- filter. The default is 0.15.
-g/--max-multihits INT Instructs TopHat to allow up to this many alignments to the reference for a given read, and suppresses all alignments for reads with more than this many
alignments. The default is 40.
-G/--GTF [GTF 2.2 file] Supply TopHat with a list of gene model annotations. TopHat will use the exon records in this file to build a set of known splice junctions for each gene, and will attempt to align reads to these junctions even if they would not normally be covered by the initial mapping.
diff -r 6dbe92e40de6d352b4d54ec31c89e83fafcdd532 -r 3df028d4cab449f4ee2b5db771ec545d88e3440e tools/ngs_rna/tophat_wrapper.xml
--- a/tools/ngs_rna/tophat_wrapper.xml
+++ b/tools/ngs_rna/tophat_wrapper.xml
@@ -403,7 +403,7 @@
<test><!-- Tophat commands:
bowtie-build -f test-data/tophat_in1.fasta tophat_in1
- tophat -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +closure-search +min-closure-exon 50 +min-closure-intron 50 +max-closure-intro 5000 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
+ tophat -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +closure-search +min-closure-exon 50 +min-closure-intron 50 +max-closure-intro 5000 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -445,7 +445,7 @@
<!-- Test base-space paired-end reads with user-supplied reference fasta and full parameters --><test><!-- TopHat commands:
- tophat -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
+ tophat -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -538,9 +538,6 @@
-m/--splice-mismatches INT The maximum number of mismatches that may appear in the "anchor" region of a spliced alignment. The default is 0.
-i/--min-intron-length INT The minimum intron length. TopHat will ignore donor/acceptor pairs closer than this many bases apart. The default is 70.
-I/--max-intron-length INT The maximum intron length. When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read. The default is 500000.
- -F/--min-isoform-fraction 0.0-1.0 TopHat filters out junctions supported by too few alignments. Suppose a junction spanning two exons, is supported by S reads. Let the average depth of coverage of
- exon A be D, and assume that it is higher than B. If S / D is less than the minimum isoform fraction, the junction is not reported. A value of zero disables the
- filter. The default is 0.15.
-g/--max-multihits INT Instructs TopHat to allow up to this many alignments to the reference for a given read, and suppresses all alignments for reads with more than this many
alignments. The default is 40.
-G/--GTF [GTF 2.2 file] Supply TopHat with a list of gene model annotations. TopHat will use the exon records in this file to build a set of known splice junctions for each gene, and will attempt to align reads to these junctions even if they would not normally be covered by the initial mapping.
https://bitbucket.org/galaxy/galaxy-central/commits/1380f4546418/
Changeset: 1380f4546418
User: jgoecks
Date: 2013-06-03 15:44:02
Summary: Automated merge of next-stable
Affected #: 2 files
diff -r 716f29e4c77d2246a49c92ea205734f768369fbb -r 1380f4546418736a0645267868de615ac6d1b0e1 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -350,7 +350,7 @@
<test><!-- Tophat commands:
bowtie2-build -f test-data/tophat_in1.fasta tophat_in1
- tophat2 -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
+ tophat2 -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -400,7 +400,7 @@
<!-- Test base-space paired-end reads with user-supplied reference fasta and full parameters --><test><!-- TopHat commands:
- tophat2 -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search +report_discordant_pairs tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
+ tophat2 -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search +report_discordant_pairs tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -506,9 +506,6 @@
-m/--splice-mismatches INT The maximum number of mismatches that may appear in the "anchor" region of a spliced alignment. The default is 0.
-i/--min-intron-length INT The minimum intron length. TopHat will ignore donor/acceptor pairs closer than this many bases apart. The default is 70.
-I/--max-intron-length INT The maximum intron length. When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read. The default is 500000.
- -F/--min-isoform-fraction 0.0-1.0 TopHat filters out junctions supported by too few alignments. Suppose a junction spanning two exons, is supported by S reads. Let the average depth of coverage of
- exon A be D, and assume that it is higher than B. If S / D is less than the minimum isoform fraction, the junction is not reported. A value of zero disables the
- filter. The default is 0.15.
-g/--max-multihits INT Instructs TopHat to allow up to this many alignments to the reference for a given read, and suppresses all alignments for reads with more than this many
alignments. The default is 40.
-G/--GTF [GTF 2.2 file] Supply TopHat with a list of gene model annotations. TopHat will use the exon records in this file to build a set of known splice junctions for each gene, and will attempt to align reads to these junctions even if they would not normally be covered by the initial mapping.
diff -r 716f29e4c77d2246a49c92ea205734f768369fbb -r 1380f4546418736a0645267868de615ac6d1b0e1 tools/ngs_rna/tophat_wrapper.xml
--- a/tools/ngs_rna/tophat_wrapper.xml
+++ b/tools/ngs_rna/tophat_wrapper.xml
@@ -403,7 +403,7 @@
<test><!-- Tophat commands:
bowtie-build -f test-data/tophat_in1.fasta tophat_in1
- tophat -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +closure-search +min-closure-exon 50 +min-closure-intron 50 +max-closure-intro 5000 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
+ tophat -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +closure-search +min-closure-exon 50 +min-closure-intron 50 +max-closure-intro 5000 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -445,7 +445,7 @@
<!-- Test base-space paired-end reads with user-supplied reference fasta and full parameters --><test><!-- TopHat commands:
- tophat -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
+ tophat -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -538,9 +538,6 @@
-m/--splice-mismatches INT The maximum number of mismatches that may appear in the "anchor" region of a spliced alignment. The default is 0.
-i/--min-intron-length INT The minimum intron length. TopHat will ignore donor/acceptor pairs closer than this many bases apart. The default is 70.
-I/--max-intron-length INT The maximum intron length. When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read. The default is 500000.
- -F/--min-isoform-fraction 0.0-1.0 TopHat filters out junctions supported by too few alignments. Suppose a junction spanning two exons, is supported by S reads. Let the average depth of coverage of
- exon A be D, and assume that it is higher than B. If S / D is less than the minimum isoform fraction, the junction is not reported. A value of zero disables the
- filter. The default is 0.15.
-g/--max-multihits INT Instructs TopHat to allow up to this many alignments to the reference for a given read, and suppresses all alignments for reads with more than this many
alignments. The default is 40.
-G/--GTF [GTF 2.2 file] Supply TopHat with a list of gene model annotations. TopHat will use the exon records in this file to build a set of known splice junctions for each gene, and will attempt to align reads to these junctions even if they would not normally be covered by the initial mapping.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because you have the notification service enabled and you
are the addressed recipient of this email.