galaxy-commits

commit/galaxy-central: greg: Use mercurial's purge extension to remove untracked repository files and empty directories, and fix a bug related to deleting repository files.
by Bitbucket 15 Nov '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/04acee047c2c/
changeset: 04acee047c2c
user: greg
date: 2011-11-15 17:51:05
summary: Use mercurial's purge extension to remove untracked repository files and empty directories, and fix a bug related to deleting repository files.
affected #: 2 files
diff -r bdf334e6017658b0864ae38552d103018d956caa -r 04acee047c2c585730e9a8266de81d25d5c25d7f lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -589,36 +589,36 @@
elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
# We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
- cmd = 'hg status'
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stdout = open( tmp_name, 'wb' )
+ # We'll use mercurial's purge extension to do this. Using this extension requires the following entry
+ # in the repository's hgrc file, which was not previously required, so we'll add it if it's missing.
+ # [extensions]
+ # hgext.purge=
+ lines = repo.opener( 'hgrc', 'rb' ).readlines()
+ if not '[extensions]\n' in lines:
+ # No extensions have been added at all, so just append to the file.
+ fp = repo.opener( 'hgrc', 'a' )
+ fp.write( '[extensions]\n' )
+ fp.write( 'hgext.purge=\n' )
+ fp.close()
+ elif not 'hgext.purge=\n' in lines:
+ # The file includes an [extensions] section, but we need to add the
+ # purge extension.
+ fp = repo.opener( 'hgrc', 'wb' )
+ for line in lines:
+ if line.startswith( '[extensions]' ):
+ fp.write( line )
+ fp.write( 'hgext.purge=\n' )
+ else:
+ fp.write( line )
+ fp.close()
+ cmd = 'hg purge'
os.chdir( repo_dir )
- proc = subprocess.Popen( args=cmd, shell=True, stdout=tmp_stdout.fileno() )
- returncode = proc.wait()
+ proc = subprocess.Popen( args=cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
+ return_code = proc.wait()
os.chdir( current_working_dir )
- tmp_stdout.close()
- if returncode == 0:
- for i, line in enumerate( open( tmp_name ) ):
- if line.startswith( '?' ) or line.startswith( 'I' ):
- files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, line.split()[1] ) ) )
- elif line.startswith( 'M' ) or line.startswith( 'A' ) or line.startswith( 'R' ):
- files_to_commit.append( os.path.abspath( os.path.join( repo_dir, line.split()[1] ) ) )
- for full_path in files_to_remove_from_disk:
- # We'll remove all files that are not tracked or ignored.
- if os.path.isdir( full_path ):
- try:
- os.rmdir( full_path )
- except OSError, e:
- # The directory is not empty
- pass
- elif os.path.isfile( full_path ):
- os.remove( full_path )
- dir = os.path.split( full_path )[0]
- try:
- os.rmdir( dir )
- except OSError, e:
- # The directory is not empty
- pass
+ if return_code != 0:
+ output = proc.stdout.read( 32768 )
+ log.debug( 'hg purge failed in repository directory %s, reason: %s' % ( repo_dir, output ) )
if files_to_commit:
if not commit_message:
commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
diff -r bdf334e6017658b0864ae38552d103018d956caa -r 04acee047c2c585730e9a8266de81d25d5c25d7f lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1042,13 +1042,11 @@
def __create_hgrc_file( self, repository ):
# At this point, an entry for the repository is required to be in the hgweb.config
# file so we can call repository.repo_path.
- # Create a .hg/hgrc file that looks something like this:
- # [web]
- # allow_push = test
- # name = convert_characters1
- # push_ssl = False
# Since we support both http and https, we set push_ssl to False to override
# the default (which is True) in the mercurial api.
+ # The hg purge extension purges all files and directories not being tracked by
+ # mercurial in the current repository. It'll remove unknown files and empty
+ # directories. This is used in the update_for_browsing() method.
repo = hg.repository( get_configured_ui(), path=repository.repo_path )
fp = repo.opener( 'hgrc', 'wb' )
fp.write( '[paths]\n' )
@@ -1058,6 +1056,8 @@
fp.write( 'allow_push = %s\n' % repository.user.username )
fp.write( 'name = %s\n' % repository.name )
fp.write( 'push_ssl = false\n' )
+ fp.write( '[extensions]\n' )
+ fp.write( 'hgext.purge=' )
fp.close()
@web.expose
def browse_repository( self, trans, id, **kwd ):
@@ -1150,7 +1150,7 @@
tip = repository.tip
for selected_file in selected_files_to_delete:
try:
- commands.remove( repo.ui, repo, repo_file, force=True )
+ commands.remove( repo.ui, repo, selected_file, force=True )
except Exception, e:
# I never have a problem with commands.remove on a Mac, but in the test/production
# tool shed environment, it throws an exception whenever I delete all files from a
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
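
A note on the pattern in this changeset: it boils down to making sure the purge extension is enabled in the repository's .hg/hgrc and then shelling out to hg purge. The helper below is an illustrative reconstruction, not code from the changeset: it writes .hg/hgrc directly instead of going through mercurial's repo.opener(), and it passes cwd to Popen rather than chdir'ing back and forth as the commit does.

import os
import subprocess

def purge_untracked_files( repo_dir ):
    # Ensure mercurial's purge extension is enabled, using the same logic
    # as the commit: append an [extensions] section if the hgrc has none,
    # otherwise slot the purge entry in beneath the existing section.
    hgrc = os.path.join( repo_dir, '.hg', 'hgrc' )
    if os.path.exists( hgrc ):
        lines = open( hgrc ).readlines()
    else:
        lines = []
    if '[extensions]\n' not in lines:
        fp = open( hgrc, 'a' )
        fp.write( '[extensions]\n' )
        fp.write( 'hgext.purge=\n' )
        fp.close()
    elif 'hgext.purge=\n' not in lines:
        fp = open( hgrc, 'w' )
        for line in lines:
            fp.write( line )
            if line.startswith( '[extensions]' ):
                fp.write( 'hgext.purge=\n' )
        fp.close()
    # Run the purge; on failure the caller can log the captured output,
    # mirroring the error branch in the commit's update_for_browsing().
    proc = subprocess.Popen( args='hg purge', shell=True, cwd=repo_dir,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
    return_code = proc.wait()
    return return_code, proc.stdout.read( 32768 )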

commit/galaxy-central: fubar: Bump FastQC version in preparation for updating executable version
by Bitbucket 15 Nov '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/bdf334e60176/
changeset: bdf334e60176
user: fubar
date: 2011-11-15 17:09:50
summary: Bump FastQC version in preparation for updating executable version
affected #: 1 file
diff -r 9ad75ecd32daa6d97f20bf6ff9db354e7052d5b7 -r bdf334e6017658b0864ae38552d103018d956caa tools/rgenetics/rgFastQC.xml
--- a/tools/rgenetics/rgFastQC.xml
+++ b/tools/rgenetics/rgFastQC.xml
@@ -1,4 +1,4 @@
-<tool name="Fastqc: Fastqc QC" id="fastqc" version="0.1">
+<tool name="Fastqc: Fastqc QC" id="fastqc" version="0.2"><description>using FastQC from Babraham</description><command interpreter="python">
rgFastQC.py -i $input_file -d $html_file.files_path -o $html_file -n "$out_prefix" -f $input_file.ext -e ${GALAXY_DATA_INDEX_DIR}/shared/jars/FastQC/fastqc
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: greg: Revert file content checks when uploading to a tool shed repository as they're currently too restrictive.
by Bitbucket 14 Nov '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/9ad75ecd32da/
changeset: 9ad75ecd32da
user: greg
date: 2011-11-14 22:59:26
summary: Revert file content checks when uploading to a tool shed repository as they're currently too restrictive.
affected #: 1 file
diff -r f640c7bd6ffc4904996859e06157a5f67671f978 -r 9ad75ecd32daa6d97f20bf6ff9db354e7052d5b7 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -101,32 +101,34 @@
full_path = os.path.abspath( os.path.join( repo_dir, upload_point, uploaded_file_filename ) )
else:
full_path = os.path.abspath( os.path.join( repo_dir, uploaded_file_filename ) )
- ok, message = self.__check_file_content( uploaded_file_name )
- if ok:
- # Move the uploaded file to the load_point within the repository hierarchy.
- shutil.move( uploaded_file_name, full_path )
- commands.add( repo.ui, repo, full_path )
- try:
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
- except Exception, e:
- # I never have a problem with commands.commit on a Mac, but in the test/production
- # tool shed environment, it occasionally throws a "TypeError: array item must be char"
- # exception. If this happens, we'll try the following.
- repo.dirstate.write()
- repo.commit( user=trans.user.username, text=commit_message )
- if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
- # Handle the special case where a tool_data_table_conf.xml.sample
- # file is being uploaded by parsing the file and adding new entries
- # to the in-memory trans.app.tool_data_tables dictionary as well as
- # appending them to the shed's tool_data_table_conf.xml file on disk.
- error, error_message = handle_sample_tool_data_table_conf_file( trans, full_path )
- if error:
- message = '%s<br/>%s' % ( message, error_message )
- if full_path.endswith( '.loc.sample' ):
- # Handle the special case where a xxx.loc.sample file is
- # being uploaded by copying it to ~/tool-data/xxx.loc.
- copy_sample_loc_file( trans, full_path )
- handle_email_alerts( trans, repository )
+ # TODO: enhance this method to set a flag and alert an admin to review content since
+ # the hard checks are too restrictive.
+ #ok, message = self.__check_file_content( uploaded_file_name )
+ #if ok:
+ # Move the uploaded file to the load_point within the repository hierarchy.
+ shutil.move( uploaded_file_name, full_path )
+ commands.add( repo.ui, repo, full_path )
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( user=trans.user.username, text=commit_message )
+ if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
+ # Handle the special case where a tool_data_table_conf.xml.sample
+ # file is being uploaded by parsing the file and adding new entries
+ # to the in-memory trans.app.tool_data_tables dictionary as well as
+ # appending them to the shed's tool_data_table_conf.xml file on disk.
+ error, error_message = handle_sample_tool_data_table_conf_file( trans, full_path )
+ if error:
+ message = '%s<br/>%s' % ( message, error_message )
+ if full_path.endswith( '.loc.sample' ):
+ # Handle the special case where a xxx.loc.sample file is
+ # being uploaded by copying it to ~/tool-data/xxx.loc.
+ copy_sample_loc_file( trans, full_path )
+ handle_email_alerts( trans, repository )
if ok:
# Update the repository files for browsing.
update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
@@ -192,6 +194,9 @@
tar.extractall( path=full_path )
tar.close()
uploaded_file.close()
+ """
+ # TODO: enhance this method to set a flag and alert an admin to review content since
+ # the hard checks are too restrictive.
for filename_in_archive in filenames_in_archive:
if os.path.isfile( filename_in_archive ):
ok, message = self.__check_file_content( filename_in_archive )
@@ -200,6 +205,7 @@
current_working_dir = os.getcwd()
update_for_browsing( trans, repository, current_working_dir )
return False, message, []
+ """
if remove_repo_files_not_in_tar and not repository.is_new:
# We have a repository that is not new (it contains files), so discover
# those files that are in the repository, but not in the uploaded archive.
@@ -343,13 +349,15 @@
return False, message
return True, ''
def __check_file_content( self, file_path ):
+ return True, ''
message = ''
ok = True
+ head, tail = os.path.split( file_path )
if check_html( file_path ):
- message = 'Files containing HTML content cannot be uploaded to a Galaxy tool shed.'
+ message = 'The file <b>%s</b> contains HTML content which cannot be uploaded to a Galaxy tool shed.' % str( tail )
ok = False
elif check_image( file_path ):
# For now we won't allow images to be uploaded.
- message = 'Files containing images cannot be uploaded to a Galaxy tool shed.'
+ message = 'The file <b>%s</b> contains image content that cannot be uploaded to a Galaxy tool shed.' % str( tail )
ok = False
return ok, message
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
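
The TODO in this changeset proposes replacing the hard reject with a flag-and-review flow. A minimal sketch of that idea, reusing the check_html / check_image helpers the controller already imports from galaxy.datatypes.checkers; the notify_admin callback is hypothetical, since no such hook exists in this changeset.

import os
from galaxy.datatypes.checkers import check_html, check_image

def flag_file_content( file_path, notify_admin ):
    # Soft version of __check_file_content(): record what was found and
    # hand it to an admin-review callback instead of refusing the upload.
    tail = os.path.split( file_path )[ 1 ]
    findings = []
    if check_html( file_path ):
        findings.append( 'HTML content' )
    if check_image( file_path ):
        findings.append( 'image content' )
    if findings:
        # notify_admin is a hypothetical callback, not part of Galaxy here.
        notify_admin( 'File %s contains %s and has been flagged for review.' % ( tail, ' and '.join( findings ) ) )
    # Always accept; review happens out of band.
    return True, ''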

commit/galaxy-central: greg: Add baseline support for handling data types included in tool shed repositories, add the ability to upload files to tool shed repositories by entering a URL in the upload form, provide much better file content checking when uploading either single files or archives to a tool shed repository (neither html content nor images are currently allowed), enhance error messaging when tool config parameter tags are not functionally correct.
by Bitbucket 14 Nov '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f640c7bd6ffc/
changeset: f640c7bd6ffc
user: greg
date: 2011-11-14 22:19:56
summary: Add baseline support for handling data types included in tool shed repositories, add the ability to upload files to tool shed repositories by entering a URL in the upload form, provide much better file content checking when uploading either single files or archives to a tool shed repository (neither html content nor images are currently allowed), enhance error messaging when tool config parameter tags are not functionally correct.
affected #: 11 files
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -164,9 +164,14 @@
@classmethod
def build( cls, tool, param ):
"""Factory method to create parameter of correct type"""
+ param_name = param.get( "name" )
+ if not param_name:
+ raise ValueError( "Tool parameters require a 'name'" )
param_type = param.get("type")
- if not param_type or param_type not in parameter_types:
- raise ValueError( "Unknown tool parameter type '%s'" % param_type )
+ if not param_type:
+ raise ValueError( "Tool parameter '%s' requires a 'type'" % ( param_name ) )
+ elif param_type not in parameter_types:
+ raise ValueError( "Tool parameter '%s' uses an unknown type '%s'" % ( param_name, param_type ) )
else:
return parameter_types[param_type]( tool, param )
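
The reworked factory above now reports which parameter is broken and how. A self-contained sketch of the same validation order, with a stand-in parameter_types registry (Galaxy's real dict maps type names such as 'text' and 'select' to parameter classes):

from xml.etree import ElementTree

parameter_types = { 'text': lambda tool, param: ( 'TextToolParameter', param.get( 'name' ) ) }

def build( tool, param ):
    # Validate name first, then presence of type, then a known type,
    # so each failure names the offending parameter.
    param_name = param.get( 'name' )
    if not param_name:
        raise ValueError( "Tool parameters require a 'name'" )
    param_type = param.get( 'type' )
    if not param_type:
        raise ValueError( "Tool parameter '%s' requires a 'type'" % param_name )
    elif param_type not in parameter_types:
        raise ValueError( "Tool parameter '%s' uses an unknown type '%s'" % ( param_name, param_type ) )
    return parameter_types[ param_type ]( tool, param )

for xml in ( '<param type="text"/>',                # missing name
             '<param name="seq"/>',                 # missing type
             '<param name="seq" type="nope"/>' ):   # unknown type
    try:
        build( None, ElementTree.fromstring( xml ) )
    except ValueError, e:
        print e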
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -6,6 +6,7 @@
from time import strftime
from galaxy import config, tools, web, util
from galaxy.util.hash_util import *
+from galaxy.util.json import json_fix
from galaxy.web import error, form, url_for
from galaxy.model.orm import *
from galaxy.workflow.modules import *
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py
+++ b/lib/galaxy/web/controllers/admin.py
@@ -4,12 +4,14 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.tools.search import ToolBoxSearch
from galaxy.tools import ToolSection, json_fix
+from galaxy.util import inflector
import logging
log = logging.getLogger( __name__ )
from galaxy.actions.admin import AdminActions
from galaxy.web.params import QuotaParamParser
from galaxy.exceptions import *
+import galaxy.datatypes.registry
class UserListGrid( grids.Grid ):
class EmailColumn( grids.TextColumn ):
@@ -871,6 +873,10 @@
os.chdir( current_working_dir )
tmp_stderr.close()
if returncode == 0:
+ # Load data types required by tools.
+ # TODO: uncomment the following when we're ready...
+ #self.__load_datatypes( trans, repo_files_dir )
+ # Load tools and tool data files required by them.
sample_files, repository_tools_tups = self.__get_repository_tools_and_sample_files( trans, tool_path, repo_files_dir )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
@@ -920,8 +926,9 @@
status = 'error'
if installed_repository_names:
installed_repository_names.sort()
- message += 'These %d repositories were installed and all tools were loaded into tool panel section <b>%s</b>:<br/>' % \
- ( len( installed_repository_names ), tool_section.name )
+ num_repositories_installed = len( installed_repository_names )
+ message += 'Installed %d %s and all tools were loaded into tool panel section <b>%s</b>:<br/>Installed repositories: ' % \
+ ( num_repositories_installed, inflector.cond_plural( num_repositories_installed, 'repository' ), tool_section.name )
for i, repo_name in enumerate( installed_repository_names ):
if i == len( installed_repository_names ) -1:
message += '%s.<br/>' % repo_name
@@ -1171,6 +1178,45 @@
error = tmp_stderr.read()
tmp_stderr.close()
log.debug( 'Problem installing dependencies for tool "%s"\n%s' % ( repository_tool.name, error ) )
+ def __load_datatypes( self, trans, repo_files_dir ):
+ # Find datatypes_conf.xml if it exists.
+ datatypes_config = None
+ for root, dirs, files in os.walk( repo_files_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == 'datatypes_conf.xml':
+ datatypes_config = os.path.abspath( os.path.join( root, name ) )
+ break
+ if datatypes_config:
+ # Parse datatypes_config.
+ tree = ElementTree.parse( datatypes_config )
+ root = tree.getroot()
+ ElementInclude.include( root )
+ datatype_files = root.find( 'datatype_files' )
+ for elem in datatype_files.findall( 'datatype_file' ):
+ datatype_file_name = elem.get( 'name', None )
+ if datatype_file_name:
+ # Find the file in the installed repository.
+ relative_path = None
+ for root, dirs, files in os.walk( repo_files_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == datatype_file_name:
+ relative_path = os.path.join( root, name )
+ break
+ relative_head, relative_tail = os.path.split( relative_path )
+ # TODO: get the import_module by parsing the <registration><datatype> tags
+ if datatype_file_name.find( '.' ) > 0:
+ import_module = datatype_file_name.split( '.' )[ 0 ]
+ else:
+ import_module = datatype_file_name
+ try:
+ sys.path.insert( 0, relative_head )
+ module = __import__( import_module )
+ sys.path.pop( 0 )
+ except Exception, e:
+ log.debug( "Execption importing datatypes code file included in installed repository: %s" % str( e ) )
+ trans.app.datatypes_registry = galaxy.datatypes.registry.Registry( trans.app.config.root, datatypes_config )
def __get_repository_tools_and_sample_files( self, trans, tool_path, repo_files_dir ):
# The sample_files list contains all files whose name ends in .sample
sample_files = []
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -124,34 +124,6 @@
.filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
.order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \
.first()
-def generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict ):
- """
- Update the received metadata_dict with changes that have been applied
- to the received exported_workflow_dict. Store everything in the database.
- """
- if 'workflows' in metadata_dict:
- metadata_dict[ 'workflows' ].append( exported_workflow_dict )
- else:
- metadata_dict[ 'workflows' ] = [ exported_workflow_dict ]
- return metadata_dict
-def new_workflow_metadata_required( trans, id, metadata_dict ):
- """
- Currently everything about an exported workflow except the name is hard-coded, so there's
- no real way to differentiate versions of exported workflows. If this changes at some future
- time, this method should be enhanced accordingly.
- """
- if 'workflows' in metadata_dict:
- repository_metadata = get_latest_repository_metadata( trans, id )
- if repository_metadata:
- if repository_metadata.metadata:
- # The repository has metadata, so update the workflows value - no new record is needed.
- return False
- else:
- # There is no saved repository metadata, so we need to create a new repository_metadata table record.
- return True
- # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table
- # record is not needed.
- return False
def generate_clone_url( trans, repository_id ):
repository = get_repository( trans, repository_id )
protocol, base = trans.request.base.split( '://' )
@@ -313,6 +285,62 @@
# The received metadata_dict includes no metadata for tools, so a new repository_metadata table
# record is not needed.
return False
+def generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict ):
+ """
+ Update the received metadata_dict with changes that have been applied
+ to the received exported_workflow_dict. Store everything in the database.
+ """
+ if 'workflows' in metadata_dict:
+ metadata_dict[ 'workflows' ].append( exported_workflow_dict )
+ else:
+ metadata_dict[ 'workflows' ] = [ exported_workflow_dict ]
+ return metadata_dict
+def new_workflow_metadata_required( trans, id, metadata_dict ):
+ """
+ Currently everything about an exported workflow except the name is hard-coded, so there's
+ no real way to differentiate versions of exported workflows. If this changes at some future
+ time, this method should be enhanced accordingly.
+ """
+ if 'workflows' in metadata_dict:
+ repository_metadata = get_latest_repository_metadata( trans, id )
+ if repository_metadata:
+ if repository_metadata.metadata:
+ # The repository has metadata, so update the workflows value - no new record is needed.
+ return False
+ else:
+ # There is no saved repository metadata, so we need to create a new repository_metadata table record.
+ return True
+ # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table
+ # record is not needed.
+ return False
+def generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict ):
+ """
+ Update the received metadata_dict with changes that have been applied
+ to the received datatypes_config.
+ """
+ # Parse datatypes_config.
+ tree = ElementTree.parse( datatypes_config )
+ root = tree.getroot()
+ ElementInclude.include( root )
+ repository_datatype_code_files = []
+ datatype_files = root.find( 'datatype_files' )
+ if datatype_files:
+ for elem in datatype_files.findall( 'datatype_file' ):
+ name = elem.get( 'name', None )
+ repository_datatype_code_files.append( name )
+ metadata_dict[ 'datatype_files' ] = repository_datatype_code_files
+ datatypes = []
+ registration = root.find( 'registration' )
+ if registration:
+ for elem in registration.findall( 'datatype' ):
+ extension = elem.get( 'extension', None )
+ dtype = elem.get( 'type', None )
+ mimetype = elem.get( 'mimetype', None )
+ datatypes.append( dict( extension=extension,
+ dtype=dtype,
+ mimetype=mimetype ) )
+ metadata_dict[ 'datatypes' ] = datatypes
+ return metadata_dict
def set_repository_metadata( trans, id, changeset_revision, **kwd ):
"""Set repository metadata"""
message = ''
@@ -322,28 +350,34 @@
repo = hg.repository( get_configured_ui(), repo_dir )
invalid_files = []
sample_files = []
+ datatypes_config = None
ctx = get_changectx_for_changeset( trans, repo, changeset_revision )
if ctx is not None:
metadata_dict = {}
if changeset_revision == repository.tip:
- # Find all special .sample files first.
+ # Find datatypes_conf.xml if it exists.
+ for root, dirs, files in os.walk( repo_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == 'datatypes_conf.xml':
+ datatypes_config = os.path.abspath( os.path.join( root, name ) )
+ break
+ if datatypes_config:
+ metadata_dict = generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict )
+ # Find all special .sample files.
for root, dirs, files in os.walk( repo_dir ):
if root.find( '.hg' ) < 0:
for name in files:
if name.endswith( '.sample' ):
sample_files.append( os.path.abspath( os.path.join( root, name ) ) )
+ # Find all tool configs and exported workflows.
for root, dirs, files in os.walk( repo_dir ):
if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
if '.hg' in dirs:
- # Don't visit .hg directories - should be impossible since we don't
- # allow uploaded archives that contain .hg dirs, but just in case...
dirs.remove( '.hg' )
- if 'hgrc' in files:
- # Don't include hgrc files in commit.
- files.remove( 'hgrc' )
for name in files:
# Find all tool configs.
- if name.endswith( '.xml' ):
+ if name != 'datatypes_conf.xml' and name.endswith( '.xml' ):
try:
full_path = os.path.abspath( os.path.join( root, name ) )
tool = load_tool( trans, full_path )
@@ -373,11 +407,13 @@
# Find all special .sample files first.
for filename in ctx:
if filename.endswith( '.sample' ):
- sample_files.append( os.path.abspath( os.path.join( root, filename ) ) )
+ sample_files.append( os.path.abspath( filename ) )
# Get all tool config file names from the hgweb url, something like:
# /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml
for filename in ctx:
- # Find all tool configs - should not have to update metadata for workflows for now.
+ # Find all tool configs - we do not have to update metadata for workflows or datatypes in anything
+ # but repository tips (handled above) since at the time this code was written, no workflows or
+ # datatypes_conf.xml files exist in tool shed repositories, so they can only be added in future tips.
if filename.endswith( '.xml' ):
fctx = ctx[ filename ]
# Write the contents of the old tool config to a temporary file.
@@ -532,25 +568,41 @@
# The following will delete the disk copy of only the files in the repository.
#os.system( 'hg update -r null > /dev/null 2>&1' )
repo.ui.pushbuffer()
+ files_to_remove_from_disk = []
+ files_to_commit = []
commands.status( repo.ui, repo, all=True )
status_and_file_names = repo.ui.popbuffer().strip().split( "\n" )
- # status_and_file_names looks something like:
- # ['? README', '? tmap_tool/tmap-0.0.9.tar.gz', '? dna_filtering.py', 'C filtering.py', 'C filtering.xml']
- # The codes used to show the status of files are:
- # M = modified
- # A = added
- # R = removed
- # C = clean
- # ! = deleted, but still tracked
- # ? = not tracked
- # I = ignored
- files_to_remove_from_disk = []
- files_to_commit = []
- for status_and_file_name in status_and_file_names:
- if status_and_file_name.startswith( '?' ) or status_and_file_name.startswith( 'I' ):
- files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
- elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
- files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ if status_and_file_names and status_and_file_names[ 0 ] not in [ '' ]:
+ # status_and_file_names looks something like:
+ # ['? README', '? tmap_tool/tmap-0.0.9.tar.gz', '? dna_filtering.py', 'C filtering.py', 'C filtering.xml']
+ # The codes used to show the status of files are:
+ # M = modified
+ # A = added
+ # R = removed
+ # C = clean
+ # ! = deleted, but still tracked
+ # ? = not tracked
+ # I = ignored
+ for status_and_file_name in status_and_file_names:
+ if status_and_file_name.startswith( '?' ) or status_and_file_name.startswith( 'I' ):
+ files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
+ files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ # We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
+ cmd = 'hg status'
+ tmp_name = tempfile.NamedTemporaryFile().name
+ tmp_stdout = open( tmp_name, 'wb' )
+ os.chdir( repo_dir )
+ proc = subprocess.Popen( args=cmd, shell=True, stdout=tmp_stdout.fileno() )
+ returncode = proc.wait()
+ os.chdir( current_working_dir )
+ tmp_stdout.close()
+ if returncode == 0:
+ for i, line in enumerate( open( tmp_name ) ):
+ if line.startswith( '?' ) or line.startswith( 'I' ):
+ files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, line.split()[1] ) ) )
+ elif line.startswith( 'M' ) or line.startswith( 'A' ) or line.startswith( 'R' ):
+ files_to_commit.append( os.path.abspath( os.path.join( repo_dir, line.split()[1] ) ) )
for full_path in files_to_remove_from_disk:
# We'll remove all files that are not tracked or ignored.
if os.path.isdir( full_path ):
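
The new generate_datatypes_metadata() above reduces a repository's datatypes_conf.xml to a metadata dict. Here is the core of that parse against an illustrative config (the file contents and names below are invented for the example; note the is-not-None tests, since truth-testing a childless ElementTree element is False even when the element exists):

from xml.etree import ElementTree

DATATYPES_CONF = """
<datatypes>
    <datatype_files>
        <datatype_file name="example_datatypes.py"/>
    </datatype_files>
    <registration>
        <datatype extension="blastxml" type="galaxy.datatypes.xml:BlastXml" mimetype="application/xml"/>
    </registration>
</datatypes>
"""

root = ElementTree.fromstring( DATATYPES_CONF )
metadata_dict = {}
datatype_files = root.find( 'datatype_files' )
if datatype_files is not None:
    metadata_dict[ 'datatype_files' ] = [ elem.get( 'name' ) for elem in datatype_files.findall( 'datatype_file' ) ]
registration = root.find( 'registration' )
if registration is not None:
    metadata_dict[ 'datatypes' ] = [ dict( extension=elem.get( 'extension' ),
                                           dtype=elem.get( 'type' ),
                                           mimetype=elem.get( 'mimetype' ) )
                                     for elem in registration.findall( 'datatype' ) ]
print metadata_dict
# -> {'datatype_files': ['example_datatypes.py'],
#     'datatypes': [{'extension': 'blastxml', 'dtype': 'galaxy.datatypes.xml:BlastXml',
#                    'mimetype': 'application/xml'}]}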
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1007,17 +1007,19 @@
if repository_path.startswith( './' ):
repository_path = repository_path.replace( './', '', 1 )
entry = "repos/%s/%s = %s" % ( repository.user.username, repository.name, repository_path.lstrip( './' ) )
+ tmp_fd, tmp_fname = tempfile.mkstemp()
if os.path.exists( hgweb_config ):
# Make a backup of the hgweb.config file since we're going to be changing it.
self.__make_hgweb_config_copy( trans, hgweb_config )
- tmp_fname = tempfile.NamedTemporaryFile()
+ new_hgweb_config = open( tmp_fname, 'wb' )
for i, line in enumerate( open( hgweb_config ) ):
- tmp_fname.write( line )
+ new_hgweb_config.write( line )
else:
- tmp_fname.write( '[paths]\n' )
- tmp_fname.write( "%s\n" % entry )
- tmp_fname.flush()
- shutil.move( tmp_fname.name, os.path.abspath( hgweb_config ) )
+ new_hgweb_config = open( tmp_fname, 'wb' )
+ new_hgweb_config.write( '[paths]\n' )
+ new_hgweb_config.write( "%s\n" % entry )
+ new_hgweb_config.flush()
+ shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
def __change_hgweb_config_entry( self, trans, repository, old_repository_name, new_repository_name ):
# Change an entry in the hgweb.config file for a repository. This only happens when
# the owner changes the name of the repository. An entry looks something like:
@@ -1028,14 +1030,15 @@
repo_dir = repository.repo_path
old_lhs = "repos/%s/%s" % ( repository.user.username, old_repository_name )
new_entry = "repos/%s/%s = %s\n" % ( repository.user.username, new_repository_name, repo_dir )
- tmp_fname = tempfile.NamedTemporaryFile()
+ tmp_fd, tmp_fname = tempfile.mkstemp()
+ new_hgweb_config = open( tmp_fname, 'wb' )
for i, line in enumerate( open( hgweb_config ) ):
if line.startswith( old_lhs ):
- tmp_fname.write( new_entry )
+ new_hgweb_config.write( new_entry )
else:
- tmp_fname.write( line )
- tmp_fname.flush()
- shutil.move( tmp_fname.name, os.path.abspath( hgweb_config ) )
+ new_hgweb_config.write( line )
+ new_hgweb_config.flush()
+ shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
def __create_hgrc_file( self, repository ):
# At this point, an entry for the repository is required to be in the hgweb.config
# file so we can call repository.repo_path.
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -1,4 +1,4 @@
-import sys, os, shutil, logging, tarfile, tempfile
+import sys, os, shutil, logging, tarfile, tempfile, urllib
from galaxy.web.base.controller import *
from galaxy.model.orm import *
from galaxy.datatypes.checkers import *
@@ -11,9 +11,6 @@
SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
CHUNK_SIZE = 2**20 # 1Mb
-class UploadError( Exception ):
- pass
-
class UploadController( BaseUIController ):
@web.expose
@web.require_login( 'upload', use_panels=True, webapp='community' )
@@ -32,20 +29,40 @@
remove_repo_files_not_in_tar = util.string_as_bool( params.get( 'remove_repo_files_not_in_tar', 'true' ) )
uploaded_file = None
upload_point = self.__get_upload_point( repository, **kwd )
- # Get the current repository tip.
tip = repository.tip
+ file_data = params.get( 'file_data', '' )
+ url = params.get( 'url', '' )
if params.get( 'upload_button', False ):
current_working_dir = os.getcwd()
- file_data = params.get( 'file_data', '' )
- if file_data == '':
+ if file_data == '' and url == '':
message = 'No files were entered on the upload form.'
status = 'error'
uploaded_file = None
+ elif url:
+ valid_url = True
+ try:
+ stream = urllib.urlopen( url )
+ except Exception, e:
+ valid_url = False
+ message = 'Error uploading file via http: %s' % str( e )
+ status = 'error'
+ uploaded_file = None
+ if valid_url:
+ fd, uploaded_file_name = tempfile.mkstemp()
+ uploaded_file = open( uploaded_file_name, 'wb' )
+ while 1:
+ chunk = stream.read( CHUNK_SIZE )
+ if not chunk:
+ break
+ uploaded_file.write( chunk )
+ uploaded_file.flush()
+ uploaded_file_filename = url.split( '/' )[ -1 ]
+ isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
elif file_data not in ( '', None ):
uploaded_file = file_data.file
uploaded_file_name = uploaded_file.name
uploaded_file_filename = file_data.filename
- isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
+ isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
if uploaded_file:
isgzip = False
isbz2 = False
@@ -84,30 +101,32 @@
full_path = os.path.abspath( os.path.join( repo_dir, upload_point, uploaded_file_filename ) )
else:
full_path = os.path.abspath( os.path.join( repo_dir, uploaded_file_filename ) )
- # Move the uploaded file to the load_point within the repository hierarchy.
- shutil.move( uploaded_file_name, full_path )
- commands.add( repo.ui, repo, full_path )
- try:
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
- except Exception, e:
- # I never have a problem with commands.commit on a Mac, but in the test/production
- # tool shed environment, it occasionally throws a "TypeError: array item must be char"
- # exception. If this happens, we'll try the following.
- repo.dirstate.write()
- repo.commit( user=trans.user.username, text=commit_message )
- if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
- # Handle the special case where a tool_data_table_conf.xml.sample
- # file is being uploaded by parsing the file and adding new entries
- # to the in-memory trans.app.tool_data_tables dictionary as well as
- # appending them to the shed's tool_data_table_conf.xml file on disk.
- error, error_message = handle_sample_tool_data_table_conf_file( trans, full_path )
- if error:
- message = '%s<br/>%s' % ( message, error_message )
- if full_path.endswith( '.loc.sample' ):
- # Handle the special case where a xxx.loc.sample file is
- # being uploaded by copying it to ~/tool-data/xxx.loc.
- copy_sample_loc_file( trans, full_path )
- handle_email_alerts( trans, repository )
+ ok, message = self.__check_file_content( uploaded_file_name )
+ if ok:
+ # Move the uploaded file to the load_point within the repository hierarchy.
+ shutil.move( uploaded_file_name, full_path )
+ commands.add( repo.ui, repo, full_path )
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( user=trans.user.username, text=commit_message )
+ if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
+ # Handle the special case where a tool_data_table_conf.xml.sample
+ # file is being uploaded by parsing the file and adding new entries
+ # to the in-memory trans.app.tool_data_tables dictionary as well as
+ # appending them to the shed's tool_data_table_conf.xml file on disk.
+ error, error_message = handle_sample_tool_data_table_conf_file( trans, full_path )
+ if error:
+ message = '%s<br/>%s' % ( message, error_message )
+ if full_path.endswith( '.loc.sample' ):
+ # Handle the special case where a xxx.loc.sample file is
+ # being uploaded by copying it to ~/tool-data/xxx.loc.
+ copy_sample_loc_file( trans, full_path )
+ handle_email_alerts( trans, repository )
if ok:
# Update the repository files for browsing.
update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
@@ -146,6 +165,7 @@
selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
return trans.fill_template( '/webapps/community/repository/upload.mako',
repository=repository,
+ url=url,
commit_message=commit_message,
uncompress_file=uncompress_file,
remove_repo_files_not_in_tar=remove_repo_files_not_in_tar,
@@ -172,6 +192,14 @@
tar.extractall( path=full_path )
tar.close()
uploaded_file.close()
+ for filename_in_archive in filenames_in_archive:
+ if os.path.isfile( filename_in_archive ):
+ ok, message = self.__check_file_content( filename_in_archive )
+ if not ok:
+ # Refresh the repository files for browsing.
+ current_working_dir = os.getcwd()
+ update_for_browsing( trans, repository, current_working_dir )
+ return False, message, []
if remove_repo_files_not_in_tar and not repository.is_new:
# We have a repository that is not new (it contains files), so discover
# those files that are in the repository, but not in the uploaded archive.
@@ -314,4 +342,14 @@
message = "Uploaded archives cannot contain hgrc files."
return False, message
return True, ''
-
+ def __check_file_content( self, file_path ):
+ message = ''
+ ok = True
+ if check_html( file_path ):
+ message = 'Files containing HTML content cannot be uploaded to a Galaxy tool shed.'
+ ok = False
+ elif check_image( file_path ):
+ # For now we won't allow images to be uploaded.
+ message = 'Files containing images cannot be uploaded to a Galaxy tool shed.'
+ ok = False
+ return ok, message
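
The URL branch added to upload() above streams the remote file to a temp file in CHUNK_SIZE pieces rather than reading it all into memory. The same loop in isolation (Python 2 urllib, as in the diff); one small deviation is os.fdopen(), which reuses and eventually closes the descriptor mkstemp returns, something the diff leaves open:

import os, tempfile, urllib

CHUNK_SIZE = 2**20 # 1Mb, matching the controller's constant

def download_to_tempfile( url ):
    # Stream url to a named temp file; returns ( path, filename, isempty ).
    stream = urllib.urlopen( url )
    fd, uploaded_file_name = tempfile.mkstemp()
    uploaded_file = os.fdopen( fd, 'wb' )
    while 1:
        chunk = stream.read( CHUNK_SIZE )
        if not chunk:
            break
        uploaded_file.write( chunk )
    uploaded_file.close()
    uploaded_file_filename = url.split( '/' )[ -1 ]
    isempty = os.path.getsize( uploaded_file_name ) == 0
    return uploaded_file_name, uploaded_file_filename, isempty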
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -83,7 +83,7 @@
hg clone <a href="${clone_str}">${clone_str}</a></%def>
-<%def name="render_repository_tools_and_workflows( repository_metadata_id, metadata, can_set_metadata=False, webapp='community' )">
+<%def name="render_repository_items( repository_metadata_id, metadata, can_set_metadata=False, webapp='community' )"><% from galaxy.webapps.community.controllers.common import encode, decode %>
%if metadata or can_set_metadata:
<p/>
@@ -195,6 +195,45 @@
</div><div style="clear: both"></div>
%endif
+ %if 'datatypes' in metadata:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><b>Data types</b></td>
+ </tr>
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <% datatypes_dicts = metadata[ 'datatypes' ] %>
+ <table class="grid">
+ <tr>
+ <td><b>extension</b></td>
+ <td><b>dtype</b></td>
+ <td><b>mimetype</b></td>
+ </tr>
+ %for datatypes_dict in datatypes_dicts:
+ <%
+ extension = datatypes_dict[ 'extension' ]
+ dtype = datatypes_dict[ 'dtype' ]
+ mimetype = datatypes_dict[ 'mimetype' ]
+ %>
+ <tr>
+ <td>${extension}</td>
+ <td>${dtype}</td>
+ <td>
+ %if mimetype:
+ ${mimetype}
+ %else:
+
+ %endif
+ </td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
%endif
%if can_set_metadata:
<form name="set_metadata" action="${h.url_for( controller='repository', action='set_metadata', id=trans.security.encode_id( repository.id ), ctx_str=changeset_revision )}" method="post">
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 templates/webapps/community/repository/manage_repository.mako
--- a/templates/webapps/community/repository/manage_repository.mako
+++ b/templates/webapps/community/repository/manage_repository.mako
@@ -184,7 +184,7 @@
</form></div></div>
-${render_repository_tools_and_workflows( repository_metadata_id, metadata, can_set_metadata=True )}
+${render_repository_items( repository_metadata_id, metadata, can_set_metadata=True )}
<p/><div class="toolForm"><div class="toolFormTitle">Manage categories</div>
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 templates/webapps/community/repository/preview_tools_in_changeset.mako
--- a/templates/webapps/community/repository/preview_tools_in_changeset.mako
+++ b/templates/webapps/community/repository/preview_tools_in_changeset.mako
@@ -104,4 +104,4 @@
</div></div><p/>
-${render_repository_tools_and_workflows( repository_metadata_id, metadata, webapp=webapp )}
+${render_repository_items( repository_metadata_id, metadata, webapp=webapp )}
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 templates/webapps/community/repository/upload.mako
--- a/templates/webapps/community/repository/upload.mako
+++ b/templates/webapps/community/repository/upload.mako
@@ -73,7 +73,16 @@
</div><div style="clear: both"></div></div>
-
+ <div class="form-row">
+ <label>Url:</label>
+ <div class="form-row-input">
+ <input name="url" type="textfield" value="${url}" size="40"/>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Enter a URL to upload your files via http.
+ </div>
+ <div style="clear: both"></div>
+ </div><div class="form-row"><%
if uncompress_file:
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 templates/webapps/community/repository/view_repository.mako
--- a/templates/webapps/community/repository/view_repository.mako
+++ b/templates/webapps/community/repository/view_repository.mako
@@ -186,7 +186,7 @@
%endif
</div></div>
-${render_repository_tools_and_workflows( repository_metadata_id, metadata, webapp=webapp )}
+${render_repository_items( repository_metadata_id, metadata, webapp=webapp )}
%if repository.categories:
<p/><div class="toolForm">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: dan: Tool help updates.
by Bitbucket 14 Nov '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/128f167ce12b/
changeset: 128f167ce12b
user: dan
date: 2011-11-14 19:26:29
summary: Tool help updates.
affected #: 4 files
diff -r 4a47e724738420ce883ffb27b31e280c93ceefcd -r 128f167ce12bd5723c5a8124d1d7ea692daf240f tools/filters/secure_hash_message_digest.xml
--- a/tools/filters/secure_hash_message_digest.xml
+++ b/tools/filters/secure_hash_message_digest.xml
@@ -35,5 +35,11 @@
This tool outputs Secure Hashes / Message Digests of a dataset using the user selected algorithms.
+------
+
+**Citation**
+
+If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
+
</help></tool>
diff -r 4a47e724738420ce883ffb27b31e280c93ceefcd -r 128f167ce12bd5723c5a8124d1d7ea692daf240f tools/filters/wc_gnu.xml
--- a/tools/filters/wc_gnu.xml
+++ b/tools/filters/wc_gnu.xml
@@ -62,5 +62,11 @@
#lines words characters
7499 41376 624971
+------
+
+**Citation**
+
+If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
+
</help></tool>
diff -r 4a47e724738420ce883ffb27b31e280c93ceefcd -r 128f167ce12bd5723c5a8124d1d7ea692daf240f tools/peak_calling/ccat_wrapper.xml
--- a/tools/peak_calling/ccat_wrapper.xml
+++ b/tools/peak_calling/ccat_wrapper.xml
@@ -112,7 +112,7 @@
<output name="output_top_file" file="peakcalling_ccat/3.0/ccat_test_top_out_1.interval.sorted.re_match" compare="re_match" sort="true" /><output name="output_log_file" file="peakcalling_ccat/3.0/ccat_test_log_out_1.txt" /></test>
- <!-- Test below gives different results on different architectures,
+ <!-- Test below gives different answers on different architectures,
e.g.: x86_64 GNU/Linux gave an extra line (additional peak called) when compared to the version running on 10.6.0 Darwin i386
slidingWinSize was fixed to be 1000, default as per readme.txt
-->
@@ -140,6 +140,8 @@
**Citation**
+For the underlying tool, please cite `Xu H, Handoko L, Wei X, Ye C, Sheng J, Wei CL, Lin F, Sung WK. A signal-noise model for significance analysis of ChIP-seq with negative control. Bioinformatics. 2010 May 1;26(9):1199-204. <http://www.ncbi.nlm.nih.gov/pubmed/20371496>`_
+
If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
</help>
diff -r 4a47e724738420ce883ffb27b31e280c93ceefcd -r 128f167ce12bd5723c5a8124d1d7ea692daf240f tools/peak_calling/macs_wrapper.xml
--- a/tools/peak_calling/macs_wrapper.xml
+++ b/tools/peak_calling/macs_wrapper.xml
@@ -231,6 +231,8 @@
**Citation**
+For the underlying tool, please cite `Zhang Y, Liu T, Meyer CA, Eeckhoute J, Johnson DS, Bernstein BE, Nusbaum C, Myers RM, Brown M, Li W, Liu XS. Model-based analysis of ChIP-Seq (MACS). Genome Biol. 2008;9(9):R137. <http://www.ncbi.nlm.nih.gov/pubmed/18798982>`_
+
If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
</help>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Make history UI icons compatible with changes in 9dbc82483bd2.
by Bitbucket 14 Nov '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4a47e7247384/
changeset: 4a47e7247384
user: jgoecks
date: 2011-11-14 18:12:52
summary: Make history UI icons compatible with changes in 9dbc82483bd2.
affected #: 2 files
diff -r 7a72e5299fdc7b9723ca108130aab13951385f9b -r 4a47e724738420ce883ffb27b31e280c93ceefcd static/scripts/galaxy.base.js
--- a/static/scripts/galaxy.base.js
+++ b/static/scripts/galaxy.base.js
@@ -555,7 +555,7 @@
});
// Generate 'collapse all' link
- $("#top-links > a.toggle").click( function() {
+ $("#top-links > a.toggle-contract").click( function() {
var prefs = $.jStorage.get("history_expand_state");
if (!prefs) { prefs = {}; }
$( "div.historyItemBody:visible" ).each( function() {
diff -r 7a72e5299fdc7b9723ca108130aab13951385f9b -r 4a47e724738420ce883ffb27b31e280c93ceefcd templates/root/history.mako
--- a/templates/root/history.mako
+++ b/templates/root/history.mako
@@ -432,7 +432,7 @@
<div id="top-links" class="historyLinks"><a title="${_('refresh')}" class="icon-button arrow-circle tooltip" href="${h.url_for('history', show_deleted=show_deleted)}"></a>
- <a title='${_('collapse all')}' class='icon-button toggle tooltip' href='#' style="display: none;"></a>
+ <a title='${_('collapse all')}' class='icon-button toggle-contract tooltip' href='#' style="display: none"></a>
%if trans.get_user():
<div style="width: 40px; float: right; white-space: nowrap;">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: dan: Fixes for Data source async controller.
by Bitbucket 14 Nov '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7a72e5299fdc/
changeset: 7a72e5299fdc
user: dan
date: 2011-11-14 17:37:02
summary: Fixes for Data source async controller.
affected #: 2 files
diff -r c3a92d8ebf3d2aee1828f0f9187f860bbc01473f -r 7a72e5299fdc7b9723ca108130aab13951385f9b lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -591,7 +591,7 @@
dataset.blurb = 'done'
dataset.peek = 'no peek'
- dataset.info = context['stdout'] + context['stderr']
+ dataset.info = ( dataset.info or '' ) + context['stdout'] + context['stderr']
dataset.tool_version = self.version_string
dataset.set_size()
if context['stderr']:
diff -r c3a92d8ebf3d2aee1828f0f9187f860bbc01473f -r 7a72e5299fdc7b9723ca108130aab13951385f9b lib/galaxy/web/controllers/async.py
--- a/lib/galaxy/web/controllers/async.py
+++ b/lib/galaxy/web/controllers/async.py
@@ -67,7 +67,7 @@
trans.log_event( 'Async executing tool %s' % tool.id, tool_id=tool.id )
galaxy_url = trans.request.base + '/async/%s/%s/%s' % ( tool_id, data.id, key )
galaxy_url = params.get("GALAXY_URL",galaxy_url)
- params = dict( url=URL, GALAXY_URL=galaxy_url )
+ params = dict( URL=URL, GALAXY_URL=galaxy_url, name=data.name, info=data.info, dbkey=data.dbkey, data_type=data.ext )
# Assume there is exactly one output file possible
params[tool.outputs.keys()[0]] = data.id
tool.execute( trans, incoming=params )
@@ -80,20 +80,20 @@
trans.sa_session.flush()
return "Data %s with status %s received. OK" % (data_id, STATUS)
-
- #
- # no data_id must be parameter submission
- #
- if not data_id and len(params)>3:
-
- if params.galaxyFileFormat == 'wig':
+ else:
+ #
+ # no data_id must be parameter submission
+ #
+ if params.data_type:
+ GALAXY_TYPE = params.data_type
+ elif params.galaxyFileFormat == 'wig': #this is an undocumented legacy special case
GALAXY_TYPE = 'wig'
else:
- GALAXY_TYPE = params.GALAXY_TYPE or 'interval'
+ GALAXY_TYPE = params.GALAXY_TYPE or tool.outputs.values()[0].format
- GALAXY_NAME = params.GALAXY_NAME or '%s query' % tool.name
- GALAXY_INFO = params.GALAXY_INFO or params.galaxyDescription or ''
- GALAXY_BUILD = params.GALAXY_BUILD or params.galaxyFreeze or 'hg17'
+ GALAXY_NAME = params.name or params.GALAXY_NAME or '%s query' % tool.name
+ GALAXY_INFO = params.info or params.GALAXY_INFO or params.galaxyDescription or ''
+ GALAXY_BUILD = params.dbkey or params.GALAXY_BUILD or params.galaxyFreeze or '?'
#data = datatypes.factory(ext=GALAXY_TYPE)()
#data.ext = GALAXY_TYPE
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: greg: Fix for renaming a tool shed repository - hopefully resolves race condition.
by Bitbucket 14 Nov '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c3a92d8ebf3d/
changeset: c3a92d8ebf3d
user: greg
date: 2011-11-14 16:49:51
summary: Fix for renaming a tool shed repository - hopefully resolves race condition.
affected #: 1 file
diff -r 744902bbb2b94202f361f962f626e95a2bc18111 -r c3a92d8ebf3d2aee1828f0f9187f860bbc01473f lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1,4 +1,4 @@
-import os, logging, urllib, ConfigParser, tempfile, shutil
+import os, logging, tempfile, shutil
from time import strftime
from datetime import date, datetime
from galaxy import util
@@ -1001,20 +1001,23 @@
hgweb_config_copy = '%s/hgweb.config_%s_backup' % ( trans.app.config.root, backup_date )
shutil.copy( os.path.abspath( hgweb_config ), os.path.abspath( hgweb_config_copy ) )
def __add_hgweb_config_entry( self, trans, repository, repository_path ):
- # Add an entry in the hgweb.config file for a new repository.
- # An entry looks something like:
+ # Add an entry in the hgweb.config file for a new repository. An entry looks something like:
# repos/test/mira_assembler = database/community_files/000/repo_123.
hgweb_config = "%s/hgweb.config" % trans.app.config.root
- # Make a backup of the hgweb.config file since we're going to be changing it.
- self.__make_hgweb_config_copy( trans, hgweb_config )
+ if repository_path.startswith( './' ):
+ repository_path = repository_path.replace( './', '', 1 )
entry = "repos/%s/%s = %s" % ( repository.user.username, repository.name, repository_path.lstrip( './' ) )
if os.path.exists( hgweb_config ):
- output = open( hgweb_config, 'a' )
+ # Make a backup of the hgweb.config file since we're going to be changing it.
+ self.__make_hgweb_config_copy( trans, hgweb_config )
+ tmp_fname = tempfile.NamedTemporaryFile()
+ for i, line in enumerate( open( hgweb_config ) ):
+ tmp_fname.write( line )
else:
- output = open( hgweb_config, 'w' )
- output.write( '[paths]\n' )
- output.write( "%s\n" % entry )
- output.close()
+ tmp_fname.write( '[paths]\n' )
+ tmp_fname.write( "%s\n" % entry )
+ tmp_fname.flush()
+ shutil.move( tmp_fname.name, os.path.abspath( hgweb_config ) )
def __change_hgweb_config_entry( self, trans, repository, old_repository_name, new_repository_name ):
# Change an entry in the hgweb.config file for a repository. This only happens when
# the owner changes the name of the repository. An entry looks something like:
@@ -1024,16 +1027,15 @@
self.__make_hgweb_config_copy( trans, hgweb_config )
repo_dir = repository.repo_path
old_lhs = "repos/%s/%s" % ( repository.user.username, old_repository_name )
- old_entry = "%s = %s" % ( old_lhs, repo_dir )
new_entry = "repos/%s/%s = %s\n" % ( repository.user.username, new_repository_name, repo_dir )
- tmp_fd, tmp_fname = tempfile.mkstemp()
- new_hgweb_config = open( tmp_fname, 'wb' )
+ tmp_fname = tempfile.NamedTemporaryFile()
for i, line in enumerate( open( hgweb_config ) ):
if line.startswith( old_lhs ):
- new_hgweb_config.write( new_entry )
+ tmp_fname.write( new_entry )
else:
- new_hgweb_config.write( line )
- shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
+ tmp_fname.write( line )
+ tmp_fname.flush()
+ shutil.move( tmp_fname.name, os.path.abspath( hgweb_config ) )
def __create_hgrc_file( self, repository ):
# At this point, an entry for the repository is required to be in the hgweb.config
# file so we can call repository.repo_path.
@@ -1306,7 +1308,7 @@
if params.get( 'edit_repository_button', False ):
flush_needed = False
# TODO: add a can_manage in the security agent.
- if user != repository.user:
+ if user != repository.user or not trans.user_is_admin():
message = "You are not the owner of this repository, so you cannot manage it."
status = error
return trans.response.send_redirect( web.url_for( controller='repository',
@@ -1315,6 +1317,12 @@
webapp='community',
message=message,
status=status ) )
+ if description != repository.description:
+ repository.description = description
+ flush_needed = True
+ if long_description != repository.long_description:
+ repository.long_description = long_description
+ flush_needed = True
if repo_name != repository.name:
message = self.__validate_repository_name( repo_name, user )
if message:
@@ -1323,12 +1331,6 @@
self.__change_hgweb_config_entry( trans, repository, repository.name, repo_name )
repository.name = repo_name
flush_needed = True
- if description != repository.description:
- repository.description = description
- flush_needed = True
- if long_description != repository.long_description:
- repository.long_description = long_description
- flush_needed = True
if flush_needed:
trans.sa_session.add( repository )
trans.sa_session.flush()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: fubar: Bumped ALL picard tool versions and made them uniformly 1.56.0 to reflect the current picard tools version Nate's about to unleash.
by Bitbucket 11 Nov '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/744902bbb2b9/
changeset: 744902bbb2b9
user: fubar
date: 2011-11-11 18:25:19
summary: Bumped ALL picard tool versions and made them uniformly 1.56.0 to reflect the current picard tools version Nate's about to unleash.
affected #: 14 files
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/picard_AddOrReplaceReadGroups.xml
--- a/tools/picard/picard_AddOrReplaceReadGroups.xml
+++ b/tools/picard/picard_AddOrReplaceReadGroups.xml
@@ -1,4 +1,4 @@
-<tool name="Add or Replace Groups" id="picard_ARRG" version="0.2.1">
+<tool name="Add or Replace Groups" id="picard_ARRG" version="1.56.0"><requirements><requirement type="package">picard</requirement></requirements><command interpreter="python">
picard_wrapper.py
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/picard_BamIndexStats.xml
--- a/tools/picard/picard_BamIndexStats.xml
+++ b/tools/picard/picard_BamIndexStats.xml
@@ -1,4 +1,4 @@
-<tool name="BAM Index Statistics" id="picard_BamIndexStats" version="0.2.1">
+<tool name="BAM Index Statistics" id="picard_BamIndexStats" version="1.56.0"><requirements><requirement type="package">picard</requirement></requirements><command interpreter="python">
picard_wrapper.py
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/picard_FastqToSam.xml
--- a/tools/picard/picard_FastqToSam.xml
+++ b/tools/picard/picard_FastqToSam.xml
@@ -1,4 +1,4 @@
-<tool id="picard_FastqToSam" name="FASTQ to BAM" version="0.0.1">
+<tool id="picard_FastqToSam" name="FASTQ to BAM" version="1.56.0"><description>creates an unaligned BAM file</description><requirements><requirement type="package">picard</requirement></requirements><command>java -XX:DefaultMaxRAMFraction=1 -XX:+UseParallelGC
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/picard_MarkDuplicates.xml
--- a/tools/picard/picard_MarkDuplicates.xml
+++ b/tools/picard/picard_MarkDuplicates.xml
@@ -1,4 +1,4 @@
-<tool name="Mark Duplicates" id="picard_MarkDuplicates" version="0.01.1">
+<tool name="Mark Duplicates" id="picard_MarkDuplicates" version="1.56.0"><command interpreter="python">
picard_wrapper.py
--input="$input_file"
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/picard_ReorderSam.xml
--- a/tools/picard/picard_ReorderSam.xml
+++ b/tools/picard/picard_ReorderSam.xml
@@ -1,4 +1,4 @@
-<tool name="Reorder SAM/BAM" id="picard_ReorderSam" version="0.3.1">
+<tool name="Reorder SAM/BAM" id="picard_ReorderSam" version="1.56.0"><requirements><requirement type="package">picard</requirement></requirements><command interpreter="python">
picard_wrapper.py
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/picard_ReplaceSamHeader.xml
--- a/tools/picard/picard_ReplaceSamHeader.xml
+++ b/tools/picard/picard_ReplaceSamHeader.xml
@@ -1,4 +1,4 @@
-<tool name="Replace SAM/BAM Header" id="picard_ReplaceSamHeader" version="0.2.1">
+<tool name="Replace SAM/BAM Header" id="picard_ReplaceSamHeader" version="1.56.0"><requirements><requirement type="package">picard</requirement></requirements><command interpreter="python">
picard_wrapper.py
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/picard_SamToFastq.xml
--- a/tools/picard/picard_SamToFastq.xml
+++ b/tools/picard/picard_SamToFastq.xml
@@ -1,4 +1,4 @@
-<tool id="picard_SamToFastq" name="SAM to FASTQ" version="0.0.1">
+<tool id="picard_SamToFastq" name="SAM to FASTQ" version="1.56.0"><description>creates a FASTQ file</description><requirements><requirement type="package">picard</requirement></requirements><command>java -XX:DefaultMaxRAMFraction=1 -XX:+UseParallelGC
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/rgPicardASMetrics.xml
--- a/tools/picard/rgPicardASMetrics.xml
+++ b/tools/picard/rgPicardASMetrics.xml
@@ -1,4 +1,4 @@
-<tool name="SAM/BAM Alignment Summary Metrics" id="PicardASMetrics" version="0.03.1">
+<tool name="SAM/BAM Alignment Summary Metrics" id="PicardASMetrics" version="1.56.0"><command interpreter="python">
picard_wrapper.py -i "$input_file" -d "$html_file.files_path" -t "$html_file"
--assumesorted "$sorted" -b "$bisulphite" --adaptors "$adaptors" --maxinsert "$maxinsert" -n "$out_prefix" --datatype "$input_file.ext"
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/rgPicardFixMate.xml
--- a/tools/picard/rgPicardFixMate.xml
+++ b/tools/picard/rgPicardFixMate.xml
@@ -1,4 +1,4 @@
-<tool name="Paired Read Mate Fixer" id="rgPicFixMate" version="0.2.1">
+<tool name="Paired Read Mate Fixer" id="rgPicFixMate" version="1.56.0"><description>for paired data</description><command interpreter="python">
picard_wrapper.py -i "$input_file" -o "$out_file" --tmpdir "${__new_file_path__}" -n "$out_prefix"
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/rgPicardGCBiasMetrics.xml
--- a/tools/picard/rgPicardGCBiasMetrics.xml
+++ b/tools/picard/rgPicardGCBiasMetrics.xml
@@ -1,4 +1,4 @@
-<tool name="SAM/BAM GC Bias Metrics" id="PicardGCBiasMetrics" version="0.02.1">
+<tool name="SAM/BAM GC Bias Metrics" id="PicardGCBiasMetrics" version="1.56.0"><command interpreter="python">
picard_wrapper.py -i "$input_file" -d "$html_file.files_path" -t "$html_file"
--windowsize "$windowsize" --mingenomefrac "$mingenomefrac" -n "$out_prefix" --tmpdir "${__new_file_path__}"
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/rgPicardHsMetrics.xml
--- a/tools/picard/rgPicardHsMetrics.xml
+++ b/tools/picard/rgPicardHsMetrics.xml
@@ -1,4 +1,4 @@
-<tool name="SAM/BAM Hybrid Selection Metrics" id="PicardHsMetrics" version="0.02.1">
+<tool name="SAM/BAM Hybrid Selection Metrics" id="PicardHsMetrics" version="1.56.0"><description>for targeted resequencing data</description><command interpreter="python">
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/rgPicardInsertSize.xml
--- a/tools/picard/rgPicardInsertSize.xml
+++ b/tools/picard/rgPicardInsertSize.xml
@@ -1,4 +1,4 @@
-<tool name="Insertion size metrics" id="PicardInsertSize" version="0.3.2">
+<tool name="Insertion size metrics" id="PicardInsertSize" version="1.56.0"><description>for PAIRED data</description><requirements><requirement type="package">picard</requirement></requirements><command interpreter="python">
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/rgPicardLibComplexity.xml
--- a/tools/picard/rgPicardLibComplexity.xml
+++ b/tools/picard/rgPicardLibComplexity.xml
@@ -1,4 +1,4 @@
-<tool name="Estimate Library Complexity" id="rgEstLibComp" version="0.01.1">
+<tool name="Estimate Library Complexity" id="rgEstLibComp" version="1.56.0"><command interpreter="python">
picard_wrapper.py -i "$input_file" -n "$out_prefix" --tmpdir "${__new_file_path__}" --minid "$minIDbases"
--maxdiff "$maxDiff" --minmeanq "$minMeanQ" --readregex "$readRegex" --optdupdist "$optDupeDist"
diff -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd -r 744902bbb2b94202f361f962f626e95a2bc18111 tools/picard/rgPicardMarkDups.xml
--- a/tools/picard/rgPicardMarkDups.xml
+++ b/tools/picard/rgPicardMarkDups.xml
@@ -1,4 +1,4 @@
-<tool name="Mark Duplicate reads" id="rgPicardMarkDups" version="0.01.1">
+<tool name="Mark Duplicate reads" id="rgPicardMarkDups" version="1.56.0"><command interpreter="python">
picard_wrapper.py -i "$input_file" -n "$out_prefix" --tmpdir "${__new_file_path__}" -o "$out_file"
--remdups "$remDups" --assumesorted "$assumeSorted" --readregex "$readRegex" --optdupdist "$optDupeDist"
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a807ed5389f4/
changeset: a807ed5389f4
user: fubar
date: 2011-11-10 22:23:15
summary: Updates for picard tools 1.56 - they changed the params for CollectInsertSizeMetrics.jar - bless them - so
you will need to update your local picard install in tool-data/shared/jars/picard
Fixes to picard_wrapper.py so that if a picard tool returns a non-zero exit code, it will raise an error state.
Sadly, this doesn't help the CollectInsertSizeMetrics problem it was meant to address - it will die
with no outputs if given an input containing unpaired reads - but the exit code is still zero.
Talk about irony.
At least it should work right if they ever get around to setting exit codes to indicate a problem.
affected #: 6 files
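The exit-code handling described in the summary reduces to a simple pattern: capture the subprocess return code, pass it back up, and write to stderr on a non-zero value so Galaxy flags the job as failed. A minimal sketch under those assumptions (hypothetical command line, not the wrapper itself; Python 2 to match the code below):

    import subprocess
    import sys

    def run_cl( cl ):
        # Run a command line, merging stderr into stdout so the log is
        # complete, and return the output text plus the exit code.
        proc = subprocess.Popen( cl, shell=True, stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT )
        stdouts, ignored = proc.communicate()
        return stdouts, proc.returncode

    # The jar name is illustrative; any non-zero exit should surface on stderr.
    stdouts, rval = run_cl( 'java -jar CollectInsertSizeMetrics.jar' )
    if rval <> 0:
        print >> sys.stderr, '## exit code=%d; stdout=%s' % ( rval, stdouts )

As the summary notes, this only helps when the wrapped tool actually sets a non-zero exit code on failure.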
diff -r 6ec2d7f4a64dcf6a1c49415bd8f40c8d4ca907c5 -r a807ed5389f4da39f509aa45e5baf7027ceb7a91 test-data/picard_input_sorted_pair.sam
--- a/test-data/picard_input_sorted_pair.sam
+++ b/test-data/picard_input_sorted_pair.sam
@@ -5,11 +5,11 @@
@RG ID:rg1 SM:Z
bar:record:1 77 chr1 10 0 * * 0 0 AAAAAAAAAAAAA 1111111111111 RG:Z:rg1
bar:record:1 141 chr1 20 0 * * 0 0 CCCCCCCCCCCCC 2222222222222 RG:Z:rg1
-bar:record:2 77 chr2 10 0 * * 0 0 AAAAAAAAAAAAA 1111111111111 RG:Z:rg1
-bar:record:2 141 chr2 30 0 * * 0 0 CCCCCCCCCCCCC 2222222222222 RG:Z:rg1
-bar:record:3 77 chr1 10 0 * * 0 0 AAAAAAAAAAAAA 1111111111111 RG:Z:rg1
-bar:record:3 141 chr3 20 0 * * 0 0 CCCCCCCCCCCCC 2222222222222 RG:Z:rg1
-bar:record:4 77 chr1 1 0 * * 0 0 AAAAAAAAAAAAA 1111111111111 RG:Z:rg1
-bar:record:4 141 chr1 40 0 * * 0 0 CCCCCCCCCCCCC 2222222222222 RG:Z:rg1
-bar:record:5 77 chr1 40 0 * * 0 0 AAAAAAAAAAAAA 1111111111111 RG:Z:rg1
-bar:record:5 141 chr3 40 0 * * 0 0 CCCCCCCCCCCCC 2222222222222 RG:Z:rg1
+bar:record:2 77 chr1 40 0 * * 0 0 AAAAAAAAAAAAA 1111111111111 RG:Z:rg1
+bar:record:2 141 chr1 50 0 * * 0 0 CCCCCCCCCCCCC 2222222222222 RG:Z:rg1
+bar:record:3 77 chr2 10 0 * * 0 0 AAAAAAAAAAAAA 1111111111111 RG:Z:rg1
+bar:record:3 141 chr2 20 0 * * 0 0 CCCCCCCCCCCCC 2222222222222 RG:Z:rg1
+bar:record:4 77 chr2 50 0 * * 0 0 AAAAAAAAAAAAA 1111111111111 RG:Z:rg1
+bar:record:4 141 chr2 60 0 * * 0 0 CCCCCCCCCCCCC 2222222222222 RG:Z:rg1
+bar:record:5 77 chr3 40 0 * * 0 0 AAAAAAAAAAAAA 1111111111111 RG:Z:rg1
+bar:record:5 141 chr3 50 0 * * 0 0 CCCCCCCCCCCCC 2222222222222 RG:Z:rg1
diff -r 6ec2d7f4a64dcf6a1c49415bd8f40c8d4ca907c5 -r a807ed5389f4da39f509aa45e5baf7027ceb7a91 test-data/picard_output_AsMetrics_indexed_hg18_sorted_pair.html
--- a/test-data/picard_output_AsMetrics_indexed_hg18_sorted_pair.html
+++ b/test-data/picard_output_AsMetrics_indexed_hg18_sorted_pair.html
@@ -6,28 +6,51 @@
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"><head><meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-<meta name="generator" content="Galaxy CollectAlignmentSummaryMetrics tool output - see http://getgalaxy.org/" />
+<meta name="generator" content="Galaxy picard_wrapper tool output - see http://getgalaxy.org/" /><title></title><link rel="stylesheet" href="/static/style/base.css" type="text/css" /></head><body><div class="document">
-Galaxy tool wrapper picard_wrapper at 09/05/2011 11:03:57</b><br/><b>The following output files were created (click the filename to view/download a copy):</b><hr/><table>
+Galaxy tool CollectAlignmentSummaryMetrics run at 11/11/2011 08:07:27</b><br/><b>The following output files were created (click the filename to view/download a copy):</b><hr/><table><tr><td><a href="CollectAlignmentSummaryMetrics.log">CollectAlignmentSummaryMetrics.log</a></td></tr>
+<tr><td><a href="CollectAlignmentSummaryMetrics.metrics.txt">CollectAlignmentSummaryMetrics.metrics.txt</a></td></tr></table><p/>
-<b>Picard log</b><hr/>
-<pre>## executing java -Xmx2g -jar /udd/rerla/galaxy-central/tool-data/shared/jars/CollectAlignmentSummaryMetrics.jar VALIDATION_STRINGENCY=LENIENT ASSUME_SORTED=true ADAPTER_SEQUENCE= IS_BISULFITE_SEQUENCED=false MAX_INSERT_SIZE=100000 OUTPUT=/udd/rerla/galaxy-central/database/job_working_directory/5/dataset_5_files/CollectAlignmentSummaryMetrics.metrics.txt R=/udd/rerla/galaxy-central/database/job_working_directory/5/dataset_5_files/hg19.fasta_fake.fasta TMP_DIR=/tmp INPUT=/export/tmp/tmpBrCiH5/database/files/000/dataset_4.dat returned status 1 and stderr:
-[Mon May 09 11:03:51 EDT 2011] net.sf.picard.analysis.CollectAlignmentSummaryMetrics MAX_INSERT_SIZE=100000 ADAPTER_SEQUENCE=[AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCTCGTATGCCGTCTTCTGCTTG, AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCGGTTCAGCAGGAATGCCGAGACCGATCTCGTATGCCGTCTTCTGCTTG, AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCACACGTCTGAACTCCAGTCACNNNNNNNNATCTCGTATGCCGTCTTCTGCTTG, IS_BISULFITE_SEQUENCED=false] INPUT=/export/tmp/tmpBrCiH5/database/files/000/dataset_4.dat OUTPUT=/udd/rerla/galaxy-central/database/job_working_directory/5/dataset_5_files/CollectAlignmentSummaryMetrics.metrics.txt REFERENCE_SEQUENCE=/udd/rerla/galaxy-central/database/job_working_directory/5/dataset_5_files/hg19.fasta_fake.fasta ASSUME_SORTED=true TMP_DIR=/tmp VALIDATION_STRINGENCY=LENIENT IS_BISULFITE_SEQUENCED=false STOP_AFTER=0 VERBOSITY=INFO QUIET=false COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false
-[Mon May 09 11:03:57 EDT 2011] net.sf.picard.analysis.CollectAlignmentSummaryMetrics done.
-Runtime.totalMemory()=912588800
-Exception in thread "main" net.sf.picard.PicardException: Requesting earlier reference sequence: 0 < 1
- at net.sf.picard.reference.ReferenceSequenceFileWalker.get(ReferenceSequenceFileWalker.java:78)
- at net.sf.picard.analysis.SinglePassSamProgram.makeItSo(SinglePassSamProgram.java:115)
- at net.sf.picard.analysis.SinglePassSamProgram.doWork(SinglePassSamProgram.java:54)
- at net.sf.picard.cmdline.CommandLineProgram.instanceMain(CommandLineProgram.java:157)
- at net.sf.picard.cmdline.CommandLineProgram.instanceMainWithExit(CommandLineProgram.java:117)
- at net.sf.picard.analysis.CollectAlignmentSummaryMetrics.main(CollectAlignmentSummaryMetrics.java:106)
-
+<b>Picard on line resources</b><ul>
+<li><a href="http://picard.sourceforge.net/index.shtml">Click here for Picard Documentation</a></li>
+<li><a href="http://picard.sourceforge.net/picard-metric-definitions.shtml">Click here for Picard Metrics definitions</a></li></ul><hr/>
+<b>Picard output (transposed to make it easier to see)</b><hr/>
+<table cellpadding="3" >
+<tr class="d0"><td colspan="2">## net.sf.picard.metrics.StringHeader</td></tr><tr class="d1"><td colspan="2"># net.sf.picard.analysis.CollectAlignmentSummaryMetrics MAX_INSERT_SIZE=100000 ADAPTER_SEQUENCE=[AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCTCGTATGCCGTCTTCTGCTTG, AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCGGTTCAGCAGGAATGCCGAGACCGATCTCGTATGCCGTCTTCTGCTTG, AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCACACGTCTGAACTCCAGTCACNNNNNNNNATCTCGTATGCCGTCTTCTGCTTG, IS_BISULFITE_SEQUENCED=false] INPUT=/data/tmp/tmpLLcl1w/database/files/000/dataset_4.dat OUTPUT=/data/home/rlazarus/galaxy/database/job_working_directory/5/dataset_5_files/CollectAlignmentSummaryMetrics.metrics.txt REFERENCE_SEQUENCE=/data/home/rlazarus/galaxy/database/job_working_directory/5/dataset_5_files/hg19.fa_fake.fasta ASSUME_SORTED=true TMP_DIR=[/tmp] VALIDATION_STRINGENCY=LENIENT METRIC_ACCUMULATION_LEVEL=[ALL_READS] IS_BISULFITE_SEQUENCED=false STOP_AFTER=0 VERBOSITY=INFO QUIET=false COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false</td></tr><tr class="d0"><td colspan="2">## net.sf.picard.metrics.StringHeader</td></tr><tr class="d1"><td colspan="2"># Started on: Fri Nov 11 08:07:22 EST 2011</td></tr><tr class="d0"><td colspan="2">## METRICS CLASS net.sf.picard.analysis.AlignmentSummaryMetrics</td></tr><tr class="d0"><td>CATEGORY</td><td>FIRST_OF_PAIR </td></tr>
+<tr class="d1"><td>TOTAL_READS</td><td>5 </td></tr>
+<tr class="d0"><td>PF_READS</td><td>5 </td></tr>
+<tr class="d1"><td>PCT_PF_READS</td><td>1 </td></tr>
+<tr class="d0"><td>PF_NOISE_READS</td><td>0 </td></tr>
+<tr class="d1"><td>PF_READS_ALIGNED</td><td>0 </td></tr>
+<tr class="d0"><td>PCT_PF_READS_ALIGNED</td><td>0 </td></tr>
+<tr class="d1"><td>PF_ALIGNED_BASES</td><td>0 </td></tr>
+<tr class="d0"><td>PF_HQ_ALIGNED_READS</td><td>0 </td></tr>
+<tr class="d1"><td>PF_HQ_ALIGNED_BASES</td><td>0 </td></tr>
+<tr class="d0"><td>PF_HQ_ALIGNED_Q20_BASES</td><td>0 </td></tr>
+<tr class="d1"><td>PF_HQ_MEDIAN_MISMATCHES</td><td>0 </td></tr>
+<tr class="d0"><td>PF_MISMATCH_RATE</td><td>0 </td></tr>
+<tr class="d1"><td>PF_HQ_ERROR_RATE</td><td>0 </td></tr>
+<tr class="d0"><td>PF_INDEL_RATE</td><td>0 </td></tr>
+<tr class="d1"><td>MEAN_READ_LENGTH</td><td>13 </td></tr>
+<tr class="d0"><td>READS_ALIGNED_IN_PAIRS</td><td>0 </td></tr>
+<tr class="d1"><td>PCT_READS_ALIGNED_IN_PAIRS</td><td>0 </td></tr>
+<tr class="d0"><td>BAD_CYCLES</td><td>0 </td></tr>
+<tr class="d1"><td>STRAND_BALANCE</td><td>0 </td></tr>
+<tr class="d0"><td>PCT_CHIMERAS</td><td>0 </td></tr>
+<tr class="d1"><td>PCT_ADAPTER</td><td>0 </td></tr>
+<tr class="d0"><td>SAMPLE</td><td> </td></tr>
+<tr class="d1"><td>LIBRARY</td><td> </td></tr>
+<tr class="d0"><td>READ_GROUP
+</td><td>
+ </td></tr>
+</table>
+<b>Picard Tool Run Log</b><hr/>
+<pre>INFO:root:## executing java -Xmx4g -jar /data/home/rlazarus/galaxy/tool-data/shared/jars/picard/CollectAlignmentSummaryMetrics.jar VALIDATION_STRINGENCY=LENIENT ASSUME_SORTED=true ADAPTER_SEQUENCE= IS_BISULFITE_SEQUENCED=false MAX_INSERT_SIZE=100000 OUTPUT=/data/home/rlazarus/galaxy/database/job_working_directory/5/dataset_5_files/CollectAlignmentSummaryMetrics.metrics.txt R=/data/home/rlazarus/galaxy/database/job_working_directory/5/dataset_5_files/hg19.fa_fake.fasta TMP_DIR=/tmp INPUT=/data/tmp/tmpLLcl1w/database/files/000/dataset_4.dat returned status 0 and nothing on stderr
</pre><hr/>The freely available <a href="http://picard.sourceforge.net/command-line-overview.shtml">Picard software</a>
generated all outputs reported here running as a <a href="http://getgalaxy.org">Galaxy</a> tool</div></body></html>
diff -r 6ec2d7f4a64dcf6a1c49415bd8f40c8d4ca907c5 -r a807ed5389f4da39f509aa45e5baf7027ceb7a91 test-data/picard_output_alignment_summary_metrics.html
--- a/test-data/picard_output_alignment_summary_metrics.html
+++ b/test-data/picard_output_alignment_summary_metrics.html
@@ -12,7 +12,7 @@
</head><body><div class="document">
-Galaxy tool CollectAlignmentSummaryMetrics run at 11/05/2011 23:16:24</b><br/><b>The following output files were created (click the filename to view/download a copy):</b><hr/><table>
+Galaxy tool CollectAlignmentSummaryMetrics run at 11/11/2011 08:07:10</b><br/><b>The following output files were created (click the filename to view/download a copy):</b><hr/><table><tr><td><a href="CollectAlignmentSummaryMetrics.log">CollectAlignmentSummaryMetrics.log</a></td></tr><tr><td><a href="CollectAlignmentSummaryMetrics.metrics.txt">CollectAlignmentSummaryMetrics.metrics.txt</a></td></tr></table><p/>
@@ -21,43 +21,38 @@
<li><a href="http://picard.sourceforge.net/picard-metric-definitions.shtml">Click here for Picard Metrics definitions</a></li></ul><hr/><b>Picard output (transposed to make it easier to see)</b><hr/><table cellpadding="3" >
-<tr class="d0"><td colspan="2">## net.sf.picard.metrics.StringHeader</td></tr><tr class="d1"><td colspan="2"># net.sf.picard.analysis.CollectAlignmentSummaryMetrics MAX_INSERT_SIZE=100000 ADAPTER_SEQUENCE=[AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCTCGTATGCCGTCTTCTGCTTG, AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCGGTTCAGCAGGAATGCCGAGACCGATCTCGTATGCCGTCTTCTGCTTG, AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCACACGTCTGAACTCCAGTCACNNNNNNNNATCTCGTATGCCGTCTTCTGCTTG, IS_BISULFITE_SEQUENCED=false] INPUT=/export/tmp/tmp1-mt_l/database/files/000/dataset_2.dat OUTPUT=/udd/rerla/galaxy-central/database/job_working_directory/3/dataset_3_files/CollectAlignmentSummaryMetrics.metrics.txt REFERENCE_SEQUENCE=/udd/rerla/galaxy-central/database/job_working_directory/3/dataset_3_files/CollectAlignmentSummaryMetricsZJS8q6.fasta_fake.fasta ASSUME_SORTED=true TMP_DIR=/tmp VALIDATION_STRINGENCY=LENIENT IS_BISULFITE_SEQUENCED=false STOP_AFTER=0 VERBOSITY=INFO QUIET=false COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false</td></tr><tr class="d0"><td colspan="2">## net.sf.picard.metrics.StringHeader</td></tr><tr class="d1"><td colspan="2"># Started on: Wed May 11 23:16:24 EDT 2011</td></tr><tr class="d0"><td colspan="2">## METRICS CLASS net.sf.picard.analysis.AlignmentSummaryMetrics</td></tr><tr class="d0"><td>CATEGORY</td><td>FIRST_OF_PAIR </td></tr>
+<tr class="d0"><td colspan="2">## net.sf.picard.metrics.StringHeader</td></tr><tr class="d1"><td colspan="2"># net.sf.picard.analysis.CollectAlignmentSummaryMetrics MAX_INSERT_SIZE=100000 ADAPTER_SEQUENCE=[AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCTCGTATGCCGTCTTCTGCTTG, AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCGGTTCAGCAGGAATGCCGAGACCGATCTCGTATGCCGTCTTCTGCTTG, AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCACACGTCTGAACTCCAGTCACNNNNNNNNATCTCGTATGCCGTCTTCTGCTTG, IS_BISULFITE_SEQUENCED=false] INPUT=/data/tmp/tmpLLcl1w/database/files/000/dataset_2.dat OUTPUT=/data/home/rlazarus/galaxy/database/job_working_directory/3/dataset_3_files/CollectAlignmentSummaryMetrics.metrics.txt REFERENCE_SEQUENCE=/data/home/rlazarus/galaxy/database/job_working_directory/3/dataset_3_files/CollectAlignmentSummaryMetricsfq2hit.fasta_fake.fasta ASSUME_SORTED=true TMP_DIR=[/tmp] VALIDATION_STRINGENCY=LENIENT METRIC_ACCUMULATION_LEVEL=[ALL_READS] IS_BISULFITE_SEQUENCED=false STOP_AFTER=0 VERBOSITY=INFO QUIET=false COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false</td></tr><tr class="d0"><td colspan="2">## net.sf.picard.metrics.StringHeader</td></tr><tr class="d1"><td colspan="2"># Started on: Fri Nov 11 08:07:10 EST 2011</td></tr><tr class="d0"><td colspan="2">## METRICS CLASS net.sf.picard.analysis.AlignmentSummaryMetrics</td></tr><tr class="d0"><td>CATEGORY</td><td>FIRST_OF_PAIR </td></tr><tr class="d1"><td>TOTAL_READS</td><td>4 </td></tr><tr class="d0"><td>PF_READS</td><td>4 </td></tr><tr class="d1"><td>PCT_PF_READS</td><td>1 </td></tr><tr class="d0"><td>PF_NOISE_READS</td><td>0 </td></tr><tr class="d1"><td>PF_READS_ALIGNED</td><td>4 </td></tr><tr class="d0"><td>PCT_PF_READS_ALIGNED</td><td>1 </td></tr>
-<tr class="d1"><td>PF_HQ_ALIGNED_READS</td><td>4 </td></tr>
-<tr class="d0"><td>PF_HQ_ALIGNED_BASES</td><td>404 </td></tr>
-<tr class="d1"><td>PF_HQ_ALIGNED_Q20_BASES</td><td>28 </td></tr>
-<tr class="d0"><td>PF_HQ_MEDIAN_MISMATCHES</td><td>78 </td></tr>
+<tr class="d1"><td>PF_ALIGNED_BASES</td><td>404 </td></tr>
+<tr class="d0"><td>PF_HQ_ALIGNED_READS</td><td>4 </td></tr>
+<tr class="d1"><td>PF_HQ_ALIGNED_BASES</td><td>404 </td></tr>
+<tr class="d0"><td>PF_HQ_ALIGNED_Q20_BASES</td><td>28 </td></tr>
+<tr class="d1"><td>PF_HQ_MEDIAN_MISMATCHES</td><td>78 </td></tr>
+<tr class="d0"><td>PF_MISMATCH_RATE</td><td>0.777228 </td></tr><tr class="d1"><td>PF_HQ_ERROR_RATE</td><td>0.777228 </td></tr>
-<tr class="d0"><td>MEAN_READ_LENGTH</td><td>101 </td></tr>
-<tr class="d1"><td>READS_ALIGNED_IN_PAIRS</td><td>3 </td></tr>
-<tr class="d0"><td>PCT_READS_ALIGNED_IN_PAIRS</td><td>0.75 </td></tr>
-<tr class="d1"><td>BAD_CYCLES</td><td>63 </td></tr>
-<tr class="d0"><td>STRAND_BALANCE</td><td>0.25 </td></tr>
-<tr class="d1"><td>PCT_CHIMERAS</td><td>0 </td></tr>
-<tr class="d0"><td>PCT_ADAPTER
-</td><td>0
+<tr class="d0"><td>PF_INDEL_RATE</td><td>0 </td></tr>
+<tr class="d1"><td>MEAN_READ_LENGTH</td><td>101 </td></tr>
+<tr class="d0"><td>READS_ALIGNED_IN_PAIRS</td><td>3 </td></tr>
+<tr class="d1"><td>PCT_READS_ALIGNED_IN_PAIRS</td><td>0.75 </td></tr>
+<tr class="d0"><td>BAD_CYCLES</td><td>63 </td></tr>
+<tr class="d1"><td>STRAND_BALANCE</td><td>0.25 </td></tr>
+<tr class="d0"><td>PCT_CHIMERAS</td><td>0 </td></tr>
+<tr class="d1"><td>PCT_ADAPTER</td><td>0 </td></tr>
+<tr class="d0"><td>SAMPLE</td><td> </td></tr>
+<tr class="d1"><td>LIBRARY</td><td> </td></tr>
+<tr class="d0"><td>READ_GROUP
+</td><td>
</td></tr></table><b>Picard Tool Run Log</b><hr/>
-<pre>Wed, 11 May 2011 23:16:24 INFO
- ## executing java -Xmx2g -jar /udd/rerla/galaxy-central/tool-data/shared/jars/CreateSequenceDictionary.jar REFERENCE=/tmp/CollectAlignmentSummaryMetricsZJS8q6.fasta OUTPUT=/tmp/CollectAlignmentSummaryMetricsZJS8q6.dict URI=dataset_1.dat TRUNCATE_NAMES_AT_WHITESPACE=None returned status 0 and stderr:
-[Wed May 11 23:16:24 EDT 2011] net.sf.picard.sam.CreateSequenceDictionary REFERENCE=/tmp/CollectAlignmentSummaryMetricsZJS8q6.fasta OUTPUT=/tmp/CollectAlignmentSummaryMetricsZJS8q6.dict URI=dataset_1.dat TRUNCATE_NAMES_AT_WHITESPACE=false NUM_SEQUENCES=2147483647 TMP_DIR=/tmp/rerla VERBOSITY=INFO QUIET=false VALIDATION_STRINGENCY=STRICT COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false
-[Wed May 11 23:16:24 EDT 2011] net.sf.picard.sam.CreateSequenceDictionary done.
-Runtime.totalMemory()=9109504
+<pre>INFO:root:## executing java -Xmx4g -jar /data/home/rlazarus/galaxy/tool-data/shared/jars/picard/CreateSequenceDictionary.jar REFERENCE=/tmp/CollectAlignmentSummaryMetricsfq2hit.fasta OUTPUT=/tmp/CollectAlignmentSummaryMetricsfq2hit.dict URI=dataset_1.dat TRUNCATE_NAMES_AT_WHITESPACE=None returned status 0 and nothing on stderr
-
-Wed, 11 May 2011 23:16:24 INFO
- ## executing java -Xmx2g -jar /udd/rerla/galaxy-central/tool-data/shared/jars/CollectAlignmentSummaryMetrics.jar VALIDATION_STRINGENCY=LENIENT ASSUME_SORTED=true ADAPTER_SEQUENCE= IS_BISULFITE_SEQUENCED=false MAX_INSERT_SIZE=100000 OUTPUT=/udd/rerla/galaxy-central/database/job_working_directory/3/dataset_3_files/CollectAlignmentSummaryMetrics.metrics.txt R=/udd/rerla/galaxy-central/database/job_working_directory/3/dataset_3_files/CollectAlignmentSummaryMetricsZJS8q6.fasta_fake.fasta TMP_DIR=/tmp INPUT=/export/tmp/tmp1-mt_l/database/files/000/dataset_2.dat returned status 0 and stderr:
-[Wed May 11 23:16:24 EDT 2011] net.sf.picard.analysis.CollectAlignmentSummaryMetrics MAX_INSERT_SIZE=100000 ADAPTER_SEQUENCE=[AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCTCGTATGCCGTCTTCTGCTTG, AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCGGTTCAGCAGGAATGCCGAGACCGATCTCGTATGCCGTCTTCTGCTTG, AATGATACGGCGACCACCGAGATCTACACTCTTTCCCTACACGACGCTCTTCCGATCT, AGATCGGAAGAGCACACGTCTGAACTCCAGTCACNNNNNNNNATCTCGTATGCCGTCTTCTGCTTG, IS_BISULFITE_SEQUENCED=false] INPUT=/export/tmp/tmp1-mt_l/database/files/000/dataset_2.dat OUTPUT=/udd/rerla/galaxy-central/database/job_working_directory/3/dataset_3_files/CollectAlignmentSummaryMetrics.metrics.txt REFERENCE_SEQUENCE=/udd/rerla/galaxy-central/database/job_working_directory/3/dataset_3_files/CollectAlignmentSummaryMetricsZJS8q6.fasta_fake.fasta ASSUME_SORTED=true TMP_DIR=/tmp VALIDATION_STRINGENCY=LENIENT IS_BISULFITE_SEQUENCED=false STOP_AFTER=0 VERBOSITY=INFO QUIET=false COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false
-WARNING 2011-05-11 23:16:24 SinglePassSamProgram File reports sort order 'queryname', assuming it's coordinate sorted anyway.
-[Wed May 11 23:16:24 EDT 2011] net.sf.picard.analysis.CollectAlignmentSummaryMetrics done.
-Runtime.totalMemory()=9109504
-
+INFO:root:## executing java -Xmx4g -jar /data/home/rlazarus/galaxy/tool-data/shared/jars/picard/CollectAlignmentSummaryMetrics.jar VALIDATION_STRINGENCY=LENIENT ASSUME_SORTED=true ADAPTER_SEQUENCE= IS_BISULFITE_SEQUENCED=false MAX_INSERT_SIZE=100000 OUTPUT=/data/home/rlazarus/galaxy/database/job_working_directory/3/dataset_3_files/CollectAlignmentSummaryMetrics.metrics.txt R=/data/home/rlazarus/galaxy/database/job_working_directory/3/dataset_3_files/CollectAlignmentSummaryMetricsfq2hit.fasta_fake.fasta TMP_DIR=/tmp INPUT=/data/tmp/tmpLLcl1w/database/files/000/dataset_2.dat returned status 0 and nothing on stderr
</pre><hr/>The freely available <a href="http://picard.sourceforge.net/command-line-overview.shtml">Picard software</a>
generated all outputs reported here running as a <a href="http://getgalaxy.org">Galaxy</a> tool</div></body></html>
diff -r 6ec2d7f4a64dcf6a1c49415bd8f40c8d4ca907c5 -r a807ed5389f4da39f509aa45e5baf7027ceb7a91 tools/picard/picard_wrapper.py
--- a/tools/picard/picard_wrapper.py
+++ b/tools/picard/picard_wrapper.py
@@ -125,14 +125,15 @@
tef.close()
stderrs = self.readLarge(temperr)
stdouts = self.readLarge(templog)
- if len(stderrs) > 0:
+ if rval > 0:
s = '## executing %s returned status %d and stderr: \n%s\n' % (cl,rval,stderrs)
+ stdouts = '%s\n%s' % (stdouts,stderrs)
else:
s = '## executing %s returned status %d and nothing on stderr\n' % (cl,rval)
logging.info(s)
os.unlink(templog) # always
os.unlink(temperr) # always
- return s, stdouts # sometimes this is an output
+ return s, stdouts, rval # sometimes s is an output
def runPic(self, jar, cl):
"""
@@ -141,8 +142,8 @@
runme = ['java -Xmx%s' % self.opts.maxjheap]
runme.append('-jar %s' % jar)
runme += cl
- s,stdout = self.runCL(cl=runme, output_dir=self.opts.outdir)
- return stdout
+ s,stdouts,rval = self.runCL(cl=runme, output_dir=self.opts.outdir)
+ return stdouts,rval
def samToBam(self,infile=None,outdir=None):
"""
@@ -150,8 +151,8 @@
"""
fd,tempbam = tempfile.mkstemp(dir=outdir,suffix='rgutilsTemp.bam')
cl = ['samtools view -h -b -S -o ',tempbam,infile]
- tlog,stdouts = self.runCL(cl,outdir)
- return tlog,tempbam
+ tlog,stdouts,rval = self.runCL(cl,outdir)
+ return tlog,tempbam,rval
#def bamToSam(self,infile=None,outdir=None):
# """
@@ -167,7 +168,7 @@
"""
print '## sortSam got infile=%s,outfile=%s,outdir=%s' % (infile,outfile,outdir)
cl = ['samtools sort',infile,outfile]
- tlog,stdouts = self.runCL(cl,outdir)
+ tlog,stdouts,rval = self.runCL(cl,outdir)
return tlog
def cleanup(self):
@@ -243,6 +244,9 @@
if len(pdflist) > 0: # assumes all pdfs come with thumbnail .jpgs
for p in pdflist:
imghref = '%s.jpg' % os.path.splitext(p)[0] # removes .pdf
+ mimghref = '%s-0.jpg' % os.path.splitext(p)[0] # multiple pages pdf -> multiple thumbnails without asking!
+ if mimghref in flist:
+ imghref=mimghref
res.append('<table cellpadding="10"><tr><td>\n')
res.append('<a href="%s"><img src="%s" title="Click image preview for a print quality PDF version" hspace="10" align="middle"></a>\n' % (p,imghref))
res.append('</tr></td></table>\n')
@@ -383,6 +387,8 @@
op.add_option('', '--taillimit', default="0")
op.add_option('', '--histwidth', default="0")
op.add_option('', '--minpct', default="0.01")
+ op.add_option('', '--malevel', default="")
+ op.add_option('', '--deviations', default="0.0")
# CollectAlignmentSummaryMetrics
op.add_option('', '--maxinsert', default="20")
op.add_option('', '--adaptors', action='append', type="string")
@@ -430,7 +436,8 @@
tmp_dir = opts.outdir
haveTempout = False # we use this where sam output is an option
-
+ rval = 0
+ stdouts = 'Not run yet'
# set ref and dict files to use (create if necessary)
ref_file_name = opts.ref
if opts.ref_file <> None:
@@ -453,7 +460,7 @@
pic.delme.append(dict_file_name)
pic.delme.append(ref_file_name)
pic.delme.append(tmp_ref_name)
- s = pic.runPic(jarpath, cl)
+ stdouts,rval = pic.runPic(jarpath, cl)
# run relevant command(s)
# define temporary output
@@ -486,7 +493,7 @@
cl.append('RGCN="%s"' % opts.rg_seq_center)
if opts.rg_desc:
cl.append('RGDS="%s"' % opts.rg_desc)
- pic.runPic(opts.jar, cl)
+ stdouts,rval = pic.runPic(opts.jar, cl)
haveTempout = True
elif pic.picname == 'BamIndexStats':
@@ -499,9 +506,9 @@
pic.delme.append(tmp_bam_name)
pic.delme.append(tmp_bai_name)
pic.delme.append(tmp_name)
- s = pic.runPic( opts.jar, cl )
+ stdouts,rval = pic.runPic( opts.jar, cl )
f = open(pic.metricsOut,'a')
- f.write(s) # got this on stdout from runCl
+ f.write(stdouts) # got this on stdout from runCl
f.write('\n')
f.close()
doTranspose = False # but not transposed
@@ -519,7 +526,7 @@
cl.append('READ_NAME_REGEX="%s"' % opts.readregex)
if float(opts.optdupdist) > 0:
cl.append('OPTICAL_DUPLICATE_PIXEL_DISTANCE=%s' % opts.optdupdist)
- pic.runPic(opts.jar,cl)
+ stdouts,rval = pic.runPic(opts.jar, cl)
elif pic.picname == 'CollectAlignmentSummaryMetrics':
# Why do we do this fakefasta thing? Because we need NO fai to be available or picard barfs unless it has the same length as the input data.
@@ -532,7 +539,7 @@
info = s
shutil.copy(ref_file_name,fakefasta)
pic.delme.append(fakefasta)
- cl.append('ASSUME_SORTED=%s' % opts.assumesorted)
+ cl.append('ASSUME_SORTED=true')
adaptorseqs = ''.join([' ADAPTER_SEQUENCE=%s' % x for x in opts.adaptors])
cl.append(adaptorseqs)
cl.append('IS_BISULFITE_SEQUENCED=%s' % opts.bisulphite)
@@ -541,13 +548,24 @@
cl.append('R=%s' % fakefasta)
cl.append('TMP_DIR=%s' % opts.tmpdir)
if not opts.assumesorted.lower() == 'true': # we need to sort input
- fakeinput = '%s.sorted' % opts.input
- s = pic.sortSam(opts.input, fakeinput, opts.outdir)
- pic.delme.append(fakeinput)
- cl.append('INPUT=%s' % fakeinput)
+ sortedfile = '%s.sorted' % os.path.basename(opts.input)
+ if opts.datatype == 'sam': # need to work with a bam
+ tlog,tempbam,rval = pic.samToBam(opts.input,opts.outdir)
+ pic.delme.append(tempbam)
+ try:
+ tlog = pic.sortSam(tempbam,sortedfile,opts.outdir)
+ except:
+ print '## exception on sorting sam file %s' % opts.input
+ else: # is already bam
+ try:
+ tlog = pic.sortSam(opts.input,sortedfile,opts.outdir)
+ except: # bug - [bam_sort_core] not being ignored - TODO fixme
+ print '## exception on sorting bam file %s' % opts.input
+ cl.append('INPUT=%s.bam' % os.path.abspath(os.path.join(opts.outdir,sortedfile)))
+ pic.delme.append(os.path.join(opts.outdir,sortedfile))
else:
cl.append('INPUT=%s' % os.path.abspath(opts.input))
- pic.runPic(opts.jar,cl)
+ stdouts,rval = pic.runPic(opts.jar, cl)
elif pic.picname == 'CollectGcBiasMetrics':
@@ -575,10 +593,10 @@
cl.append('TMP_DIR=%s' % opts.tmpdir)
cl.append('CHART_OUTPUT=%s' % temppdf)
cl.append('SUMMARY_OUTPUT=%s' % pic.metricsOut)
- pic.runPic(opts.jar,cl)
+ stdouts,rval = pic.runPic(opts.jar, cl)
if os.path.isfile(temppdf):
cl2 = ['convert','-resize x400',temppdf,os.path.join(opts.outdir,jpgname)] # make the jpg for fixPicardOutputs to find
- s,stdouts = pic.runCL(cl=cl2,output_dir=opts.outdir)
+ s,stdouts,rval = pic.runCL(cl=cl2,output_dir=opts.outdir)
else:
s='### runGC: Unable to find pdf %s - please check the log for the causal problem\n' % temppdf
lf = open(pic.log_filename,'a')
@@ -587,29 +605,39 @@
lf.close()
elif pic.picname == 'CollectInsertSizeMetrics':
+ """ <command interpreter="python">
+ picard_wrapper.py -i "$input_file" -n "$out_prefix" --tmpdir "${__new_file_path__}" --deviations "$deviations"
+ --histwidth "$histWidth" --minpct "$minPct" --malevel "$malevel"
+ -j "${GALAXY_DATA_INDEX_DIR}/shared/jars/picard/CollectInsertSizeMetrics.jar" -d "$html_file.files_path" -t "$html_file"
+ </command>
+ """
isPDF = 'InsertSizeHist.pdf'
pdfpath = os.path.join(opts.outdir,isPDF)
histpdf = 'InsertSizeHist.pdf'
cl.append('I=%s' % opts.input)
cl.append('O=%s' % pic.metricsOut)
cl.append('HISTOGRAM_FILE=%s' % histpdf)
- if opts.taillimit <> '0':
- cl.append('TAIL_LIMIT=%s' % opts.taillimit)
+ #if opts.taillimit <> '0': # this was deprecated although still mentioned in the docs at 1.56
+ # cl.append('TAIL_LIMIT=%s' % opts.taillimit)
if opts.histwidth <> '0':
cl.append('HISTOGRAM_WIDTH=%s' % opts.histwidth)
if float( opts.minpct) > 0.0:
cl.append('MINIMUM_PCT=%s' % opts.minpct)
- pic.runPic(opts.jar,cl)
+ if float(opts.deviations) > 0.0:
+ cl.append('DEVIATIONS=%s' % opts.deviations)
+ if opts.malevel.strip():
+ malevels = ['METRIC_ACCUMULATION_LEVEL=%s' % x for x in opts.malevel.split(',')]
+ cl.append(' '.join(malevels))
+ stdouts,rval = pic.runPic(opts.jar, cl)
if os.path.exists(pdfpath): # automake thumbnail - will be added to html
cl2 = ['mogrify', '-format jpg -resize x400 %s' % pdfpath]
- s,stdouts = pic.runCL(cl=cl2,output_dir=opts.outdir)
+ pic.runCL(cl=cl2,output_dir=opts.outdir)
else:
s = 'Unable to find expected pdf file %s<br/>\n' % pdfpath
s += 'This <b>always happens if single ended data was provided</b> to this tool,\n'
s += 'so please double check that your input data really is paired-end NGS data.<br/>\n'
s += 'If your input was paired data this may be a bug worth reporting to the galaxy-bugs list\n<br/>'
- stdouts = ''
- logging.info(s)
+ logging.info(s)
if len(stdouts) > 0:
logging.info(stdouts)
@@ -627,13 +655,13 @@
cl.append('READ_NAME_REGEX="%s"' % opts.readregex)
# maximum offset between two duplicate clusters
cl.append('OPTICAL_DUPLICATE_PIXEL_DISTANCE=%s' % opts.optdupdist)
- pic.runPic(opts.jar, cl)
+ stdouts,rval = pic.runPic(opts.jar, cl)
elif pic.picname == 'FixMateInformation':
cl.append('I=%s' % opts.input)
cl.append('O=%s' % tempout)
cl.append('SORT_ORDER=%s' % opts.sortorder)
- pic.runPic(opts.jar,cl)
+ stdouts,rval = pic.runPic(opts.jar,cl)
haveTempout = True
elif pic.picname == 'ReorderSam':
@@ -649,14 +677,14 @@
# contig length discordance
if opts.allow_contig_len_discord == 'true':
cl.append('ALLOW_CONTIG_LENGTH_DISCORDANCE=true')
- pic.runPic(opts.jar, cl)
+ stdouts,rval = pic.runPic(opts.jar, cl)
haveTempout = True
elif pic.picname == 'ReplaceSamHeader':
cl.append('INPUT=%s' % opts.input)
cl.append('OUTPUT=%s' % tempout)
cl.append('HEADER=%s' % opts.header_file)
- pic.runPic(opts.jar, cl)
+ stdouts,rval = pic.runPic(opts.jar, cl)
haveTempout = True
elif pic.picname == 'CalculateHsMetrics':
@@ -673,7 +701,7 @@
cl.append('INPUT=%s' % os.path.abspath(opts.input))
cl.append('OUTPUT=%s' % pic.metricsOut)
cl.append('TMP_DIR=%s' % opts.tmpdir)
- pic.runPic(opts.jar,cl)
+ stdouts,rval = pic.runPic(opts.jar,cl)
elif pic.picname == 'ValidateSamFile':
import pysam
@@ -682,7 +710,7 @@
stf = open(pic.log_filename,'w')
tlog = None
if opts.datatype == 'sam': # need to work with a bam
- tlog,tempbam = pic.samToBam(opts.input,opts.outdir)
+ tlog,tempbam,rval = pic.samToBam(opts.input,opts.outdir)
try:
tlog = pic.sortSam(tempbam,sortedfile,opts.outdir)
except:
@@ -709,7 +737,7 @@
cl.append('IS_BISULFITE_SEQUENCED=true')
if opts.ref <> None or opts.ref_file <> None:
cl.append('R=%s' % ref_file_name)
- pic.runPic(opts.jar,cl)
+ stdouts,rval = pic.runPic(opts.jar,cl)
if opts.datatype == 'sam':
pic.delme.append(tempbam)
newsam = opts.output
@@ -725,10 +753,12 @@
if haveTempout:
# Some Picard tools produced a potentially intermediate bam file.
# Either just move to final location or create sam
- shutil.move(tempout, os.path.abspath(opts.output))
-
+ if os.path.exists(tempout):
+ shutil.move(tempout, os.path.abspath(opts.output))
if opts.htmlout <> None or doFix: # return a pretty html page
pic.fixPicardOutputs(transpose=doTranspose,maxloglines=maxloglines)
-
+ if rval <> 0:
+ print >> sys.stderr, '## exit code=%d; stdout=%s' % (rval,stdouts)
+ # signal failure
if __name__=="__main__": __main__()
diff -r 6ec2d7f4a64dcf6a1c49415bd8f40c8d4ca907c5 -r a807ed5389f4da39f509aa45e5baf7027ceb7a91 tools/picard/rgPicardASMetrics.xml
--- a/tools/picard/rgPicardASMetrics.xml
+++ b/tools/picard/rgPicardASMetrics.xml
@@ -1,7 +1,7 @@
<tool name="SAM/BAM Alignment Summary Metrics" id="PicardASMetrics" version="0.03.1"><command interpreter="python">
picard_wrapper.py -i "$input_file" -d "$html_file.files_path" -t "$html_file"
- --assumesorted "$sorted" -b "$bisulphite" --adaptors "$adaptors" --maxinsert "$maxinsert" -n "$out_prefix"
+ --assumesorted "$sorted" -b "$bisulphite" --adaptors "$adaptors" --maxinsert "$maxinsert" -n "$out_prefix" --datatype "$input_file.ext"
-j ${GALAXY_DATA_INDEX_DIR}/shared/jars/picard/CollectAlignmentSummaryMetrics.jar
#if $genomeSource.refGenomeSource == "history":
--ref-file "$genomeSource.ownFile"
diff -r 6ec2d7f4a64dcf6a1c49415bd8f40c8d4ca907c5 -r a807ed5389f4da39f509aa45e5baf7027ceb7a91 tools/picard/rgPicardInsertSize.xml
--- a/tools/picard/rgPicardInsertSize.xml
+++ b/tools/picard/rgPicardInsertSize.xml
@@ -1,9 +1,9 @@
-<tool name="Insertion size metrics" id="PicardInsertSize" version="0.3.1">
+<tool name="Insertion size metrics" id="PicardInsertSize" version="0.3.2"><description>for PAIRED data</description><requirements><requirement type="package">picard</requirement></requirements><command interpreter="python">
- picard_wrapper.py -i "$input_file" -n "$out_prefix" --tmpdir "${__new_file_path__}" --taillimit "$tailLimit"
- --histwidth "$histWidth" --minpct "$minPct"
+ picard_wrapper.py -i "$input_file" -n "$out_prefix" --tmpdir "${__new_file_path__}" --deviations "$deviations"
+ --histwidth "$histWidth" --minpct "$minPct" --malevel "$malevel"
-j "${GALAXY_DATA_INDEX_DIR}/shared/jars/picard/CollectInsertSizeMetrics.jar" -d "$html_file.files_path" -t "$html_file"
</command><inputs>
@@ -11,15 +11,22 @@
help="If empty, upload or import a SAM/BAM dataset."/><param name="out_prefix" value="Insertion size metrics" type="text"
label="Title for the output file" help="Use this remind you what the job was for" size="120" />
- <param name="tailLimit" value="10000" type="integer"
- label="Tail limit" size="5"
- help="When calculating mean and stdev stop when the bins in the tail of the distribution contain fewer than mode/TAIL_LIMIT items" />
+ <param name="deviations" value="10.0" type="float"
+ label="Deviations" size="5"
+ help="See Picard documentation: Generate mean, sd and plots by trimming the data down to MEDIAN + DEVIATIONS*MEDIAN_ABSOLUTE_DEVIATION" /><param name="histWidth" value="0" type="integer"
label="Histogram width" size="5"
- help="Explicitly sets the histogram width, overriding the TAIL_LIMIT option - leave 0 to ignore" />
- <param name="minPct" value="0.01" type="float"
+ help="Explicitly sets the histogram width option - leave 0 to ignore" />
+ <param name="minPct" value="0.05" type="float"
label="Minimum percentage" size="5"
help="Discard any data categories (out of FR, TANDEM, RF) that have fewer than this percentage of overall reads" />
+ <param name="malevel" value="0" type="select" multiple="true" label="Metric Accumulation Level"
+ help="Level(s) at which metrics will be accumulated">
+ <option value="ALL_READS" selected="true">All reads (default)</option>
+ <option value="SAMPLE" default="true">Sample</option>
+ <option value="LIBRARY" default="true">Library</option>
+ <option value="READ_GROUP" default="true">Read group</option>
+ </param></inputs><outputs><data format="html" name="html_file" label="InsertSize_${out_prefix}.html"/>
@@ -28,9 +35,10 @@
<test><param name="input_file" value="picard_input_tiny.sam" /><param name="out_prefix" value="Insertion size metrics" />
- <param name="tailLimit" value="10000" />
+ <param name="deviations" value="10.0" /><param name="histWidth" value="0" /><param name="minPct" value="0.01" />
+ <param name="malevel" value="ALL_READS" /><output name="html_file" file="picard_output_insertsize_tinysam.html" ftype="html" compare="contains" lines_diff="40" /></test></tests>
https://bitbucket.org/galaxy/galaxy-central/changeset/1da4a4a928fb/
changeset: 1da4a4a928fb
user: fubar
date: 2011-11-11 02:01:07
summary: Some minor additional cleanup to the picard suite
affected #: 1 file
diff -r a807ed5389f4da39f509aa45e5baf7027ceb7a91 -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 tools/picard/picard_wrapper.py
--- a/tools/picard/picard_wrapper.py
+++ b/tools/picard/picard_wrapper.py
@@ -102,7 +102,7 @@
pass
tmp.close()
except Exception, e:
- stop_err( 'Error : %s' % str( e ) )
+ stop_err( 'Read Large Exception : %s' % str( e ) )
return s
def runCL(self,cl=None,output_dir=None):
@@ -154,15 +154,6 @@
tlog,stdouts,rval = self.runCL(cl,outdir)
return tlog,tempbam,rval
- #def bamToSam(self,infile=None,outdir=None):
- # """
- # use samtools view to convert bam to sam
- # """
- # fd,tempsam = tempfile.mkstemp(dir=outdir,suffix='rgutilsTemp.sam')
- # cl = ['samtools view -h -o ',tempsam,infile]
- # tlog,stdouts = self.runCL(cl,outdir)
- # return tlog,tempsam
-
def sortSam(self, infile=None,outfile=None,outdir=None):
"""
"""
@@ -243,10 +234,11 @@
pdflist = [x for x in flist if os.path.splitext(x)[-1].lower() == '.pdf']
if len(pdflist) > 0: # assumes all pdfs come with thumbnail .jpgs
for p in pdflist:
- imghref = '%s.jpg' % os.path.splitext(p)[0] # removes .pdf
- mimghref = '%s-0.jpg' % os.path.splitext(p)[0] # multiple pages pdf -> multiple thumbnails without asking!
+ pbase = os.path.splitext(p)[0] # removes .pdf
+ imghref = '%s.jpg' % pbase
+ mimghref = '%s-0.jpg' % pbase # multiple pages pdf -> multiple thumbnails without asking!
if mimghref in flist:
- imghref=mimghref
+ imghref=mimghref # only one for thumbnail...it's a multi page pdf
res.append('<table cellpadding="10"><tr><td>\n')
res.append('<a href="%s"><img src="%s" title="Click image preview for a print quality PDF version" hspace="10" align="middle"></a>\n' % (p,imghref))
res.append('</tr></td></table>\n')
@@ -387,7 +379,7 @@
op.add_option('', '--taillimit', default="0")
op.add_option('', '--histwidth', default="0")
op.add_option('', '--minpct', default="0.01")
- op.add_option('', '--malevel', default="")
+ op.add_option('', '--malevel', action='append', type="string")
op.add_option('', '--deviations', default="0.0")
# CollectAlignmentSummaryMetrics
op.add_option('', '--maxinsert', default="20")
@@ -529,13 +521,14 @@
stdouts,rval = pic.runPic(opts.jar, cl)
elif pic.picname == 'CollectAlignmentSummaryMetrics':
- # Why do we do this fakefasta thing? Because we need NO fai to be available or picard barfs unless it has the same length as the input data.
+ # Why do we do this fakefasta thing?
+ # Because we need NO fai to be available or picard barfs unless it matches the input data.
# why? Dunno Seems to work without complaining if the .bai file is AWOL....
fakefasta = os.path.join(opts.outdir,'%s_fake.fasta' % os.path.basename(ref_file_name))
try:
os.symlink(ref_file_name,fakefasta)
except:
- s = '## unable to symlink %s to %s - different devices? May need to replace with shutil.copy'
+ s = '## unable to symlink %s to %s - different devices? Will shutil.copy'
info = s
shutil.copy(ref_file_name,fakefasta)
pic.delme.append(fakefasta)
@@ -550,7 +543,7 @@
if not opts.assumesorted.lower() == 'true': # we need to sort input
sortedfile = '%s.sorted' % os.path.basename(opts.input)
if opts.datatype == 'sam': # need to work with a bam
- tlog,tempbam,rval = pic.samToBam(opts.input,opts.outdir)
+ tlog,tempbam,trval = pic.samToBam(opts.input,opts.outdir)
pic.delme.append(tempbam)
try:
tlog = pic.sortSam(tempbam,sortedfile,opts.outdir)
@@ -559,8 +552,8 @@
else: # is already bam
try:
tlog = pic.sortSam(opts.input,sortedfile,opts.outdir)
- except: # bug - [bam_sort_core] not being ignored - TODO fixme
- print '## exception on sorting bam file %s' % opts.input
+ except : # bug - [bam_sort_core] not being ignored - TODO fixme
+ print '## exception %s on sorting bam file %s' % (sys.exc_info()[0],opts.input)
cl.append('INPUT=%s.bam' % os.path.abspath(os.path.join(opts.outdir,sortedfile)))
pic.delme.append(os.path.join(opts.outdir,sortedfile))
else:
@@ -625,8 +618,8 @@
cl.append('MINIMUM_PCT=%s' % opts.minpct)
if float(opts.deviations) > 0.0:
cl.append('DEVIATIONS=%s' % opts.deviations)
- if opts.malevel.strip():
- malevels = ['METRIC_ACCUMULATION_LEVEL=%s' % x for x in opts.malevel.split(',')]
+ if opts.malevel:
+ malevels = ['METRIC_ACCUMULATION_LEVEL=%s' % x for x in opts.malevel]
cl.append(' '.join(malevels))
stdouts,rval = pic.runPic(opts.jar, cl)
if os.path.exists(pdfpath): # automake thumbnail - will be added to html
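The --malevel handling in this changeset switches from splitting one comma-separated string to optparse's action='append', so the flag can simply be repeated once per selected accumulation level. A small standalone sketch of that option style (illustrative, not the wrapper):

    import optparse

    op = optparse.OptionParser()
    op.add_option( '', '--malevel', action='append', type='string' )
    opts, args = op.parse_args( [ '--malevel', 'ALL_READS', '--malevel', 'SAMPLE' ] )
    if opts.malevel:
        # prints: METRIC_ACCUMULATION_LEVEL=ALL_READS METRIC_ACCUMULATION_LEVEL=SAMPLE
        print ' '.join( [ 'METRIC_ACCUMULATION_LEVEL=%s' % x for x in opts.malevel ] )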
https://bitbucket.org/galaxy/galaxy-central/changeset/0474dfc4d30f/
changeset: 0474dfc4d30f
user: fubar
date: 2011-11-11 02:45:25
summary: branch merge
affected #: 14 files
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -19,7 +19,6 @@
from Cheetah.Template import Template
-
pkg_resources.require( 'elementtree' )
from elementtree import ElementTree, ElementInclude
from elementtree.ElementTree import Element
@@ -1298,6 +1297,9 @@
group_list_grid = None
quota_list_grid = None
repository_list_grid = None
+ delete_operation = None
+ undelete_operation = None
+ purge_operation = None
@web.expose
@web.require_admin
@@ -2217,6 +2219,13 @@
**kwd ) )
elif operation == "manage roles and groups":
return self.manage_roles_and_groups_for_user( trans, **kwd )
+ if trans.app.config.allow_user_deletion:
+ if self.delete_operation not in self.user_list_grid.operations:
+ self.user_list_grid.operations.append( self.delete_operation )
+ if self.undelete_operation not in self.user_list_grid.operations:
+ self.user_list_grid.operations.append( self.undelete_operation )
+ if self.purge_operation not in self.user_list_grid.operations:
+ self.user_list_grid.operations.append( self.purge_operation )
# Render the list view
return self.user_list_grid( trans, **kwd )
@web.expose
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py
+++ b/lib/galaxy/web/controllers/admin.py
@@ -93,11 +93,6 @@
allow_popup=False,
url_args=dict( webapp="galaxy", action="reset_user_password" ) )
]
- #TODO: enhance to account for trans.app.config.allow_user_deletion here so that we can eliminate these operations if
- # the setting is False
- #operations.append( grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ), allow_multiple=True ) )
- #operations.append( grids.GridOperation( "Undelete", condition=( lambda item: item.deleted and not item.purged ), allow_multiple=True ) )
- #operations.append( grids.GridOperation( "Purge", condition=( lambda item: item.deleted and not item.purged ), allow_multiple=True ) )
standard_filters = [
grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
grids.GridColumnFilter( "Deleted", args=dict( deleted=True, purged=False ) ),
@@ -443,6 +438,9 @@
group_list_grid = GroupListGrid()
quota_list_grid = QuotaListGrid()
repository_list_grid = RepositoryListGrid()
+ delete_operation = grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ), allow_multiple=True )
+ undelete_operation = grids.GridOperation( "Undelete", condition=( lambda item: item.deleted and not item.purged ), allow_multiple=True )
+ purge_operation = grids.GridOperation( "Purge", condition=( lambda item: item.deleted and not item.purged ), allow_multiple=True )
@web.expose
@web.require_admin
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -61,6 +61,7 @@
self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
+ self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) )
self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates/community" ), self.root )
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd lib/galaxy/webapps/demo_sequencer/config.py
--- a/lib/galaxy/webapps/demo_sequencer/config.py
+++ b/lib/galaxy/webapps/demo_sequencer/config.py
@@ -40,6 +40,7 @@
self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
+ self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates/demo_sequencer" ), self.root )
self.admin_users = kwargs.get( "admin_users", "" )
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd lib/galaxy/webapps/reports/config.py
--- a/lib/galaxy/webapps/reports/config.py
+++ b/lib/galaxy/webapps/reports/config.py
@@ -31,6 +31,8 @@
self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates/reports" ), self.root )
self.sendmail_path = kwargs.get('sendmail_path',"/usr/sbin/sendmail")
+ self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
+ self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
self.brand = kwargs.get( 'brand', None )
self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd static/images/fugue/chevron-expand-bw.png
Binary file static/images/fugue/chevron-expand-bw.png has changed
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd static/images/fugue/toggle-bw.png
Binary file static/images/fugue/toggle-bw.png has changed
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd static/images/fugue/toggle-expand-bw.png
Binary file static/images/fugue/toggle-expand-bw.png has changed
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd static/june_2007_style/base.css.tmpl
--- a/static/june_2007_style/base.css.tmpl
+++ b/static/june_2007_style/base.css.tmpl
@@ -865,10 +865,6 @@
-sprite-group: fugue;
-sprite-image: fugue/toggle-expand.png;
}
-.icon-button.toggle {
- -sprite-group: fugue;
- -sprite-image: fugue/toggle.png;
-}
.icon-button.toggle-contract {
-sprite-group: fugue;
-sprite-image: fugue/toggle.png;
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -151,9 +151,10 @@
.icon-button.tag{background:url(fugue.png) no-repeat 0px -0px;}
.icon-button.tags{background:url(fugue.png) no-repeat 0px -26px;}
.icon-button.tag--plus{background:url(fugue.png) no-repeat 0px -52px;}
-.icon-button.toggle-expand{background:url(fugue.png) no-repeat 0px -78px;}
-.icon-button.toggle{background:url(fugue.png) no-repeat 0px -104px;}
-.icon-button.toggle-contract{background:url(fugue.png) no-repeat 0px -104px;}
+.icon-button.toggle-expand{background:transparent url(../images/fugue/toggle-expand-bw.png) no-repeat;}
+.icon-button.toggle-expand:hover{background:url(fugue.png) no-repeat 0px -78px;}
+.icon-button.toggle-contract{background:transparent url(../images/fugue/toggle-bw.png) no-repeat;}
+.icon-button.toggle-contract:hover{background:url(fugue.png) no-repeat 0px -104px;}
.icon-button.arrow-circle{background:url(fugue.png) no-repeat 0px -130px;}
.icon-button.chevron{background:url(fugue.png) no-repeat 0px -156px;}
.icon-button.bug{background:url(fugue.png) no-repeat 0px -182px;}
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd static/june_2007_style/blue/trackster.css
--- a/static/june_2007_style/blue/trackster.css
+++ b/static/june_2007_style/blue/trackster.css
@@ -56,7 +56,9 @@
.tool-name{font-size:110%;font-weight:bold;}
.param-row{margin-top:0.2em;margin-left:1em;}
.param-label{float:left;font-weight:bold;padding-top:0.2em;}
-.menu-button{padding:0px 4px 0px 4px;}
+.menu-button{margin:0px 4px 0px 4px;}
+.chevron-expand{background:transparent url(../images/fugue/chevron-expand-bw.png) no-repeat;}
+.chevron-expand:hover{background:transparent url(../images/fugue/chevron-expand.png) no-repeat;}
.settings-icon{background:transparent url(../images/fugue/gear-bw.png) no-repeat;}
.settings-icon:hover{background:transparent url(../images/fugue/gear.png) no-repeat;}
.overview-icon{background:transparent url(../images/fugue/application-dock-270-bw.png) no-repeat;}
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd static/june_2007_style/trackster.css.tmpl
--- a/static/june_2007_style/trackster.css.tmpl
+++ b/static/june_2007_style/trackster.css.tmpl
@@ -295,7 +295,13 @@
padding-top: 0.2em;
}
.menu-button {
- padding: 0px 4px 0px 4px;
+ margin: 0px 4px 0px 4px;
+}
+.chevron-expand {
+ background: transparent url(../images/fugue/chevron-expand-bw.png) no-repeat;
+}
+.chevron-expand:hover {
+ background:transparent url(../images/fugue/chevron-expand.png) no-repeat;
}
.settings-icon {
background: transparent url(../images/fugue/gear-bw.png) no-repeat;
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -2381,16 +2381,18 @@
// Create and initialize track header and icons.
if (show_header) {
- this.header_div = $("<div class='track-header' />").appendTo(this.container_div);
+ this.header_div = $("<div class='track-header'/>").appendTo(this.container_div);
if (this.view.editor) { this.drag_div = $("<div/>").addClass(this.drag_handle_class).appendTo(this.header_div); }
this.name_div = $("<div/>").addClass("track-name").appendTo(this.header_div).text(this.name)
.attr( "id", this.name.replace(/\s+/g,'-').replace(/[^a-zA-Z0-9\-]/g,'').toLowerCase() );
this.icons_div = $("<div/>").css("float", "left").appendTo(this.header_div).hide();
// Track icons.
+ this.mode_icon = $("<a/>").attr("href", "javascript:void(0);").attr("title", "Set display mode")
+ .addClass("icon-button chevron-expand").tipsy( {gravity: 's'} ).appendTo(this.icons_div);
this.toggle_icon = $("<a/>").attr("href", "javascript:void(0);").attr("title", "Hide/show track content")
- .addClass("icon-button toggle").tipsy( {gravity: 's'} )
- .appendTo(this.icons_div);
+ .addClass("icon-button toggle-contract").tipsy( {gravity: 's'} )
+ .appendTo(this.icons_div);
this.settings_icon = $("<a/>").attr("href", "javascript:void(0);").attr("title", "Edit settings")
.addClass("icon-button settings-icon").tipsy( {gravity: 's'} )
.appendTo(this.icons_div);
@@ -2407,21 +2409,42 @@
.addClass("icon-button remove-icon").tipsy( {gravity: 's'} )
.appendTo(this.icons_div);
var track = this;
-
+
// Suppress double clicks in header so that they do not impact viz.
this.header_div.dblclick( function(e) { e.stopPropagation(); } );
+
+ // Set up behavior for modes popup.
+ if (track.display_modes !== undefined) {
+ var init_mode = (track.config && track.config.values['mode'] ?
+ track.config.values['mode'] : track.display_modes[0]);
+ track.mode = init_mode;
+ this.mode_icon.attr("title", "Set display mode (now: " + track.mode + ")");
+
+ var mode_mapping = {};
+ for (var i = 0, len = track.display_modes.length; i < len; i++) {
+ var mode = track.display_modes[i];
+ mode_mapping[mode] = function(mode) {
+ return function() {
+ track.change_mode(mode);
+ // HACK: the popup menu messes with the track's hover event, so manually show/hide
+ // icons div for now.
+ track.icons_div.show();
+ track.container_div.mouseleave(function() { track.icons_div.hide(); } ); };
+ }(mode);
+ }
+
+ make_popupmenu(this.mode_icon, mode_mapping);
+ }
- // Toggle icon hides or shows the track content
+ // Toggle icon hides or shows the track content.
this.toggle_icon.click( function() {
if ( track.content_visible ) {
- track.toggle_icon.addClass("toggle-expand").removeClass("toggle");
+ track.toggle_icon.addClass("toggle-expand").removeClass("toggle-contract");
track.hide_contents();
- track.mode_div.hide();
track.content_visible = false;
} else {
- track.toggle_icon.addClass("toggle").removeClass("toggle-expand");
+ track.toggle_icon.addClass("toggle-contract").removeClass("toggle-expand");
track.content_visible = true;
- track.mode_div.show();
track.show_contents();
}
});
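
The function( mode ) { ... }( mode ) wrapper in the loop above is deliberate: JavaScript closures bind variables late, so without the immediately-invoked wrapper every popup entry would call track.change_mode with the loop's final value. The same pitfall, and the same capture-by-value fix, can be sketched in Python (hypothetical names, illustration only):

# Broken: each lambda closes over the loop variable 'mode', which is
# rebound on every iteration, so every entry sees the last value.
modes = [ 'Dense', 'Squish', 'Pack' ]
broken = {}
for mode in modes:
    broken[ mode ] = lambda: mode
assert broken[ 'Dense' ]() == 'Pack'

# Fixed: a default argument captures the current value at definition
# time, just as the immediately-invoked function does in the diff.
fixed = {}
for mode in modes:
    fixed[ mode ] = lambda m=mode: m
assert fixed[ 'Dense' ]() == 'Dense'
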
@@ -2481,35 +2504,12 @@
$(".tipsy").remove();
track.remove();
});
+
+ // Show icons when the user is hovering over the track.
+ this.container_div.hover( function() { track.icons_div.show(); }, function() { track.icons_div.hide(); } );
- // Set up behavior for modes popup.
- if (track.display_modes !== undefined) {
- if (track.mode_div === undefined) {
- track.mode_div = $("<div class='right-float menubutton popup' />").appendTo(track.header_div);
- var init_mode = (track.config && track.config.values['mode'] ?
- track.config.values['mode'] : track.display_modes[0]);
- track.mode = init_mode;
- track.mode_div.text(init_mode);
-
- var mode_mapping = {};
- for (var i = 0, len = track.display_modes.length; i < len; i++) {
- var mode = track.display_modes[i];
- mode_mapping[mode] = function(mode) {
- return function() { track.change_mode(mode); };
- }(mode);
- }
- make_popupmenu(track.mode_div, mode_mapping);
- } else {
- track.mode_div.hide();
- }
-
- this.header_div.append( $("<div/>").css("clear", "both") );
-
- // Set up config icon.
-
- // Show icons when users is hovering over track.
- this.container_div.hover( function() { track.icons_div.show(); }, function() { track.icons_div.hide(); } );
- }
+ // Needed for floating elts in header.
+ $("<div style='clear: both'/>").appendTo(this.container_div);
}
//
@@ -2696,12 +2696,12 @@
*/
change_mode: function(name) {
var track = this;
- track.mode_div.text(name);
// TODO: is it necessary to store the mode in two places (.mode and track_config)?
track.mode = name;
track.config.values['mode'] = name;
track.tile_cache.clear();
track.request_draw();
+ this.mode_icon.attr("title", "Set display mode (now: " + track.mode + ")");
return track;
},
/**
@@ -3405,13 +3405,13 @@
}
},
update_auto_mode: function( mode ) {
if ( this.mode == "Auto" ) {
if ( mode == "no_detail" ) {
mode = "feature spans";
} else if ( mode == "summary_tree" ) {
mode = "coverage histogram";
}
- this.mode_div.text( "Auto (" + mode + ")" );
+ this.mode_icon.attr("title", "Set display mode (now: Auto/" + mode + ")");
}
},
/**
diff -r 1da4a4a928fb46a7725375a2cb0baa18f32c44b4 -r 0474dfc4d30f0d77aac42f1a0a8355998f213bfd tools/picard/picard_wrapper.py
--- a/tools/picard/picard_wrapper.py
+++ b/tools/picard/picard_wrapper.py
@@ -379,11 +379,11 @@
op.add_option('', '--taillimit', default="0")
op.add_option('', '--histwidth', default="0")
op.add_option('', '--minpct', default="0.01")
- op.add_option('', '--malevel', action='append', type="string")
+ op.add_option('', '--malevel', default='')
op.add_option('', '--deviations', default="0.0")
# CollectAlignmentSummaryMetrics
op.add_option('', '--maxinsert', default="20")
- op.add_option('', '--adaptors', action='append', type="string")
+ op.add_option('', '--adaptors', default='')
# FixMateInformation and validate
# CollectGcBiasMetrics
op.add_option('', '--windowsize', default='100')
@@ -533,8 +533,9 @@
shutil.copy(ref_file_name,fakefasta)
pic.delme.append(fakefasta)
cl.append('ASSUME_SORTED=true')
- adaptorseqs = ''.join([' ADAPTER_SEQUENCE=%s' % x for x in opts.adaptors])
- cl.append(adaptorseqs)
+ adaptlist = opts.adaptors.split(',')
+ adaptorseqs = ['ADAPTER_SEQUENCE=%s' % x for x in adaptlist]
+ cl += adaptorseqs
cl.append('IS_BISULFITE_SEQUENCED=%s' % opts.bisulphite)
cl.append('MAX_INSERT_SIZE=%s' % opts.maxinsert)
cl.append('OUTPUT=%s' % pic.metricsOut)
@@ -619,8 +620,9 @@
if float(opts.deviations) > 0.0:
cl.append('DEVIATIONS=%s' % opts.deviations)
if opts.malevel:
- malevels = ['METRIC_ACCUMULATION_LEVEL=%s' % x for x in opts.malevel]
- cl.append(' '.join(malevels))
+ malists = opts.malevel.split(',')
+ malist = ['METRIC_ACCUMULATION_LEVEL=%s' % x for x in malists]
+ cl += malist
stdouts,rval = pic.runPic(opts.jar, cl)
if os.path.exists(pdfpath): # automake thumbnail - will be added to html
cl2 = ['mogrify', '-format jpg -resize x400 %s' % pdfpath]
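
Both picard_wrapper.py hunks replace optparse's action='append' (one --adaptors or --malevel flag per value) with a single comma-separated string that the wrapper splits itself, so each value still becomes its own KEY=VALUE argument on the Picard command line. A minimal sketch of the new pattern (the option value here is hypothetical):

from optparse import OptionParser

op = OptionParser()
op.add_option( '', '--adaptors', default='' )
opts, args = op.parse_args( [ '--adaptors', 'AGATCGGAAGAGC,CTGTCTCTTATA' ] )

cl = []
if opts.adaptors:
    # One ADAPTER_SEQUENCE=... entry per comma-separated value.
    cl += [ 'ADAPTER_SEQUENCE=%s' % x for x in opts.adaptors.split( ',' ) ]
print( cl )  # ['ADAPTER_SEQUENCE=AGATCGGAAGAGC', 'ADAPTER_SEQUENCE=CTGTCTCTTATA']

Note that the malevel hunk guards the split with "if opts.malevel:", while the adaptors hunk splits unconditionally; with the default of '', an empty --adaptors value would therefore emit a bare ADAPTER_SEQUENCE= argument.
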
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.