galaxy-commits
August 2013
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6e72b04fcb58/
Changeset: 6e72b04fcb58
User: abretaud
Date: 2013-06-27 11:45:46
Summary: When creating the temp directory for dataset zip downloads, use the Galaxy user's umask and group on the directory. (same idea as 068a043)
Affected #: 1 file
diff -r 3fa9df444b4b81f94b1c42a033c685a6e23827be -r 6e72b04fcb582208a9a2bb57a942d8f2a582e94c lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -230,6 +230,7 @@
if (params.do_action == 'zip'):
# Can't use mkstemp - the file must not exist first
tmpd = tempfile.mkdtemp()
+ util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, trans.app.config.gid )
tmpf = os.path.join( tmpd, 'library_download.' + params.do_action )
if ziptype == '64':
archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
https://bitbucket.org/galaxy/galaxy-central/commits/446e2fee6b8f/
Changeset: 446e2fee6b8f
User: dannon
Date: 2013-08-07 20:38:31
Summary: Merged in abretaud/galaxy-central (pull request #191)
When creating the temp directory for dataset zip downloads, use the Galaxy user's umask and group on the directory.
Affected #: 1 file
diff -r b8cf5887ad464707887aaf8381df19dbc67ac697 -r 446e2fee6b8f978472e67031f429aeda6cdfffdd lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -230,6 +230,7 @@
if (params.do_action == 'zip'):
# Can't use mkstemp - the file must not exist first
tmpd = tempfile.mkdtemp()
+ util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, trans.app.config.gid )
tmpf = os.path.join( tmpd, 'library_download.' + params.do_action )
if ziptype == '64':
archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
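
The one-line fix above is needed because tempfile.mkdtemp() always creates its directory with mode 0700, readable and searchable only by the creating user; if anything else running under the Galaxy group (for example, a process serving the zipped download) needs to read the directory, the permissions have to be widened to match the configured umask and group. A minimal standalone sketch of the idea, assuming a hypothetical helper in place of Galaxy's util.umask_fix_perms and written in modern Python octal syntax:

import os
import tempfile

def fix_temp_dir_perms(path, umask, perms=0o777, gid=None):
    # Hypothetical stand-in for Galaxy's util.umask_fix_perms: widen the
    # 0700 mode that mkdtemp() uses to whatever the configured umask
    # allows, and optionally hand the directory to the configured group.
    os.chmod(path, perms & ~umask)  # e.g. 0o777 & ~0o022 -> 0o755
    if gid is not None:
        os.chown(path, -1, gid)  # -1 leaves the owner unchanged

tmpd = tempfile.mkdtemp()
fix_temp_dir_perms(tmpd, umask=0o022)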
commit/galaxy-central: greg: Relocate the tool shed's container_util.
by commits-noreply@bitbucket.org 07 Aug '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b8cf5887ad46/
Changeset: b8cf5887ad46
User: greg
Date: 2013-08-07 20:37:22
Summary: Relocate the tool shed's container_util.
Affected #: 11 files
diff -r f02a75ce05b71457845c0e5f56f409047a841967 -r b8cf5887ad464707887aaf8381df19dbc67ac697 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -17,6 +17,7 @@
from galaxy.util import json
from galaxy.model.orm import and_
import tool_shed.util.shed_util_common as suc
+from tool_shed.util import container_util
from tool_shed.util import encoding_util
from tool_shed.util import export_util
from tool_shed.util import metadata_util
@@ -30,7 +31,6 @@
from tool_shed.util import workflow_util
from tool_shed.galaxy_install import repository_util
from galaxy.webapps.tool_shed.util import common_util
-from galaxy.webapps.tool_shed.util import container_util
import galaxy.tools
import tool_shed.grids.repository_grids as repository_grids
import tool_shed.grids.util as grids_util
diff -r f02a75ce05b71457845c0e5f56f409047a841967 -r b8cf5887ad464707887aaf8381df19dbc67ac697 lib/galaxy/webapps/tool_shed/controllers/repository_review.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository_review.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository_review.py
@@ -5,7 +5,7 @@
from sqlalchemy.sql.expression import func
from galaxy.model.orm import and_
from galaxy.webapps.tool_shed.util import common_util
-from galaxy.webapps.tool_shed.util.container_util import STRSEP
+from tool_shed.util.container_util import STRSEP
import tool_shed.util.shed_util_common as suc
from tool_shed.util import review_util
from galaxy.util.odict import odict
diff -r f02a75ce05b71457845c0e5f56f409047a841967 -r b8cf5887ad464707887aaf8381df19dbc67ac697 lib/galaxy/webapps/tool_shed/util/container_util.py
--- a/lib/galaxy/webapps/tool_shed/util/container_util.py
+++ /dev/null
@@ -1,1408 +0,0 @@
-import logging
-import os
-import threading
-from galaxy.util import asbool
-from galaxy.web.framework.helpers import time_ago
-from tool_shed.util import readme_util
-import tool_shed.util.shed_util_common as suc
-
-log = logging.getLogger( __name__ )
-
-# String separator
-STRSEP = '__ESEP__'
-
-
-class Folder( object ):
- """Container object."""
-
- def __init__( self, id=None, key=None, label=None, parent=None ):
- self.id = id
- self.key = key
- self.label = label
- self.parent = parent
- self.description = None
- self.datatypes = []
- self.folders = []
- self.invalid_repository_dependencies = []
- self.invalid_tool_dependencies = []
- self.invalid_tools = []
- self.installation_errors = []
- self.current_repository_installation_errors = []
- self.repository_installation_errors = []
- self.tool_dependency_installation_errors = []
- self.valid_tools = []
- self.valid_data_managers = []
- self.invalid_data_managers = []
- self.tool_dependencies = []
- self.failed_tests = []
- self.missing_test_components = []
- self.not_tested = []
- self.passed_tests = []
- self.test_environments = []
- self.repository_dependencies = []
- self.readme_files = []
- self.workflows = []
-
- def contains_folder( self, folder ):
- for index, contained_folder in enumerate( self.folders ):
- if folder == contained_folder:
- return index, contained_folder
- return 0, None
-
- def contains_repository_dependency( self, repository_dependency ):
- listified_repository_dependency = repository_dependency.listify
- for contained_repository_dependency in self.repository_dependencies:
- if contained_repository_dependency.listify == listified_repository_dependency:
- return True
- return False
-
- def remove_repository_dependency( self, repository_dependency ):
- listified_repository_dependency = repository_dependency.listify
- for contained_repository_dependency in self.repository_dependencies:
- if contained_repository_dependency.listify == listified_repository_dependency:
- self.repository_dependencies.remove( contained_repository_dependency )
-
- def to_repository_dependency( self, repository_dependency_id ):
- toolshed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( self.key.split( STRSEP ) )
- return RepositoryDependency( id=repository_dependency_id,
- toolshed=toolshed,
- repository_name=name,
- repository_owner=owner,
- changeset_revision=changeset_revision,
- prior_installation_required=asbool( prior_installation_required ) )
-
-
-class DataManager( object ):
- """Data Manager object"""
-
- def __init__( self, id=None, name=None, version=None, data_tables=None ):
- self.id = id
- self.name = name
- self.version = version
- self.data_tables = data_tables
-
-
-class Datatype( object ):
- """Datatype object"""
-
- def __init__( self, id=None, extension=None, type=None, mimetype=None, subclass=None, converters=None, display_app_containers=None ):
- self.id = id
- self.extension = extension
- self.type = type
- self.mimetype = mimetype
- self.subclass = subclass
- self.converters = converters
- self.display_app_containers = display_app_containers
-
-
-class FailedTest( object ):
- """Failed tool tests object"""
-
- def __init__( self, id=None, stderr=None, test_id=None, tool_id=None, tool_version=None, traceback=None ):
- self.id = id
- self.stderr = stderr
- self.test_id = test_id
- self.tool_id = tool_id
- self.tool_version = tool_version
- self.traceback = traceback
-
-
-class InvalidDataManager( object ):
- """Invalid data Manager object"""
-
- def __init__( self, id=None, index=None, error=None ):
- self.id = id
- self.index = index
- self.error = error
-
-
-class InvalidRepositoryDependency( object ):
- """Invalid repository dependency definition object"""
-
- def __init__( self, id=None, toolshed=None, repository_name=None, repository_owner=None, changeset_revision=None, prior_installation_required=False, error=None ):
- self.id = id
- self.toolshed = toolshed
- self.repository_name = repository_name
- self.repository_owner = repository_owner
- self.changeset_revision = changeset_revision
- self.prior_installation_required = prior_installation_required
- self.error = error
-
-
-class InvalidTool( object ):
- """Invalid tool object"""
-
- def __init__( self, id=None, tool_config=None, repository_id=None, changeset_revision=None, repository_installation_status=None ):
- self.id = id
- self.tool_config = tool_config
- self.repository_id = repository_id
- self.changeset_revision = changeset_revision
- self.repository_installation_status = repository_installation_status
-
-
-class InvalidToolDependency( object ):
- """Invalid tool dependency definition object"""
-
- def __init__( self, id=None, name=None, version=None, type=None, error=None ):
- self.id = id
- self.name = name
- self.version = version
- self.type = type
- self.error = error
-
-
-class MissingTestComponent( object ):
- """Missing tool test components object"""
-
- def __init__( self, id=None, missing_components=None, tool_guid=None, tool_id=None, tool_version=None ):
- self.id = id
- self.missing_components = missing_components
- self.tool_guid = tool_guid
- self.tool_id = tool_id
- self.tool_version = tool_version
-
-
-class NotTested( object ):
- """NotTested object"""
-
- def __init__( self, id=None, reason=None ):
- self.id = id
- self.reason = reason
-
-
-class PassedTest( object ):
- """Passed tool tests object"""
-
- def __init__( self, id=None, test_id=None, tool_id=None, tool_version=None ):
- self.id = id
- self.test_id = test_id
- self.tool_id = tool_id
- self.tool_version = tool_version
-
-
-class ReadMe( object ):
- """Readme text object"""
-
- def __init__( self, id=None, name=None, text=None ):
- self.id = id
- self.name = name
- self.text = text
-
-
-class RepositoryDependency( object ):
- """Repository dependency object"""
-
- def __init__( self, id=None, toolshed=None, repository_name=None, repository_owner=None, changeset_revision=None, prior_installation_required=False,
- installation_status=None, tool_shed_repository_id=None ):
- self.id = id
- self.toolshed = toolshed
- self.repository_name = repository_name
- self.repository_owner = repository_owner
- self.changeset_revision = changeset_revision
- self.prior_installation_required = prior_installation_required
- self.installation_status = installation_status
- self.tool_shed_repository_id = tool_shed_repository_id
-
- @property
- def listify( self ):
- return [ self.toolshed, self.repository_name, self.repository_owner, self.changeset_revision, asbool( str( self.prior_installation_required ) ) ]
-
-
-class RepositoryInstallationError( object ):
- """Repository installation error object"""
-
- def __init__( self, id=None, tool_shed=None, name=None, owner=None, changeset_revision=None, error_message=None ):
- self.id = id
- self.tool_shed = tool_shed
- self.name = name
- self.owner = owner
- self.changeset_revision = changeset_revision
- self.error_message = error_message
-
-
-class TestEnvironment( object ):
- """Tool test environment object"""
-
- def __init__( self, id=None, architecture=None, galaxy_database_version=None, galaxy_revision=None, python_version=None, system=None, time_last_tested=None,
- tool_shed_database_version=None, tool_shed_mercurial_version=None, tool_shed_revision=None ):
- self.id = id
- self.architecture = architecture
- self.galaxy_database_version = galaxy_database_version
- self.galaxy_revision = galaxy_revision
- self.python_version = python_version
- self.system = system
- self.time_last_tested = time_last_tested
- self.tool_shed_database_version = tool_shed_database_version
- self.tool_shed_mercurial_version = tool_shed_mercurial_version
- self.tool_shed_revision = tool_shed_revision
-
-
-class Tool( object ):
- """Tool object"""
-
- def __init__( self, id=None, tool_config=None, tool_id=None, name=None, description=None, version=None, requirements=None,
- repository_id=None, changeset_revision=None, repository_installation_status=None ):
- self.id = id
- self.tool_config = tool_config
- self.tool_id = tool_id
- self.name = name
- self.description = description
- self.version = version
- self.requirements = requirements
- self.repository_id = repository_id
- self.changeset_revision = changeset_revision
- self.repository_installation_status = repository_installation_status
-
-
-class ToolDependency( object ):
- """Tool dependency object"""
-
- def __init__( self, id=None, name=None, version=None, type=None, readme=None, installation_status=None, repository_id=None,
- tool_dependency_id=None, is_orphan=None ):
- self.id = id
- self.name = name
- self.version = version
- self.type = type
- self.readme = readme
- self.installation_status = installation_status
- self.repository_id = repository_id
- self.tool_dependency_id = tool_dependency_id
- self.is_orphan = is_orphan
-
- @property
- def listify( self ):
- return [ self.name, self.version, self.type ]
-
-
-class ToolDependencyInstallationError( object ):
- """Tool dependency installation error object"""
-
- def __init__( self, id=None, type=None, name=None, version=None, error_message=None ):
- self.id = id
- self.type = type
- self.name = name
- self.version = version
- self.error_message = error_message
-
-
-class Workflow( object ):
- """Workflow object."""
-
- def __init__( self, id=None, workflow_name=None, steps=None, format_version=None, annotation=None, repository_metadata_id=None, repository_id=None ):
- # When rendered in the tool shed, repository_metadata_id will have a value and repository_id will be None. When rendered in Galaxy, repository_id
- # will have a value and repository_metadata_id will be None.
- self.id = id
- self.workflow_name = workflow_name
- self.steps = steps
- self.format_version = format_version
- self.annotation = annotation
- self.repository_metadata_id = repository_metadata_id
- self.repository_id = repository_id
-
-def add_orphan_settings_to_tool_dependencies( tool_dependencies, orphan_tool_dependencies ):
- """Inspect all received tool dependencies and label those that are orphans within the repository."""
- orphan_env_dependencies = orphan_tool_dependencies.get( 'set_environment', None )
- new_tool_dependencies = {}
- if tool_dependencies:
- for td_key, requirements_dict in tool_dependencies.items():
- if td_key in [ 'set_environment' ]:
- # "set_environment": [{"name": "R_SCRIPT_PATH", "type": "set_environment"}]
- if orphan_env_dependencies:
- new_set_environment_dict_list = []
- for set_environment_dict in requirements_dict:
- if set_environment_dict in orphan_env_dependencies:
- set_environment_dict[ 'is_orphan' ] = True
- else:
- set_environment_dict[ 'is_orphan' ] = False
- new_set_environment_dict_list.append( set_environment_dict )
- new_tool_dependencies[ td_key ] = new_set_environment_dict_list
- else:
- new_tool_dependencies[ td_key ] = requirements_dict
- else:
- # {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1"}
- if td_key in orphan_tool_dependencies:
- requirements_dict[ 'is_orphan' ] = True
- else:
- requirements_dict[ 'is_orphan' ] = False
- new_tool_dependencies[ td_key ] = requirements_dict
- return new_tool_dependencies
-
-def build_data_managers_folder( trans, folder_id, data_managers, label=None ):
- """Return a folder hierarchy containing Data Managers."""
- if data_managers:
- if label is None:
- label = "Data Managers"
- data_manager_id = 0
- folder_id += 1
- data_managers_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
- folder_id += 1
- key = "valid_data_managers"
- folder = Folder( id=folder_id, key=key, label=label, parent=data_managers_root_folder )
- data_managers_root_folder.folders.append( folder )
- # Insert a header row.
- data_manager_id += 1
- data_manager = DataManager( id=data_manager_id,
- name='Name',
- version='Version',
- data_tables='Data Tables' )
- folder.valid_data_managers.append( data_manager )
- for data_manager_dict in data_managers.itervalues():
- data_manager_id += 1
- data_manager = DataManager( id=data_manager_id,
- name=data_manager_dict.get( 'name', '' ),
- version=data_manager_dict.get( 'version', '' ),
- data_tables=", ".join( data_manager_dict.get( 'data_tables', '' ) ) )
- folder.valid_data_managers.append( data_manager )
- else:
- data_managers_root_folder = None
- return folder_id, data_managers_root_folder
-
-def build_datatypes_folder( trans, folder_id, datatypes, label='Datatypes' ):
- """Return a folder hierarchy containing datatypes."""
- if datatypes:
- datatype_id = 0
- folder_id += 1
- datatypes_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
- folder_id += 1
- folder = Folder( id=folder_id, key='datatypes', label=label, parent=datatypes_root_folder )
- datatypes_root_folder.folders.append( folder )
- # Insert a header row.
- datatype_id += 1
- datatype = Datatype( id=datatype_id,
- extension='extension',
- type='type',
- mimetype='mimetype',
- subclass='subclass' )
- folder.datatypes.append( datatype )
- for datatypes_dict in datatypes:
- # {"converters":
- # [{"target_datatype": "gff",
- # "tool_config": "bed_to_gff_converter.xml",
- # "guid": "localhost:9009/repos/test/bed_to_gff_converter/CONVERTER_bed_to_gff_0/2.0.0"}],
- # "display_in_upload": "true",
- # "dtype": "galaxy.datatypes.interval:Bed",
- # "extension": "bed"}
- # TODO: converters and display_app information is not currently rendered. Should it be?
- # Handle defined converters, if any.
- converters = datatypes_dict.get( 'converters', None )
- if converters:
- num_converters = len( converters )
- else:
- num_converters = 0
- # Handle defined display applications, if any.
- display_app_containers = datatypes_dict.get( 'display_app_containers', None )
- if display_app_containers:
- num_display_app_containers = len( display_app_containers )
- else:
- num_display_app_containers = 0
- datatype_id += 1
- datatype = Datatype( id=datatype_id,
- extension=datatypes_dict.get( 'extension', '' ),
- type=datatypes_dict.get( 'dtype', '' ),
- mimetype=datatypes_dict.get( 'mimetype', '' ),
- subclass=datatypes_dict.get( 'subclass', '' ),
- converters=num_converters,
- display_app_containers=num_display_app_containers )
- folder.datatypes.append( datatype )
- else:
- datatypes_root_folder = None
- return folder_id, datatypes_root_folder
-
-def build_invalid_data_managers_folder( trans, folder_id, data_managers, error_messages=None, label=None ):
- """Return a folder hierarchy containing invalid Data Managers."""
- if data_managers or error_messages:
- if label is None:
- label = "Invalid Data Managers"
- data_manager_id = 0
- folder_id += 1
- data_managers_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
- folder_id += 1
- key = "invalid_data_managers"
- folder = Folder( id=folder_id, key=key, label=label, parent=data_managers_root_folder )
- data_managers_root_folder.folders.append( folder )
- # Insert a header row.
- data_manager_id += 1
- data_manager = InvalidDataManager( id=data_manager_id,
- index='Element Index',
- error='Error' )
- folder.invalid_data_managers.append( data_manager )
- if error_messages:
- for error_message in error_messages:
- data_manager_id += 1
- data_manager = InvalidDataManager( id=data_manager_id,
- index=0,
- error=error_message )
- folder.invalid_data_managers.append( data_manager )
- has_errors = True
- for data_manager_dict in data_managers:
- data_manager_id += 1
- data_manager = InvalidDataManager( id=data_manager_id,
- index=data_manager_dict.get( 'index', 0 ) + 1,
- error=data_manager_dict.get( 'error_message', '' ) )
- folder.invalid_data_managers.append( data_manager )
- has_errors = True
- else:
- data_managers_root_folder = None
- return folder_id, data_managers_root_folder
-
-def build_invalid_repository_dependencies_root_folder( trans, folder_id, invalid_repository_dependencies_dict ):
- """Return a folder hierarchy containing invalid repository dependencies."""
- label = 'Invalid repository dependencies'
- if invalid_repository_dependencies_dict:
- invalid_repository_dependency_id = 0
- folder_id += 1
- invalid_repository_dependencies_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
- folder_id += 1
- invalid_repository_dependencies_folder = Folder( id=folder_id,
- key='invalid_repository_dependencies',
- label=label,
- parent=invalid_repository_dependencies_root_folder )
- invalid_repository_dependencies_root_folder.folders.append( invalid_repository_dependencies_folder )
- invalid_repository_dependencies = invalid_repository_dependencies_dict[ 'repository_dependencies' ]
- for invalid_repository_dependency in invalid_repository_dependencies:
- folder_id += 1
- invalid_repository_dependency_id += 1
- toolshed, name, owner, changeset_revision, prior_installation_required, error = \
- suc.parse_repository_dependency_tuple( invalid_repository_dependency, contains_error=True )
- key = generate_repository_dependencies_key_for_repository( toolshed, name, owner, changeset_revision, prior_installation_required )
- label = "Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>" % ( name, changeset_revision, owner )
- folder = Folder( id=folder_id,
- key=key,
- label=label,
- parent=invalid_repository_dependencies_folder )
- ird = InvalidRepositoryDependency( id=invalid_repository_dependency_id,
- toolshed=toolshed,
- repository_name=name,
- repository_owner=owner,
- changeset_revision=changeset_revision,
- prior_installation_required=asbool( prior_installation_required ),
- error=error )
- folder.invalid_repository_dependencies.append( ird )
- invalid_repository_dependencies_folder.folders.append( folder )
- else:
- invalid_repository_dependencies_root_folder = None
- return folder_id, invalid_repository_dependencies_root_folder
-
-def build_invalid_tool_dependencies_root_folder( trans, folder_id, invalid_tool_dependencies_dict ):
- """Return a folder hierarchy containing invalid tool dependencies."""
- # # INvalid tool dependencies are always packages like:
- # {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1" "error" : "some sting" }
- label = 'Invalid tool dependencies'
- if invalid_tool_dependencies_dict:
- invalid_tool_dependency_id = 0
- folder_id += 1
- invalid_tool_dependencies_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
- folder_id += 1
- invalid_tool_dependencies_folder = Folder( id=folder_id,
- key='invalid_tool_dependencies',
- label=label,
- parent=invalid_tool_dependencies_root_folder )
- invalid_tool_dependencies_root_folder.folders.append( invalid_tool_dependencies_folder )
- for td_key, requirements_dict in invalid_tool_dependencies_dict.items():
- folder_id += 1
- invalid_tool_dependency_id += 1
- name = requirements_dict[ 'name' ]
- type = requirements_dict[ 'type' ]
- version = requirements_dict[ 'version' ]
- error = requirements_dict[ 'error' ]
- key = generate_tool_dependencies_key( name, version, type )
- label = "Version <b>%s</b> of the <b>%s</b><b>%s</b>" % ( version, name, type )
- folder = Folder( id=folder_id,
- key=key,
- label=label,
- parent=invalid_tool_dependencies_folder )
- itd = InvalidToolDependency( id=invalid_tool_dependency_id,
- name=name,
- version=version,
- type=type,
- error=error )
- folder.invalid_tool_dependencies.append( itd )
- invalid_tool_dependencies_folder.folders.append( folder )
- else:
- invalid_tool_dependencies_root_folder = None
- return folder_id, invalid_tool_dependencies_root_folder
-
-def build_invalid_tools_folder( trans, folder_id, invalid_tool_configs, changeset_revision, repository=None, label='Invalid tools' ):
- """Return a folder hierarchy containing invalid tools."""
- # TODO: Should we display invalid tools on the tool panel selection page when installing the repository into Galaxy?
- if invalid_tool_configs:
- invalid_tool_id = 0
- folder_id += 1
- invalid_tools_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
- folder_id += 1
- folder = Folder( id=folder_id, key='invalid_tools', label=label, parent=invalid_tools_root_folder )
- invalid_tools_root_folder.folders.append( folder )
- for invalid_tool_config in invalid_tool_configs:
- invalid_tool_id += 1
- if repository:
- repository_id = repository.id
- if trans.webapp.name == 'galaxy':
- repository_installation_status = repository.status
- else:
- repository_installation_status = None
- else:
- repository_id = None
- repository_installation_status = None
- invalid_tool = InvalidTool( id=invalid_tool_id,
- tool_config=invalid_tool_config,
- repository_id=repository_id,
- changeset_revision=changeset_revision,
- repository_installation_status=repository_installation_status )
- folder.invalid_tools.append( invalid_tool )
- else:
- invalid_tools_root_folder = None
- return folder_id, invalid_tools_root_folder
-
-def build_readme_files_folder( trans, folder_id, readme_files_dict, label='Readme files' ):
- """Return a folder hierarchy containing readme text files."""
- if readme_files_dict:
- multiple_readme_files = len( readme_files_dict ) > 1
- readme_id = 0
- folder_id += 1
- readme_files_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
- if multiple_readme_files:
- folder_id += 1
- readme_files_folder = Folder( id=folder_id, key='readme_files', label=label, parent=readme_files_root_folder )
- readme_files_root_folder.folders.append( readme_files_folder )
- for readme_file_name, readme_file_text in readme_files_dict.items():
- readme_id += 1
- readme = ReadMe( id=readme_id, name=readme_file_name, text=readme_file_text )
- if multiple_readme_files:
- folder_id += 1
- folder = Folder( id=folder_id, key=readme.name, label=readme.name, parent=readme_files_folder )
- folder.readme_files.append( readme )
- readme_files_folder.folders.append( folder )
- else:
- folder_id += 1
- readme_files_folder = Folder( id=folder_id, key='readme_files', label=readme.name, parent=readme_files_root_folder )
- readme_files_folder.readme_files.append( readme )
- readme_files_root_folder.folders.append( readme_files_folder )
- else:
- readme_files_root_folder = None
- return folder_id, readme_files_root_folder
-
-def build_repository_containers_for_galaxy( trans, repository, datatypes, invalid_tools, missing_repository_dependencies, missing_tool_dependencies,
- readme_files_dict, repository_dependencies, tool_dependencies, valid_tools, workflows, valid_data_managers,
- invalid_data_managers, data_managers_errors, new_install=False, reinstalling=False ):
- """Return a dictionary of containers for the received repository's dependencies and readme files for display during installation to Galaxy."""
- containers_dict = dict( datatypes=None,
- invalid_tools=None,
- missing_tool_dependencies=None,
- readme_files=None,
- repository_dependencies=None,
- missing_repository_dependencies=None,
- tool_dependencies=None,
- valid_tools=None,
- workflows=None,
- valid_data_managers=None,
- invalid_data_managers=None )
- # Some of the tool dependency folders will include links to display tool dependency information, and some of these links require the repository
- # id. However we need to be careful because sometimes the repository object is None.
- if repository:
- repository_id = repository.id
- changeset_revision = repository.changeset_revision
- else:
- repository_id = None
- changeset_revision = None
- lock = threading.Lock()
- lock.acquire( True )
- try:
- folder_id = 0
- # Datatypes container.
- if datatypes:
- folder_id, datatypes_root_folder = build_datatypes_folder( trans, folder_id, datatypes )
- containers_dict[ 'datatypes' ] = datatypes_root_folder
- # Invalid tools container.
- if invalid_tools:
- folder_id, invalid_tools_root_folder = build_invalid_tools_folder( trans,
- folder_id,
- invalid_tools,
- changeset_revision,
- repository=repository,
- label='Invalid tools' )
- containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
- # Readme files container.
- if readme_files_dict:
- folder_id, readme_files_root_folder = build_readme_files_folder( trans, folder_id, readme_files_dict )
- containers_dict[ 'readme_files' ] = readme_files_root_folder
- # Installed repository dependencies container.
- if repository_dependencies:
- if new_install:
- label = 'Repository dependencies'
- else:
- label = 'Installed repository dependencies'
- folder_id, repository_dependencies_root_folder = build_repository_dependencies_folder( trans=trans,
- folder_id=folder_id,
- repository_dependencies=repository_dependencies,
- label=label,
- installed=True )
- containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
- # Missing repository dependencies container.
- if missing_repository_dependencies:
- folder_id, missing_repository_dependencies_root_folder = \
- build_repository_dependencies_folder( trans=trans,
- folder_id=folder_id,
- repository_dependencies=missing_repository_dependencies,
- label='Missing repository dependencies',
- installed=False )
- containers_dict[ 'missing_repository_dependencies' ] = missing_repository_dependencies_root_folder
- # Installed tool dependencies container.
- if tool_dependencies:
- if new_install:
- label = 'Tool dependencies'
- else:
- label = 'Installed tool dependencies'
- # We only want to display the Status column if the tool_dependency is missing.
- folder_id, tool_dependencies_root_folder = build_tool_dependencies_folder( trans,
- folder_id,
- tool_dependencies,
- label=label,
- missing=False,
- new_install=new_install,
- reinstalling=reinstalling )
- containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
- # Missing tool dependencies container.
- if missing_tool_dependencies:
- # We only want to display the Status column if the tool_dependency is missing.
- folder_id, missing_tool_dependencies_root_folder = build_tool_dependencies_folder( trans,
- folder_id,
- missing_tool_dependencies,
- label='Missing tool dependencies',
- missing=True,
- new_install=new_install,
- reinstalling=reinstalling )
- containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
- # Valid tools container.
- if valid_tools:
- folder_id, valid_tools_root_folder = build_tools_folder( trans,
- folder_id,
- valid_tools,
- repository,
- changeset_revision,
- label='Valid tools' )
- containers_dict[ 'valid_tools' ] = valid_tools_root_folder
- # Workflows container.
- if workflows:
- folder_id, workflows_root_folder = build_workflows_folder( trans=trans,
- folder_id=folder_id,
- workflows=workflows,
- repository_metadata_id=None,
- repository_id=repository_id,
- label='Workflows' )
- containers_dict[ 'workflows' ] = workflows_root_folder
- if valid_data_managers:
- folder_id, valid_data_managers_root_folder = build_data_managers_folder( trans=trans,
- folder_id=folder_id,
- data_managers=valid_data_managers,
- label='Valid Data Managers' )
- containers_dict[ 'valid_data_managers' ] = valid_data_managers_root_folder
- if invalid_data_managers or data_managers_errors:
- folder_id, invalid_data_managers_root_folder = build_invalid_data_managers_folder( trans=trans,
- folder_id=folder_id,
- data_managers=invalid_data_managers,
- error_messages=data_managers_errors,
- label='Invalid Data Managers' )
- containers_dict[ 'invalid_data_managers' ] = invalid_data_managers_root_folder
- except Exception, e:
- log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) )
- finally:
- lock.release()
- return containers_dict
-
-def build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata, exclude=None ):
- """Return a dictionary of containers for the received repository's dependencies and contents for display in the tool shed."""
- if exclude is None:
- exclude = []
- containers_dict = dict( datatypes=None,
- invalid_tools=None,
- readme_files=None,
- repository_dependencies=None,
- tool_dependencies=None,
- valid_tools=None,
- workflows=None,
- valid_data_managers=None
- )
- if repository_metadata:
- metadata = repository_metadata.metadata
- tool_test_results = repository_metadata.tool_test_results
- try:
- time_last_tested = time_ago( repository_metadata.time_last_tested )
- except:
- time_last_tested = None
- lock = threading.Lock()
- lock.acquire( True )
- try:
- folder_id = 0
- # Datatypes container.
- if metadata:
- if 'datatypes' not in exclude and 'datatypes' in metadata:
- datatypes = metadata[ 'datatypes' ]
- folder_id, datatypes_root_folder = build_datatypes_folder( trans, folder_id, datatypes )
- containers_dict[ 'datatypes' ] = datatypes_root_folder
- # Invalid repository dependencies container.
- if metadata:
- if 'invalid_repository_dependencies' not in exclude and 'invalid_repository_dependencies' in metadata:
- invalid_repository_dependencies = metadata[ 'invalid_repository_dependencies' ]
- folder_id, invalid_repository_dependencies_root_folder = \
- build_invalid_repository_dependencies_root_folder( trans,
- folder_id,
- invalid_repository_dependencies )
- containers_dict[ 'invalid_repository_dependencies' ] = invalid_repository_dependencies_root_folder
- # Invalid tool dependencies container.
- if metadata:
- if 'invalid_tool_dependencies' not in exclude and 'invalid_tool_dependencies' in metadata:
- invalid_tool_dependencies = metadata[ 'invalid_tool_dependencies' ]
- folder_id, invalid_tool_dependencies_root_folder = \
- build_invalid_tool_dependencies_root_folder( trans,
- folder_id,
- invalid_tool_dependencies )
- containers_dict[ 'invalid_tool_dependencies' ] = invalid_tool_dependencies_root_folder
- # Invalid tools container.
- if metadata:
- if 'invalid_tools' not in exclude and 'invalid_tools' in metadata:
- invalid_tool_configs = metadata[ 'invalid_tools' ]
- folder_id, invalid_tools_root_folder = build_invalid_tools_folder( trans,
- folder_id,
- invalid_tool_configs,
- changeset_revision,
- repository=repository,
- label='Invalid tools' )
- containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
- # Readme files container.
- if metadata:
- if 'readme_files' not in exclude and 'readme_files' in metadata:
- readme_files_dict = readme_util.build_readme_files_dict( metadata )
- folder_id, readme_files_root_folder = build_readme_files_folder( trans, folder_id, readme_files_dict )
- containers_dict[ 'readme_files' ] = readme_files_root_folder
- if 'repository_dependencies' not in exclude:
- # Repository dependencies container.
- folder_id, repository_dependencies_root_folder = build_repository_dependencies_folder( trans=trans,
- folder_id=folder_id,
- repository_dependencies=repository_dependencies,
- label='Repository dependencies',
- installed=False )
- if repository_dependencies_root_folder:
- containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
- # Tool dependencies container.
- if metadata:
- if 'tool_dependencies' not in exclude and 'tool_dependencies' in metadata:
- tool_dependencies = metadata[ 'tool_dependencies' ]
- if trans.webapp.name == 'tool_shed':
- if 'orphan_tool_dependencies' in metadata:
- orphan_tool_dependencies = metadata[ 'orphan_tool_dependencies' ]
- tool_dependencies = add_orphan_settings_to_tool_dependencies( tool_dependencies, orphan_tool_dependencies )
- folder_id, tool_dependencies_root_folder = build_tool_dependencies_folder( trans,
- folder_id,
- tool_dependencies,
- missing=False,
- new_install=False )
- containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
- # Valid tools container.
- if metadata:
- if 'tools' not in exclude and 'tools' in metadata:
- valid_tools = metadata[ 'tools' ]
- folder_id, valid_tools_root_folder = build_tools_folder( trans,
- folder_id,
- valid_tools,
- repository,
- changeset_revision,
- label='Valid tools' )
- containers_dict[ 'valid_tools' ] = valid_tools_root_folder
- # Tool test results container.
- if 'tool_test_results' not in exclude and tool_test_results and len( tool_test_results ) > 1:
- # Only create and populate this folder if there are actual tool test results to display, since the display of the 'Test environment'
- # folder by itself can be misleading. We check for more than a single entry in the tool_test_results dictionary because it may have
- # only the "test_environment" entry, but we want at least 1 of "passed_tests", "failed_tests", "installation_errors", "missing_test_components"
- # "skipped_tests", "not_tested" or any other entry that may be added in the future.
- folder_id, tool_test_results_root_folder = build_tool_test_results_folder( trans, folder_id, tool_test_results, time_last_tested=time_last_tested )
- containers_dict[ 'tool_test_results' ] = tool_test_results_root_folder
- # Workflows container.
- if metadata:
- if 'workflows' not in exclude and 'workflows' in metadata:
- workflows = metadata[ 'workflows' ]
- folder_id, workflows_root_folder = build_workflows_folder( trans=trans,
- folder_id=folder_id,
- workflows=workflows,
- repository_metadata_id=repository_metadata.id,
- repository_id=None,
- label='Workflows' )
- containers_dict[ 'workflows' ] = workflows_root_folder
- # Valid Data Managers container
- if metadata:
- if 'data_manager' not in exclude and 'data_manager' in metadata:
- data_managers = metadata['data_manager'].get( 'data_managers', None )
- folder_id, data_managers_root_folder = build_data_managers_folder( trans, folder_id, data_managers, label="Data Managers" )
- containers_dict[ 'valid_data_managers' ] = data_managers_root_folder
- error_messages = metadata['data_manager'].get( 'error_messages', None )
- data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
- folder_id, data_managers_root_folder = build_invalid_data_managers_folder( trans, folder_id, data_managers, error_messages, label="Invalid Data Managers" )
- containers_dict[ 'invalid_data_managers' ] = data_managers_root_folder
- except Exception, e:
- log.debug( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) )
- finally:
- lock.release()
- return containers_dict
-
-def build_repository_dependencies_folder( trans, folder_id, repository_dependencies, label='Repository dependencies', installed=False ):
- """Return a folder hierarchy containing repository dependencies."""
- if repository_dependencies:
- repository_dependency_id = 0
- folder_id += 1
- # Create the root folder.
- repository_dependencies_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
- folder_id += 1
- # Create the Repository dependencies folder and add it to the root folder.
- repository_dependencies_folder_key = repository_dependencies[ 'root_key' ]
- repository_dependencies_folder = Folder( id=folder_id, key=repository_dependencies_folder_key, label=label, parent=repository_dependencies_root_folder )
- del repository_dependencies[ 'root_key' ]
- # The received repository_dependencies is a dictionary with keys: 'root_key', 'description', and one or more repository_dependency keys.
- # We want the description value associated with the repository_dependencies_folder.
- repository_dependencies_folder.description = repository_dependencies.get( 'description', None )
- repository_dependencies_root_folder.folders.append( repository_dependencies_folder )
- del repository_dependencies[ 'description' ]
- repository_dependencies_folder, folder_id, repository_dependency_id = \
- populate_repository_dependencies_container( trans, repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id )
- repository_dependencies_folder = prune_repository_dependencies( repository_dependencies_folder )
- else:
- repository_dependencies_root_folder = None
- return folder_id, repository_dependencies_root_folder
-
-def build_tools_folder( trans, folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools' ):
- """Return a folder hierarchy containing valid tools."""
- if tool_dicts:
- tool_id = 0
- folder_id += 1
- tools_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
- folder_id += 1
- folder = Folder( id=folder_id, key='tools', label=label, parent=tools_root_folder )
- if trans.webapp.name == 'galaxy':
- folder.description = 'click the name to inspect the tool metadata'
- tools_root_folder.folders.append( folder )
- # Insert a header row.
- tool_id += 1
- tool = Tool( id=tool_id,
- tool_config='',
- tool_id='',
- name='Name',
- description='Description',
- version='Version',
- requirements='',
- repository_id='',
- changeset_revision='' )
- folder.valid_tools.append( tool )
- if repository:
- repository_id = repository.id
- if trans.webapp.name == 'galaxy':
- repository_installation_status = repository.status
- else:
- repository_installation_status = None
- else:
- repository_id = None
- repository_installation_status = None
- for tool_dict in tool_dicts:
- tool_id += 1
- if 'requirements' in tool_dict:
- requirements = tool_dict[ 'requirements' ]
- requirements_str = ''
- for requirement_dict in requirements:
- requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] )
- requirements_str = requirements_str.rstrip( ', ' )
- else:
- requirements_str = 'none'
- tool = Tool( id=tool_id,
- tool_config=tool_dict[ 'tool_config' ],
- tool_id=tool_dict[ 'id' ],
- name=tool_dict[ 'name' ],
- description=tool_dict[ 'description' ],
- version=tool_dict[ 'version' ],
- requirements=requirements_str,
- repository_id=repository_id,
- changeset_revision=changeset_revision,
- repository_installation_status=repository_installation_status )
- folder.valid_tools.append( tool )
- else:
- tools_root_folder = None
- return folder_id, tools_root_folder
-
-def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', missing=False, new_install=False, reinstalling=False ):
- """Return a folder hierarchy containing tool dependencies."""
- # When we're in Galaxy (not the tool shed) and the tool dependencies are not installed or are in an error state, they are considered missing. The tool
- # dependency status will be displayed only if a record exists for the tool dependency in the Galaxy database, but the tool dependency is not installed.
- # The value for new_install will be True only if the associated repository in being installed for the first time. This value is used in setting the
- # container description.
- if tool_dependencies:
- tool_dependency_id = 0
- folder_id += 1
- tool_dependencies_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
- folder_id += 1
- folder = Folder( id=folder_id, key='tool_dependencies', label=label, parent=tool_dependencies_root_folder )
- if trans.webapp.name == 'galaxy':
- if new_install or reinstalling:
- folder.description = "repository tools require handling of these dependencies"
- elif missing and not new_install and not reinstalling:
- folder.description = 'click the name to install the missing dependency'
- else:
- folder.description = 'click the name to browse the dependency installation directory'
- tool_dependencies_root_folder.folders.append( folder )
- # Insert a header row.
- tool_dependency_id += 1
- if trans.webapp.name == 'galaxy':
- tool_dependency = ToolDependency( id=tool_dependency_id,
- name='Name',
- version='Version',
- type='Type',
- readme=None,
- installation_status='Installation status',
- repository_id=None,
- tool_dependency_id=None,
- is_orphan=None )
- else:
- tool_dependency = ToolDependency( id=tool_dependency_id,
- name='Name',
- version='Version',
- type='Type',
- readme=None,
- installation_status=None,
- repository_id=None,
- tool_dependency_id=None,
- is_orphan='Orphan' )
- folder.tool_dependencies.append( tool_dependency )
- is_orphan_description = "these dependencies may not be required by tools in this repository"
- for dependency_key, requirements_dict in tool_dependencies.items():
- tool_dependency_id += 1
- if dependency_key in [ 'set_environment' ]:
- for set_environment_dict in requirements_dict:
- if trans.webapp.name == 'tool_shed':
- is_orphan = set_environment_dict.get( 'is_orphan', False )
- else:
- # TODO: handle this is Galaxy
- is_orphan = False
- if is_orphan:
- folder.description = is_orphan_description
- name = set_environment_dict.get( 'name', None )
- type = set_environment_dict[ 'type' ]
- repository_id = set_environment_dict.get( 'repository_id', None )
- td_id = set_environment_dict.get( 'tool_dependency_id', None )
- if trans.webapp.name == 'galaxy':
- installation_status = set_environment_dict.get( 'status', 'Never installed' )
- else:
- installation_status = None
- tool_dependency = ToolDependency( id=tool_dependency_id,
- name=name,
- version=None,
- type=type,
- readme=None,
- installation_status=installation_status,
- repository_id=repository_id,
- tool_dependency_id=td_id,
- is_orphan=is_orphan )
- folder.tool_dependencies.append( tool_dependency )
- else:
- if trans.webapp.name == 'tool_shed':
- is_orphan = requirements_dict.get( 'is_orphan', False )
- else:
- # TODO: handle this is Galaxy
- is_orphan = False
- if is_orphan:
- folder.description = is_orphan_description
- name = requirements_dict[ 'name' ]
- version = requirements_dict[ 'version' ]
- type = requirements_dict[ 'type' ]
- repository_id = requirements_dict.get( 'repository_id', None )
- td_id = requirements_dict.get( 'tool_dependency_id', None )
- if trans.webapp.name == 'galaxy':
- installation_status = requirements_dict.get( 'status', 'Never installed' )
- else:
- installation_status = None
- tool_dependency = ToolDependency( id=tool_dependency_id,
- name=name,
- version=version,
- type=type,
- readme=None,
- installation_status=installation_status,
- repository_id=repository_id,
- tool_dependency_id=td_id,
- is_orphan=is_orphan )
- folder.tool_dependencies.append( tool_dependency )
- else:
- tool_dependencies_root_folder = None
- return folder_id, tool_dependencies_root_folder
-
-def build_tool_test_results_folder( trans, folder_id, tool_test_results_dict, label='Tool test results', time_last_tested=None ):
- """Return a folder hierarchy containing tool dependencies."""
- # This container is displayed only in the tool shed.
- if tool_test_results_dict:
- folder_id += 1
- tool_test_results_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
- test_environment_dict = tool_test_results_dict.get( 'test_environment', None )
- if test_environment_dict:
- folder_id += 1
- test_results_folder = Folder( id=folder_id, key='test_results', label=label, parent=tool_test_results_root_folder )
- tool_test_results_root_folder.folders.append( test_results_folder )
- folder_id += 1
- folder = Folder( id=folder_id, key='test_environment', label='Automated test environment', parent=test_results_folder )
- test_results_folder.folders.append( folder )
- test_environment = TestEnvironment( id=1,
- architecture=test_environment_dict.get( 'architecture', '' ),
- galaxy_database_version=test_environment_dict.get( 'galaxy_database_version', '' ),
- galaxy_revision=test_environment_dict.get( 'galaxy_revision', '' ),
- python_version=test_environment_dict.get( 'python_version', '' ),
- system=test_environment_dict.get( 'system', '' ),
- time_last_tested=time_last_tested,
- tool_shed_database_version=test_environment_dict.get( 'tool_shed_database_version', '' ),
- tool_shed_mercurial_version=test_environment_dict.get( 'tool_shed_mercurial_version', '' ),
- tool_shed_revision=test_environment_dict.get( 'tool_shed_revision', '' ) )
- folder.test_environments.append( test_environment )
- not_tested_dict = tool_test_results_dict.get( 'not_tested', {} )
- if not_tested_dict:
- folder_id += 1
- folder = Folder( id=folder_id, key='not_tested', label='Not tested', parent=test_results_folder )
- test_results_folder.folders.append( folder )
- not_tested_id = 0
- not_tested = NotTested( id=not_tested_id,
- reason=not_tested_dict.get( 'reason', '' ) )
- folder.not_tested.append( not_tested )
- passed_tests_dicts = tool_test_results_dict.get( 'passed_tests', [] )
- if passed_tests_dicts:
- folder_id += 1
- folder = Folder( id=folder_id, key='passed_tests', label='Tests that passed successfully', parent=test_results_folder )
- test_results_folder.folders.append( folder )
- passed_test_id = 0
- for passed_tests_dict in passed_tests_dicts:
- passed_test_id += 1
- passed_test = PassedTest( id=passed_test_id,
- test_id=passed_tests_dict.get( 'test_id' '' ),
- tool_id=passed_tests_dict.get( 'tool_id', '' ),
- tool_version=passed_tests_dict.get( 'tool_version', '' ) )
- folder.passed_tests.append( passed_test )
- failed_tests_dicts = tool_test_results_dict.get( 'failed_tests', [] )
- if failed_tests_dicts:
- folder_id += 1
- folder = Folder( id=folder_id, key='failed_tests', label='Tests that failed', parent=test_results_folder )
- test_results_folder.folders.append( folder )
- failed_test_id = 0
- for failed_tests_dict in failed_tests_dicts:
- failed_test_id += 1
- failed_test = FailedTest( id=failed_test_id,
- stderr=failed_tests_dict.get( 'stderr', '' ),
- test_id=failed_tests_dict.get( 'test_id', '' ),
- tool_id=failed_tests_dict.get( 'tool_id', '' ),
- tool_version=failed_tests_dict.get( 'tool_version', '' ),
- traceback=failed_tests_dict.get( 'traceback', '' ) )
- folder.failed_tests.append( failed_test )
- missing_test_components_dicts = tool_test_results_dict.get( 'missing_test_components', [] )
- if missing_test_components_dicts:
- folder_id += 1
- folder = Folder( id=folder_id, key='missing_test_components', label='Tools missing tests or test data', parent=test_results_folder )
- test_results_folder.folders.append( folder )
- missing_test_component_id = 0
- for missing_test_components_dict in missing_test_components_dicts:
- missing_test_component_id += 1
- missing_test_component = MissingTestComponent( id=missing_test_component_id,
- missing_components=missing_test_components_dict.get( 'missing_components', '' ),
- tool_guid=missing_test_components_dict.get( 'tool_guid', '' ),
- tool_id=missing_test_components_dict.get( 'tool_id', '' ),
- tool_version=missing_test_components_dict.get( 'tool_version', '' ) )
- folder.missing_test_components.append( missing_test_component )
- installation_error_dicts = tool_test_results_dict.get( 'installation_errors', {} )
- if installation_error_dicts:
- current_repository_errors = installation_error_dicts.get( 'current_repository', [] )
- repository_dependency_errors = installation_error_dicts.get( 'repository_dependencies', [] )
- tool_dependency_errors = installation_error_dicts.get( 'tool_dependencies', [] )
- if current_repository_errors or repository_dependency_errors or tool_dependency_errors:
- folder_id += 1
- installation_error_base_folder = Folder( id=folder_id,
- key='installation_errors',
- label='Installation errors',
- parent=test_results_folder )
- if current_repository_errors:
- folder_id += 1
- subfolder = Folder( id=folder_id,
- key='current_repository_errors',
- label='This repository',
- parent=installation_error_base_folder )
- repository_error_id = 0
- for repository_error_dict in current_repository_errors:
- repository_error_id += 1
- repository_installation_error = RepositoryInstallationError( id=repository_error_id,
- tool_shed=repository_error_dict.get( 'tool_shed', '' ),
- name=repository_error_dict.get( 'name', '' ),
- owner=repository_error_dict.get( 'owner', '' ),
- changeset_revision=repository_error_dict.get( 'changeset_revision', '' ),
- error_message=repository_error_dict.get( 'error_message', '' ) )
- subfolder.current_repository_installation_errors.append( repository_installation_error )
- installation_error_base_folder.folders.append( subfolder )
- if repository_dependency_errors:
- folder_id += 1
- subfolder = Folder( id=folder_id,
- key='repository_dependency_errors',
- label='Repository dependencies',
- parent=installation_error_base_folder )
- repository_error_id = 0
- for repository_error_dict in repository_dependency_errors:
- repository_error_id += 1
- repository_installation_error = RepositoryInstallationError( id=repository_error_id,
- tool_shed=repository_error_dict.get( 'tool_shed', '' ),
- name=repository_error_dict.get( 'name', '' ),
- owner=repository_error_dict.get( 'owner', '' ),
- changeset_revision=repository_error_dict.get( 'changeset_revision', '' ),
- error_message=repository_error_dict.get( 'error_message', '' ) )
- subfolder.repository_installation_errors.append( repository_installation_error )
- installation_error_base_folder.folders.append( subfolder )
- if tool_dependency_errors:
- folder_id += 1
- subfolder = Folder( id=folder_id,
- key='tool_dependency_errors',
- label='Tool dependencies',
- parent=installation_error_base_folder )
- tool_dependency_error_id = 0
- for tool_dependency_error_dict in tool_dependency_errors:
- tool_dependency_error_id += 1
- tool_dependency_installation_error = ToolDependencyInstallationError( id=tool_dependency_error_id,
- type=tool_dependency_error_dict.get( 'type', '' ),
- name=tool_dependency_error_dict.get( 'name', '' ),
- version=tool_dependency_error_dict.get( 'version', '' ),
- error_message=tool_dependency_error_dict.get( 'error_message', '' ) )
- subfolder.tool_dependency_installation_errors.append( tool_dependency_installation_error )
- installation_error_base_folder.folders.append( subfolder )
- test_results_folder.installation_errors.append( installation_error_base_folder )
- else:
- tool_test_results_root_folder = None
- return folder_id, tool_test_results_root_folder
-
-def build_workflows_folder( trans, folder_id, workflows, repository_metadata_id=None, repository_id=None, label='Workflows' ):
- """
- Return a folder hierarchy containing workflow objects for each workflow dictionary in the received workflows list. When
- this method is called from the tool shed, repository_metadata_id will have a value and repository_id will be None. When
- this method is called from Galaxy, repository_id will have a value only if the repository is not currenlty being installed
- and repository_metadata_id will be None.
- """
- if workflows:
- workflow_id = 0
- folder_id += 1
- workflows_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
- folder_id += 1
- folder = Folder( id=folder_id, key='workflows', label=label, parent=workflows_root_folder )
- workflows_root_folder.folders.append( folder )
- # Insert a header row.
- workflow_id += 1
- workflow = Workflow( id=workflow_id,
- workflow_name='Name',
- steps='steps',
- format_version='format-version',
- annotation='annotation',
- repository_metadata_id=repository_metadata_id,
- repository_id=repository_id )
- folder.workflows.append( workflow )
- for workflow_tup in workflows:
- workflow_dict=workflow_tup[ 1 ]
- steps = workflow_dict.get( 'steps', [] )
- if steps:
- steps = str( len( steps ) )
- else:
- steps = 'unknown'
- workflow_id += 1
- workflow = Workflow( id=workflow_id,
- workflow_name=workflow_dict.get( 'name', '' ),
- steps=steps,
- format_version=workflow_dict.get( 'format-version', '' ),
- annotation=workflow_dict.get( 'annotation', '' ),
- repository_metadata_id=repository_metadata_id,
- repository_id=repository_id )
- folder.workflows.append( workflow )
- else:
- workflows_root_folder = None
- return folder_id, workflows_root_folder
-
-def cast_empty_repository_dependency_folders( folder, repository_dependency_id ):
- """
- Change any empty folders contained within the repository dependencies container into a repository dependency since it has no repository dependencies
- of its own. This method is not used (and may not be needed), but here it is just in case.
- """
- if not folder.folders and not folder.repository_dependencies:
- repository_dependency_id += 1
- repository_dependency = folder.to_repository_dependency( repository_dependency_id )
- if not folder.parent.contains_repository_dependency( repository_dependency ):
- folder.parent.repository_dependencies.append( repository_dependency )
- folder.parent.folders.remove( folder )
- for sub_folder in folder.folders:
- return cast_empty_repository_dependency_folders( sub_folder, repository_dependency_id )
- return folder, repository_dependency_id
-
-def generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, changeset_revision, prior_installation_required, key ):
- """Return a repository dependency label based on the repository dependency key."""
- if key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, prior_installation_required, key ):
- label = 'Repository dependencies'
- else:
- if prior_installation_required:
- prior_installation_required_str = " <i>(prior install required)</i>"
- else:
- prior_installation_required_str = ""
- label = "Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>%s" % \
- ( repository_name, changeset_revision, repository_owner, prior_installation_required_str )
- return label
-
-def generate_repository_dependencies_key_for_repository( toolshed_base_url, repository_name, repository_owner, changeset_revision, prior_installation_required ):
- # FIXME: assumes tool shed is current tool shed since repository dependencies across tool sheds is not yet supported.
- return '%s%s%s%s%s%s%s%s%s' % ( str( toolshed_base_url ).rstrip( '/' ),
- STRSEP,
- str( repository_name ),
- STRSEP,
- str( repository_owner ),
- STRSEP,
- str( changeset_revision ),
- STRSEP,
- str( prior_installation_required ) )
-
-def generate_tool_dependencies_key( name, version, type ):
- return '%s%s%s%s%s' % ( str( name ), STRSEP, str( version ), STRSEP, str( type ) )
-
-def get_folder( folder, key ):
- if folder.key == key:
- return folder
- for sub_folder in folder.folders:
- return get_folder( sub_folder, key )
- return None
-
-def get_components_from_key( key ):
- # FIXME: assumes tool shed is current tool shed since repository dependencies across tool sheds is not yet supported.
- items = key.split( STRSEP )
- toolshed_base_url = items[ 0 ]
- repository_name = items[ 1 ]
- repository_owner = items[ 2 ]
- changeset_revision = items[ 3 ]
- if len( items ) == 5:
- prior_installation_required = asbool( str( items[ 4 ] ) )
- return toolshed_base_url, repository_name, repository_owner, changeset_revision, prior_installation_required
- else:
- # For backward compatibility to the 12/20/12 Galaxy release we have to return the following, and callers must handle exceptions.
- return toolshed_base_url, repository_name, repository_owner, changeset_revision
-
-def handle_repository_dependencies_container_entry( trans, repository_dependencies_folder, rd_key, rd_value, folder_id, repository_dependency_id, folder_keys ):
- try:
- toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required = get_components_from_key( rd_key )
- except ValueError:
- # For backward compatibility to the 12/20/12 Galaxy release, default prior_installation_required to False.
- toolshed, repository_name, repository_owner, changeset_revision = get_components_from_key( rd_key )
- prior_installation_required = False
- folder = get_folder( repository_dependencies_folder, rd_key )
- label = generate_repository_dependencies_folder_label_from_key( repository_name,
- repository_owner,
- changeset_revision,
- prior_installation_required,
- repository_dependencies_folder.key )
- if folder:
- if rd_key not in folder_keys:
- folder_id += 1
- sub_folder = Folder( id=folder_id, key=rd_key, label=label, parent=folder )
- folder.folders.append( sub_folder )
- else:
- sub_folder = folder
- else:
- folder_id += 1
- sub_folder = Folder( id=folder_id, key=rd_key, label=label, parent=repository_dependencies_folder )
- repository_dependencies_folder.folders.append( sub_folder )
- if trans.webapp.name == 'galaxy':
- # Insert a header row.
- repository_dependency_id += 1
- repository_dependency = RepositoryDependency( id=repository_dependency_id,
- repository_name='Name',
- changeset_revision='Revision',
- repository_owner='Owner',
- installation_status='Installation status' )
- # Insert the header row into the folder.
- sub_folder.repository_dependencies.append( repository_dependency )
- for repository_dependency in rd_value:
- if trans.webapp.name == 'galaxy':
- if len( repository_dependency ) == 6:
- # Metadata should have been reset on this installed repository, but it wasn't.
- tool_shed_repository_id = repository_dependency[ 4 ]
- installation_status = repository_dependency[ 5 ]
- tool_shed, name, owner, changeset_revision = repository_dependency[ 0:4 ]
- # Default prior_installation_required to False.
- prior_installation_required = False
- repository_dependency = [ tool_shed, name, owner, changeset_revision, prior_installation_required ]
- elif len( repository_dependency ) == 7:
- # We have a repository dependency tuple that includes a prior_installation_required value.
- tool_shed_repository_id = repository_dependency[ 5 ]
- installation_status = repository_dependency[ 6 ]
- repository_dependency = repository_dependency[ 0:5 ]
- else:
- tool_shed_repository_id = None
- installation_status = 'unknown'
- else:
- tool_shed_repository_id = None
- installation_status = None
- can_create_dependency = not is_subfolder_of( sub_folder, repository_dependency )
- if can_create_dependency:
- toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required = \
- suc.parse_repository_dependency_tuple( repository_dependency )
- repository_dependency_id += 1
- repository_dependency = RepositoryDependency( id=repository_dependency_id,
- toolshed=toolshed,
- repository_name=repository_name,
- repository_owner=repository_owner,
- changeset_revision=changeset_revision,
- prior_installation_required=asbool( prior_installation_required ),
- installation_status=installation_status,
- tool_shed_repository_id=tool_shed_repository_id )
- # Insert the repository_dependency into the folder.
- sub_folder.repository_dependencies.append( repository_dependency )
- return repository_dependencies_folder, folder_id, repository_dependency_id
-
-def is_subfolder_of( folder, repository_dependency ):
- toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required = \
- suc.parse_repository_dependency_tuple( repository_dependency )
- key = generate_repository_dependencies_key_for_repository( toolshed, repository_name, repository_owner, changeset_revision, asbool( prior_installation_required ) )
- for sub_folder in folder.folders:
- if key == sub_folder.key:
- return True
- return False
-
-def key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, prior_installation_required, key ):
- try:
- toolshed_base_url, key_name, key_owner, key_changeset_revision, key_prior_installation_required = get_components_from_key( key )
- except ValueError:
- # For backward compatibility to the 12/20/12 Galaxy release, default key_prior_installation_required to False.
- toolshed_base_url, key_name, key_owner, key_changeset_revision = get_components_from_key( key )
- key_prior_installation_required = False
- return repository_name == key_name and \
- repository_owner == key_owner and \
- changeset_revision == key_changeset_revision and \
- prior_installation_required == key_prior_installation_required
-
-def populate_repository_dependencies_container( trans, repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id ):
- folder_keys = repository_dependencies.keys()
- for key, value in repository_dependencies.items():
- repository_dependencies_folder, folder_id, repository_dependency_id = \
- handle_repository_dependencies_container_entry( trans, repository_dependencies_folder, key, value, folder_id, repository_dependency_id, folder_keys )
- return repository_dependencies_folder, folder_id, repository_dependency_id
-
-def print_folders( pad, folder ):
- # For debugging...
- pad_str = ''
- for i in range( 1, pad ):
- pad_str += ' '
- print '%sid: %s key: %s' % ( pad_str, str( folder.id ), folder.key )
- for repository_dependency in folder.repository_dependencies:
- print ' %s%s' % ( pad_str, repository_dependency.listify )
- for sub_folder in folder.folders:
- print_folders( pad+5, sub_folder )
-
-def prune_repository_dependencies( folder ):
- """
- Since the object used to generate a repository dependencies container is a dictionary and not an odict() (it must be JSON-serializable), the
- order in which the dictionary is processed to create the container sometimes results in repository dependency entries in a folder that also
- includes the repository dependency as a sub-folder (if the repository dependency has its own repository dependency). This method will remove
- all repository dependencies from folder that are also sub-folders of folder.
- """
- repository_dependencies = [ rd for rd in folder.repository_dependencies ]
- for repository_dependency in repository_dependencies:
- listified_repository_dependency = repository_dependency.listify
- if is_subfolder_of( folder, listified_repository_dependency ):
- repository_dependencies.remove( repository_dependency )
- folder.repository_dependencies = repository_dependencies
- for sub_folder in folder.folders:
- return prune_repository_dependencies( sub_folder )
- return folder
-
\ No newline at end of file
diff -r f02a75ce05b71457845c0e5f56f409047a841967 -r b8cf5887ad464707887aaf8381df19dbc67ac697 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -8,10 +8,10 @@
from galaxy import util
from galaxy import web
from galaxy.model.orm import or_
-from galaxy.webapps.tool_shed.util import container_util
import tool_shed.util.shed_util_common as suc
from tool_shed.util import common_util
from tool_shed.util import common_install_util
+from tool_shed.util import container_util
from tool_shed.util import data_manager_util
from tool_shed.util import datatype_util
from tool_shed.util import encoding_util
diff -r f02a75ce05b71457845c0e5f56f409047a841967 -r b8cf5887ad464707887aaf8381df19dbc67ac697 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -6,9 +6,9 @@
from galaxy import util
from galaxy import web
from galaxy.util import json
-from galaxy.webapps.tool_shed.util import container_util
import tool_shed.util.shed_util_common as suc
from tool_shed.util import common_util
+from tool_shed.util import container_util
from tool_shed.util import encoding_util
from tool_shed.util import data_manager_util
from tool_shed.util import datatype_util
This diff is so big that we needed to truncate the remainder.
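A note on the repository dependency keys handled by the container_util functions removed above: a key is a single string whose components are joined with the STRSEP separator, and get_components_from_key() must tolerate both old four-component keys and newer five-component keys that append prior_installation_required. A minimal round-trip sketch in the same spirit (the STRSEP value and repository details below are placeholders, not the real constants):

STRSEP = '__STRSEP__'  # placeholder; the real separator is defined earlier in container_util

def generate_key( toolshed_base_url, name, owner, changeset_revision, prior_installation_required ):
    # Mirrors generate_repository_dependencies_key_for_repository() above.
    return STRSEP.join( [ str( toolshed_base_url ).rstrip( '/' ),
                          str( name ),
                          str( owner ),
                          str( changeset_revision ),
                          str( prior_installation_required ) ] )

key = generate_key( 'http://toolshed.example.org', 'some_repo', 'some_owner', 'abc123def456', False )
items = key.split( STRSEP )
assert len( items ) == 5
# Keys written before the 12/20/12 release lack the trailing
# prior_installation_required component, which is why callers of
# get_components_from_key() catch ValueError when unpacking five values.
print( items )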
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: guerler: Bowtie2: Added quotation marks to group selection and added basic tool test
by commits-noreply@bitbucket.org 07 Aug '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f02a75ce05b7/
Changeset: f02a75ce05b7
User: guerler
Date: 2013-08-07 20:17:43
Summary: Bowtie2: Added quotation marks to group selection and added basic tool test
Affected #: 4 files
diff -r f54e602ca23310b8a237af1388d7bc2a37047d34 -r f02a75ce05b71457845c0e5f56f409047a841967 test-data/bowtie2/phix_genome.fasta
--- /dev/null
+++ b/test-data/bowtie2/phix_genome.fasta
@@ -0,0 +1,78 @@
+>gi|9626372|ref|NC_001422.1| Enterobacteria phage phiX174, complete genome
+GAGTTTTATCGCTTCCATGACGCAGAAGTTAACACTTTCGGATATTTCTGATGAGTCGAAAAATTATCTT
+GATAAAGCAGGAATTACTACTGCTTGTTTACGAATTAAATCGAAGTGGACTGCTGGCGGAAAATGAGAAA
+ATTCGACCTATCCTTGCGCAGCTCGAGAAGCTCTTACTTTGCGACCTTTCGCCATCAACTAACGATTCTG
+TCAAAAACTGACGCGTTGGATGAGGAGAAGTGGCTTAATATGCTTGGCACGTTCGTCAAGGACTGGTTTA
+GATATGAGTCACATTTTGTTCATGGTAGAGATTCTCTTGTTGACATTTTAAAAGAGCGTGGATTACTATC
+TGAGTCCGATGCTGTTCAACCACTAATAGGTAAGAAATCATGAGTCAAGTTACTGAACAATCCGTACGTT
+TCCAGACCGCTTTGGCCTCTATTAAGCTCATTCAGGCTTCTGCCGTTTTGGATTTAACCGAAGATGATTT
+CGATTTTCTGACGAGTAACAAAGTTTGGATTGCTACTGACCGCTCTCGTGCTCGTCGCTGCGTTGAGGCT
+TGCGTTTATGGTACGCTGGACTTTGTGGGATACCCTCGCTTTCCTGCTCCTGTTGAGTTTATTGCTGCCG
+TCATTGCTTATTATGTTCATCCCGTCAACATTCAAACGGCCTGTCTCATCATGGAAGGCGCTGAATTTAC
+GGAAAACATTATTAATGGCGTCGAGCGTCCGGTTAAAGCCGCTGAATTGTTCGCGTTTACCTTGCGTGTA
+CGCGCAGGAAACACTGACGTTCTTACTGACGCAGAAGAAAACGTGCGTCAAAAATTACGTGCGGAAGGAG
+TGATGTAATGTCTAAAGGTAAAAAACGTTCTGGCGCTCGCCCTGGTCGTCCGCAGCCGTTGCGAGGTACT
+AAAGGCAAGCGTAAAGGCGCTCGTCTTTGGTATGTAGGTGGTCAACAATTTTAATTGCAGGGGCTTCGGC
+CCCTTACTTGAGGATAAATTATGTCTAATATTCAAACTGGCGCCGAGCGTATGCCGCATGACCTTTCCCA
+TCTTGGCTTCCTTGCTGGTCAGATTGGTCGTCTTATTACCATTTCAACTACTCCGGTTATCGCTGGCGAC
+TCCTTCGAGATGGACGCCGTTGGCGCTCTCCGTCTTTCTCCATTGCGTCGTGGCCTTGCTATTGACTCTA
+CTGTAGACATTTTTACTTTTTATGTCCCTCATCGTCACGTTTATGGTGAACAGTGGATTAAGTTCATGAA
+GGATGGTGTTAATGCCACTCCTCTCCCGACTGTTAACACTACTGGTTATATTGACCATGCCGCTTTTCTT
+GGCACGATTAACCCTGATACCAATAAAATCCCTAAGCATTTGTTTCAGGGTTATTTGAATATCTATAACA
+ACTATTTTAAAGCGCCGTGGATGCCTGACCGTACCGAGGCTAACCCTAATGAGCTTAATCAAGATGATGC
+TCGTTATGGTTTCCGTTGCTGCCATCTCAAAAACATTTGGACTGCTCCGCTTCCTCCTGAGACTGAGCTT
+TCTCGCCAAATGACGACTTCTACCACATCTATTGACATTATGGGTCTGCAAGCTGCTTATGCTAATTTGC
+ATACTGACCAAGAACGTGATTACTTCATGCAGCGTTACCATGATGTTATTTCTTCATTTGGAGGTAAAAC
+CTCTTATGACGCTGACAACCGTCCTTTACTTGTCATGCGCTCTAATCTCTGGGCATCTGGCTATGATGTT
+GATGGAACTGACCAAACGTCGTTAGGCCAGTTTTCTGGTCGTGTTCAACAGACCTATAAACATTCTGTGC
+CGCGTTTCTTTGTTCCTGAGCATGGCACTATGTTTACTCTTGCGCTTGTTCGTTTTCCGCCTACTGCGAC
+TAAAGAGATTCAGTACCTTAACGCTAAAGGTGCTTTGACTTATACCGATATTGCTGGCGACCCTGTTTTG
+TATGGCAACTTGCCGCCGCGTGAAATTTCTATGAAGGATGTTTTCCGTTCTGGTGATTCGTCTAAGAAGT
+TTAAGATTGCTGAGGGTCAGTGGTATCGTTATGCGCCTTCGTATGTTTCTCCTGCTTATCACCTTCTTGA
+AGGCTTCCCATTCATTCAGGAACCGCCTTCTGGTGATTTGCAAGAACGCGTACTTATTCGCCACCATGAT
+TATGACCAGTGTTTCCAGTCCGTTCAGTTGTTGCAGTGGAATAGTCAGGTTAAATTTAATGTGACCGTTT
+ATCGCAATCTGCCGACCACTCGCGATTCAATCATGACTTCGTGATAAAAGATTGAGTGTGAGGTTATAAC
+GCCGAAGCGGTAAAAATTTTAATTTTTGCCGCTGAGGGGTTGACCAAGCGAAGCGCGGTAGGTTTTCTGC
+TTAGGAGTTTAATCATGTTTCAGACTTTTATTTCTCGCCATAATTCAAACTTTTTTTCTGATAAGCTGGT
+TCTCACTTCTGTTACTCCAGCTTCTTCGGCACCTGTTTTACAGACACCTAAAGCTACATCGTCAACGTTA
+TATTTTGATAGTTTGACGGTTAATGCTGGTAATGGTGGTTTTCTTCATTGCATTCAGATGGATACATCTG
+TCAACGCCGCTAATCAGGTTGTTTCTGTTGGTGCTGATATTGCTTTTGATGCCGACCCTAAATTTTTTGC
+CTGTTTGGTTCGCTTTGAGTCTTCTTCGGTTCCGACTACCCTCCCGACTGCCTATGATGTTTATCCTTTG
+AATGGTCGCCATGATGGTGGTTATTATACCGTCAAGGACTGTGTGACTATTGACGTCCTTCCCCGTACGC
+CGGGCAATAACGTTTATGTTGGTTTCATGGTTTGGTCTAACTTTACCGCTACTAAATGCCGCGGATTGGT
+TTCGCTGAATCAGGTTATTAAAGAGATTATTTGTCTCCAGCCACTTAAGTGAGGTGATTTATGTTTGGTG
+CTATTGCTGGCGGTATTGCTTCTGCTCTTGCTGGTGGCGCCATGTCTAAATTGTTTGGAGGCGGTCAAAA
+AGCCGCCTCCGGTGGCATTCAAGGTGATGTGCTTGCTACCGATAACAATACTGTAGGCATGGGTGATGCT
+GGTATTAAATCTGCCATTCAAGGCTCTAATGTTCCTAACCCTGATGAGGCCGCCCCTAGTTTTGTTTCTG
+GTGCTATGGCTAAAGCTGGTAAAGGACTTCTTGAAGGTACGTTGCAGGCTGGCACTTCTGCCGTTTCTGA
+TAAGTTGCTTGATTTGGTTGGACTTGGTGGCAAGTCTGCCGCTGATAAAGGAAAGGATACTCGTGATTAT
+CTTGCTGCTGCATTTCCTGAGCTTAATGCTTGGGAGCGTGCTGGTGCTGATGCTTCCTCTGCTGGTATGG
+TTGACGCCGGATTTGAGAATCAAAAAGAGCTTACTAAAATGCAACTGGACAATCAGAAAGAGATTGCCGA
+GATGCAAAATGAGACTCAAAAAGAGATTGCTGGCATTCAGTCGGCGACTTCACGCCAGAATACGAAAGAC
+CAGGTATATGCACAAAATGAGATGCTTGCTTATCAACAGAAGGAGTCTACTGCTCGCGTTGCGTCTATTA
+TGGAAAACACCAATCTTTCCAAGCAACAGCAGGTTTCCGAGATTATGCGCCAAATGCTTACTCAAGCTCA
+AACGGCTGGTCAGTATTTTACCAATGACCAAATCAAAGAAATGACTCGCAAGGTTAGTGCTGAGGTTGAC
+TTAGTTCATCAGCAAACGCAGAATCAGCGGTATGGCTCTTCTCATATTGGCGCTACTGCAAAGGATATTT
+CTAATGTCGTCACTGATGCTGCTTCTGGTGTGGTTGATATTTTTCATGGTATTGATAAAGCTGTTGCCGA
+TACTTGGAACAATTTCTGGAAAGACGGTAAAGCTGATGGTATTGGCTCTAATTTGTCTAGGAAATAACCG
+TCAGGATTGACACCCTCCCAATTGTATGTTTTCATGCCTCCAAATCTTGGAGGCTTTTTTATGGTTCGTT
+CTTATTACCCTTCTGAATGTCACGCTGATTATTTTGACTTTGAGCGTATCGAGGCTCTTAAACCTGCTAT
+TGAGGCTTGTGGCATTTCTACTCTTTCTCAATCCCCAATGCTTGGCTTCCATAAGCAGATGGATAACCGC
+ATCAAGCTCTTGGAAGAGATTCTGTCTTTTCGTATGCAGGGCGTTGAGTTCGATAATGGTGATATGTATG
+TTGACGGCCATAAGGCTGCTTCTGACGTTCGTGATGAGTTTGTATCTGTTACTGAGAAGTTAATGGATGA
+ATTGGCACAATGCTACAATGTGCTCCCCCAACTTGATATTAATAACACTATAGACCACCGCCCCGAAGGG
+GACGAAAAATGGTTTTTAGAGAACGAGAAGACGGTTACGCAGTTTTGCCGCAAGCTGGCTGCTGAACGCC
+CTCTTAAGGATATTCGCGATGAGTATAATTACCCCAAAAAGAAAGGTATTAAGGATGAGTGTTCAAGATT
+GCTGGAGGCCTCCACTATGAAATCGCGTAGAGGCTTTGCTATTCAGCGTTTGATGAATGCAATGCGACAG
+GCTCATGCTGATGGTTGGTTTATCGTTTTTGACACTCTCACGTTGGCTGACGACCGATTAGAGGCGTTTT
+ATGATAATCCCAATGCTTTGCGTGACTATTTTCGTGATATTGGTCGTATGGTTCTTGCTGCCGAGGGTCG
+CAAGGCTAATGATTCACACGCCGACTGCTATCAGTATTTTTGTGTGCCTGAGTATGGTACAGCTAATGGC
+CGTCTTCATTTCCATGCGGTGCACTTTATGCGGACACTTCCTACAGGTAGCGTTGACCCTAATTTTGGTC
+GTCGGGTACGCAATCGCCGCCAGTTAAATAGCTTGCAAAATACGTGGCCTTATGGTTACAGTATGCCCAT
+CGCAGTTCGCTACACGCAGGACGCTTTTTCACGTTCTGGTTGGTTGTGGCCTGTTGATGCTAAAGGTGAG
+CCGCTTAAAGCTACCAGTTATATGGCTGTTGGTTTCTATGTGGCTAAATACGTTAACAAAAAGTCAGATA
+TGGACCTTGCTGCTAAAGGTCTAGGAGCTAAAGAATGGAACAACTCACTAAAAACCAAGCTGTCGCTACT
+TCCCAAGAAGCTGTTCAGAATCAGAATGAGCCGCAACTTCGGGATGAAAATGCTCACAATGACAAATCTG
+TCCACGGAGTGCTTAATCCAACTTACCAAGCTGGGTTACGACGCGACGCCGTTCAACCAGATATTGAAGC
+AGAACGCAAAAAGAGAGATGAGATTGAGGCTGGGAAAAGTTACTGTAGCCGACGTTTTGGCGGCGCAACC
+TGTGACGACAAATCTGCTCAAATTTATGCGCGCTTCGATAAAAATGATTGGCGTATCCAACCTGCA
\ No newline at end of file
diff -r f54e602ca23310b8a237af1388d7bc2a37047d34 -r f02a75ce05b71457845c0e5f56f409047a841967 test-data/bowtie2/phix_mapped.bam
Binary file test-data/bowtie2/phix_mapped.bam has changed
diff -r f54e602ca23310b8a237af1388d7bc2a37047d34 -r f02a75ce05b71457845c0e5f56f409047a841967 test-data/bowtie2/phix_reads.fastq
--- /dev/null
+++ b/test-data/bowtie2/phix_reads.fastq
@@ -0,0 +1,260 @@
+@HWI-EAS210R_0001:4:10:890:1882#0/1
+AATCTCATCTCTCTTTTTGCGTTCTGCTTCAATATCTG
++HWI-EAS210R_0001:4:10:890:1882#0/1
+fcceeggggggggggggffghggggggggggggggggg
+@HWI-EAS210R_0001:4:10:890:1010#0/1
+GTTGTTTCTGTTGGTGCTGATATTGCTTTTGATGCCGA
++HWI-EAS210R_0001:4:10:890:1010#0/1
+gggggggggggggghgggggggggfggggggggggggg
+@HWI-EAS210R_0001:4:10:890:1780#0/1
+GAGGCCTCCACTATGAAATCGCGTAGAGGCTTTGCTAT
++HWI-EAS210R_0001:4:10:890:1780#0/1
+ggfhggggggggggggggggghgggggfggggggdggg
+@HWI-EAS210R_0001:4:10:890:1348#0/1
+TTGAGCGTATCGAGGCTCTTAAACCTGCTATTGAGGCT
++HWI-EAS210R_0001:4:10:890:1348#0/1
+ggggggggggggggggggggggggghfggeggggggdg
+@HWI-EAS210R_0001:4:10:890:1707#0/1
+AAAAGAGATTGCTGGCATTCAGTCGGCGACTTCACGCC
++HWI-EAS210R_0001:4:10:890:1707#0/1
+gfgfgfgggggggfggeggggffgcgggdfggfggdgg
+@HWI-EAS210R_0001:4:10:890:1527#0/1
+CGTACTTATTCGCCACCATGATTATGACCAGTGTTTCC
++HWI-EAS210R_0001:4:10:890:1527#0/1
+gggggggggggggggggggggggggggggggggggggg
+@HWI-EAS210R_0001:4:10:890:781#0/1
+CGGAAAACGAACAAGCGCAAGAGTAAACATAGTGCCAT
++HWI-EAS210R_0001:4:10:890:781#0/1
+gggggggggggggggggggggggggggggggghggggg
+@HWI-EAS210R_0001:4:10:890:568#0/1
+GTTTATCGCAATCTGCCGACCACTCGCGATTCAATCAT
++HWI-EAS210R_0001:4:10:890:568#0/1
+gggggggfgggggggggggggggggggggggggggggg
+@HWI-EAS210R_0001:4:10:890:1365#0/1
+CCAACATAAACATTATTGCCCGGCGTACGAGGAAGGAC
++HWI-EAS210R_0001:4:10:890:1365#0/1
+eggggdggfghggggggggghgfgfegghfggfgggge
+@HWI-EAS210R_0001:4:10:890:161#0/1
+GTGATTATCTTGCTGCTGCATTTCCTGAGCTTAATGCT
++HWI-EAS210R_0001:4:10:890:161#0/1
+ggggfg_gggfegggfgeggaefefdbfddeedgcgg`
+@HWI-EAS210R_0001:4:10:890:1920#0/1
+TTTATCAATACCATGAAAAATATCAACCACACCAGAAG
++HWI-EAS210R_0001:4:10:890:1920#0/1
+gggggggggggggggggggggffggggggggggggggg
+@HWI-EAS210R_0001:4:10:890:61#0/1
+GTCGCGTCGTAACCCAGCTTGGTAAGTTGGATTAAGCA
++HWI-EAS210R_0001:4:10:890:61#0/1
+eede[egfggfggggggggggggegggggfgggggggg
+@HWI-EAS210R_0001:4:10:890:1284#0/1
+AAGCGCAAGAGTAAACATAGTGCCATGCTCAGGAACAA
++HWI-EAS210R_0001:4:10:890:1284#0/1
+gggggggggggggggggggggggggggggggggggggg
+@HWI-EAS210R_0001:4:10:890:208#0/1
+CAGACCTATAAACATTCTGTGCCGCGTTTCTTTGTTCC
++HWI-EAS210R_0001:4:10:890:208#0/1
+gggeggggggggggggggggggggggggggggddgggg
+@HWI-EAS210R_0001:4:10:890:859#0/1
+CAATAGATGTGGTAGAAGTCGTCATTTGGCGAGAAAGC
++HWI-EAS210R_0001:4:10:890:859#0/1
+ggggcgggggggggggdgfggggggggfggggghgggh
+@HWI-EAS210R_0001:4:10:890:1947#0/1
+TCAACGCCGCTAATCAGGTTGTTTCTGTTGGTGCTGAT
++HWI-EAS210R_0001:4:10:890:1947#0/1
+ggggggggggggggggggeggggggggggggfgfgggg
+@HWI-EAS210R_0001:4:10:890:416#0/1
+AATGTCTAAAGGTAAAAAACGTTCTGGCGCTCGCCCTG
++HWI-EAS210R_0001:4:10:890:416#0/1
+eggggggggggdcgggggfggfgggggfcgfggggggg
+@HWI-EAS210R_0001:4:10:890:654#0/1
+GCAGCAAGGTCCATATCTGACTTTTTGTTAACGTATTT
++HWI-EAS210R_0001:4:10:890:654#0/1
+ggggghggggghggggghgfggggfgdfffgggggggg
+@HWI-EAS210R_0001:4:10:890:269#0/1
+CTTGAAGGCTTCCCATTCATTCAGGAACCGCCTTCTGG
++HWI-EAS210R_0001:4:10:890:269#0/1
+eecaadggggggggggggggggdgg\ffffgggggggg
+@HWI-EAS210R_0001:4:10:890:657#0/1
+TACCAGCTTTAGCCATAGCACCAGAAACAAAACTAGGG
++HWI-EAS210R_0001:4:10:890:657#0/1
+ggggghggggffgggfggggggfggfggggfffgfggg
+@HWI-EAS210R_0001:4:10:890:1449#0/1
+TTCAAGATTGCTGGAGGCCTCCACTATGAAATCGCGTA
++HWI-EAS210R_0001:4:10:890:1449#0/1
+ggggggggggggggggfgggghgggggggggggghhfh
+@HWI-EAS210R_0001:4:10:890:305#0/1
+CACGTTGGCTGACGACCGATTAGAGGCGTTTTATGATA
++HWI-EAS210R_0001:4:10:890:305#0/1
+gggegggfgggfggggggdgggdggfgffggggggfhg
+@HWI-EAS210R_0001:4:10:890:1190#0/1
+AATAAGCAATGACGGCAGCAATAAACTCAACAGGAGCA
++HWI-EAS210R_0001:4:10:890:1190#0/1
+bbecb_gggggggggggggggggggcgggfVgggeggg
+@HWI-EAS210R_0001:4:10:890:1586#0/1
+ATTAGCTGTACCATACTCAGGCACACAAAAATACTGAT
++HWI-EAS210R_0001:4:10:890:1586#0/1
+eggWgaa^O[\`[\_]J_^][`W`\]K^BBBBBBBBBB
+@HWI-EAS210R_0001:4:10:890:617#0/1
+AAACGCAAGCCTCAACGCAGCGACGAGCACGAGAGCGG
++HWI-EAS210R_0001:4:10:890:617#0/1
+gggggggggggggggggggggggggggggggggggggg
+@HWI-EAS210R_0001:4:10:890:1562#0/1
+GTGGACTGCTGGCGGAAAATGAGAAAATTCGACCTATC
++HWI-EAS210R_0001:4:10:890:1562#0/1
+hggffggggdgggggaageef`Waegggbgggcgddgg
+@HWI-EAS210R_0001:4:10:890:1410#0/1
+CATAAAAAAGCCTCCAAGATTTGGAGGCATGAAAACAT
++HWI-EAS210R_0001:4:10:890:1410#0/1
+\geedggfffcd\gd`\edeggY_Zda`gggXgadeeg
+@HWI-EAS210R_0001:4:10:890:465#0/1
+AGCGTTGACCCTAATTTTGGTCGTCGGGTACGCAATCG
++HWI-EAS210R_0001:4:10:890:465#0/1
+gggggfgggggggfgggeggggbgggggagf^ffefea
+@HWI-EAS210R_0001:4:10:890:1323#0/1
+ATGACAAGTAAAGGACGGTTGTCAGCGTCATAAGAGGT
++HWI-EAS210R_0001:4:10:890:1323#0/1
+gggggggfggdggggggggggggggggggggfgghggg
+@HWI-EAS210R_0001:4:10:890:1064#0/1
+TTTCATGGTATTGATAAAGCTGTTGCCGATACTTGGAA
++HWI-EAS210R_0001:4:10:890:1064#0/1
+gggggggcegggfgfggfgfgggdgggggghfggggdf
+@HWI-EAS210R_0001:4:10:890:637#0/1
+ATAGTGCCATGCTCAGGAACAAAGAAACGCGGCACAGA
++HWI-EAS210R_0001:4:10:890:637#0/1
+ggggggggggggggggggeggggggfddgggcgggggd
+@HWI-EAS210R_0001:4:10:890:1825#0/1
+TCCATCTGAATGCAATGAAGAAAACCACCATTACCAGC
++HWI-EAS210R_0001:4:10:890:1825#0/1
+gggggggghgggffgggggegggffgghgggggggggg
+@HWI-EAS210R_0001:4:10:890:1697#0/1
+ACGGTTAATGCTGGTAATGGTGGTTTTCTTCATTGCAT
++HWI-EAS210R_0001:4:10:890:1697#0/1
+eecaadeffedeggcabebeagggdecL_aNTTTTeg_
+@HWI-EAS210R_0001:4:10:890:1479#0/1
+ATTGCTGGCGACCCTGTTTTGTATGGCAACTTGCCGCC
++HWI-EAS210R_0001:4:10:890:1479#0/1
+ggggggggggggggggggggfggggggggghgbgggfg
+@HWI-EAS210R_0001:4:10:890:1838#0/1
+ATGTGACCGTTTATCGCAATCTGCCGACCACTCGCGAT
++HWI-EAS210R_0001:4:10:890:1838#0/1
+ggfgfggfggggggfgggggggaggggfegfhggdggg
+@HWI-EAS210R_0001:4:10:890:988#0/1
+CATTCAGTCGGCGACTTCACGCCAGAATACGAAAGACC
++HWI-EAS210R_0001:4:10:890:988#0/1
+ggghggfggggggegghgfggfgggggggggggggggg
+@HWI-EAS210R_0001:4:10:890:1902#0/1
+CTTCCTCGTACGCCGGGCAATAATGTTTATGTTGGTTT
++HWI-EAS210R_0001:4:10:890:1902#0/1
+gggggggegggggggggggggggggggggfgggggggg
+@HWI-EAS210R_0001:4:10:890:1983#0/1
+TTAAGGTACTGAATCTCTTTAGTCGCAGTAGGCGGAAA
++HWI-EAS210R_0001:4:10:890:1983#0/1
+ffdfbaQa_c_Y\]Yeceeeege_gYR^VS]ZW[\Lcc
+@HWI-EAS210R_0001:4:10:890:1058#0/1
+GCTCGCCCTGGTCGTCCGCAGCCGTTGCGAGGTACTAA
++HWI-EAS210R_0001:4:10:890:1058#0/1
+ggggdgggggggggggggggggggggggggggfggggg
+@HWI-EAS210R_0001:4:10:890:287#0/1
+GTATCCAACCTGCAGAGTTTTATCGCTTCCATGACGCA
++HWI-EAS210R_0001:4:10:890:287#0/1
+ggggggfcggggg^fbe``egggggggdgg`dddfggg
+@HWI-EAS210R_0001:4:10:890:335#0/1
+AAGAGGTTTTACCTCCAAATGAAGAAATAACATCATGG
++HWI-EAS210R_0001:4:10:890:335#0/1
+`^_``Q[cccggfgggggggbddg\dXdbegggddfg`
+@HWI-EAS210R_0001:4:10:891:149#0/1
+TTTAAGAGCCTCGATACGCTCAAAGTCAAAATAATCAG
++HWI-EAS210R_0001:4:10:891:149#0/1
+ggggghgggggghggggggggggggfgggggggffggg
+@HWI-EAS210R_0001:4:10:891:445#0/1
+ATATTTTTCATGGTATTGATAAAGCTGTTGCCGATACT
++HWI-EAS210R_0001:4:10:891:445#0/1
+gggggggddggggggggggggggggggggggggggggg
+@HWI-EAS210R_0001:4:10:891:1476#0/1
+GGCGACTCCTTCGAGATGGACGCCGTTGGCGCTCTCCG
++HWI-EAS210R_0001:4:10:891:1476#0/1
+ggegggffghhggfhggfgeggffgggffggagfggfg
+@HWI-EAS210R_0001:4:10:891:323#0/1
+TGTAAAACAGGTGCCGAAGAAGCTGGAGTAACAGAAGT
++HWI-EAS210R_0001:4:10:891:323#0/1
+gggcgfefghgggghgfgfgggggggggegaaeeagge
+@HWI-EAS210R_0001:4:10:891:69#0/1
+CTTGGCTTCCTTGCTGGTCAGATTGGTCGTCTTATTAC
++HWI-EAS210R_0001:4:10:891:69#0/1
+gggefgggfgafP_Vdbacbgegebd`fbaeggaggQd
+@HWI-EAS210R_0001:4:10:891:1379#0/1
+GACCCTGTTTTGTATGGCAACTTGCCGCCGCGTGAAAT
++HWI-EAS210R_0001:4:10:891:1379#0/1
+gggggggggggggggggggggggggggggfgefggggg
+@HWI-EAS210R_0001:4:10:891:608#0/1
+CTTATGCTAATTTGCATACTGACCAAGAACGTGATTAC
++HWI-EAS210R_0001:4:10:891:608#0/1
+ggggggggggggggggggggggggfggggggggggggg
+@HWI-EAS210R_0001:4:10:891:1831#0/1
+GTGGATTACTATCTGAGTCCGATGCTGTTCAACCACTA
++HWI-EAS210R_0001:4:10:891:1831#0/1
+ggggggggggggggggghggggggggghgggggfgggg
+@HWI-EAS210R_0001:4:10:891:412#0/1
+TAAGAAATCATGAGTCAAGTTACTGAACAATCCGTACG
++HWI-EAS210R_0001:4:10:891:412#0/1
+gggggggggggggggggggggggggggggggggggggg
+@HWI-EAS210R_0001:4:10:891:346#0/1
+AATTCATCCATTAACTTCTCATCAACATATACAAACTC
++HWI-EAS210R_0001:4:10:891:346#0/1
+^W][_b^be_cRgaggdgdgbL_^_\ZN\R]]`W]cYe
+@HWI-EAS210R_0001:4:10:891:174#0/1
+AGTGGAGGCCTCCAGCAATCTTGAACACTCATCCTTAA
++HWI-EAS210R_0001:4:10:891:174#0/1
+ggggggggggggggggggggggcgggggfgggghghcg
+@HWI-EAS210R_0001:4:10:891:1415#0/1
+ACCAACCATCAGCATGAGCCTGTCGCATTGCATTCATC
++HWI-EAS210R_0001:4:10:891:1415#0/1
+gggggggggggdghgggggggggggggggggggggggg
+@HWI-EAS210R_0001:4:10:891:705#0/1
+TTTCCGTTGCTGCCATCTCAAAAACATTTGGACTGCTC
++HWI-EAS210R_0001:4:10:891:705#0/1
+gfgggggggggfgggggggggggfgddgggggggeggg
+@HWI-EAS210R_0001:4:10:891:1398#0/1
+GCTGAACGCCCTCTTAAGGATATTCGCGATGAGTATAA
++HWI-EAS210R_0001:4:10:891:1398#0/1
+ebehggdhdfgffdda`e\ecgfggedgegeggdfdfg
+@HWI-EAS210R_0001:4:10:891:971#0/1
+TAGCTTTAAGCGGCTCACCTTTAGCATCAACAGGCCAC
++HWI-EAS210R_0001:4:10:891:971#0/1
+ggfgggghggggggfggfgghggfggggfghggffghg
+@HWI-EAS210R_0001:4:10:891:627#0/1
+GTCGCAGTAGGCGGAAAACGAACAAGCGCAAGAGTAAA
++HWI-EAS210R_0001:4:10:891:627#0/1
+dfdffaeaaagggg[bgggghgghgggfgg\efefggd
+@HWI-EAS210R_0001:4:10:891:1822#0/1
+GAGTAGTTGAAATGGTAATAAGACGACCAATCTGACCA
++HWI-EAS210R_0001:4:10:891:1822#0/1
+gggeggfggegggggdggggghgfgedgggggcfgdcg
+@HWI-EAS210R_0001:4:10:891:1103#0/1
+AAGGGTAATAAGAACGAACCATAAAAAAGCCTCCAAGA
++HWI-EAS210R_0001:4:10:891:1103#0/1
+ggeggfggggggggghggggggggggfffgdggggggb
+@HWI-EAS210R_0001:4:10:891:586#0/1
+TATGTCTAATATTCAAACTGGCGCCGAGCGTATGCCGC
++HWI-EAS210R_0001:4:10:891:586#0/1
+ggggggggggggegfgggfcgggggffggggggggggg
+@HWI-EAS210R_0001:4:10:891:1620#0/1
+TTTTGTGTGCCTGAGTATGGTACAGCTAATGGCCGTCT
++HWI-EAS210R_0001:4:10:891:1620#0/1
+gggggfggggghgggggghcgggdgggegggefgfT``
+@HWI-EAS210R_0001:4:10:891:42#0/1
+CTAAAGGCAAGCGTAAAGGCGCTCGTCTTTGGTATGTA
++HWI-EAS210R_0001:4:10:891:42#0/1
+ggefggggggfggggfaf_fggggdfggggfaYbfd]b
+@HWI-EAS210R_0001:4:10:891:1609#0/1
+GCCATAGCACCAGAAACAAAACTAGGGGCGGCCTCATC
++HWI-EAS210R_0001:4:10:891:1609#0/1
+gggghgggghfgghgggggfffggggcgggfdggfggf
+@HWI-EAS210R_0001:4:10:891:1028#0/1
+AATCGCGTAGAGGCTTTGCTATTCAGCGTTTGATGAAT
++HWI-EAS210R_0001:4:10:891:1028#0/1
+gfgggggggdgbggfgegeeggggggggdggegggddg
+@HWI-EAS210R_0001:4:10:891:902#0/1
+ATAAACATCATAGGCAGTCGGGAGGGTAGTCGGAACCG
++HWI-EAS210R_0001:4:10:891:902#0/1
+ggggggfgggggggggggggggggfddggggggggggg
diff -r f54e602ca23310b8a237af1388d7bc2a37047d34 -r f02a75ce05b71457845c0e5f56f409047a841967 tools/sr_mapping/bowtie2_wrapper.xml
--- a/tools/sr_mapping/bowtie2_wrapper.xml
+++ b/tools/sr_mapping/bowtie2_wrapper.xml
@@ -87,10 +87,10 @@
## read group information
#if str($read_group.selection) == "yes":
#if $read_group.rgid and $read_group.rglb and $read_group.rgpl and $read_group.rgsm:
- --rg-id $read_group.rgid
- --rg LB:$read_group.rglb
- --rg PL:$read_group.rgpl
- --rg SM:$read_group.rgsm
+ --rg-id "$read_group.rgid"
+ --rg "LB:$read_group.rglb"
+ --rg "PL:$read_group.rgpl"
+ --rg "SM:$read_group.rgsm"
#end if
#end if
@@ -254,6 +254,17 @@
</outputs><tests>
+ <test>
+ <!-- basic test on single paired default run -->
+ <param name="type" value="single"/>
+ <param name="selection" value="no"/>
+ <param name="full" value="no"/>
+ <param name="unaligned_file" value="false"/>
+ <param name="source" value="history" />
+ <param name="input_1" value="bowtie2/phix_reads.fastq" ftype="fastqsanger"/>
+ <param name="own_file" value="bowtie2/phix_genome.fasta" />
+ <output name="output" file="bowtie2/phix_mapped.bam" />
+ </test></tests><help>
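The quoting matters because read group fields such as the SM: sample name routinely contain spaces; unquoted, the Cheetah substitution splits into multiple shell arguments. A quick illustration with made-up values:

import shlex

# Unquoted: the sample name breaks into two arguments.
print( shlex.split( '--rg SM:my sample' ) )    # ['--rg', 'SM:my', 'sample']
# Quoted, as the wrapper now emits: one intact argument.
print( shlex.split( '--rg "SM:my sample"' ) )  # ['--rg', 'SM:my sample']

The new <test> block can then be exercised with the standard functional test runner (something along the lines of sh run_functional_tests.sh -id bowtie2, depending on the local setup).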
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e68eee8054d2/
Changeset: e68eee8054d2
Branch: sort
User: dannon
Date: 2013-08-07 19:36:25
Summary: Branch close
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1d91e253734e/
Changeset: 1d91e253734e
Branch: sort
User: ghuls
Date: 2013-05-28 15:48:45
Summary: Fix sorting of numbers in scientific notation.
The -n option of sort can't handle numbers in scientific notation.
The -g option of sort can handle those numbers correctly.
Affected #: 3 files
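A rough Python emulation of the difference, using the column-5 values from the new test file (this imitates, rather than invokes, GNU sort):

import re

values = [ '100.01', '1.1', '1000.1', '1.1e-05', '1.1e2', '1.1e4' ]

def n_key( value ):
    # 'sort -n' style: only the leading integer/decimal prefix is read,
    # so every scientific-notation value here collapses to 1.1.
    match = re.match( r'[-+]?\d*\.?\d+', value )
    return float( match.group() ) if match else 0.0

def g_key( value ):
    # 'sort -g' style: the full number is parsed, exponent included.
    try:
        return float( value )
    except ValueError:
        return 0.0

print( sorted( values, key=n_key ) )  # the e-notation values all tie at 1.1
print( sorted( values, key=g_key ) )  # true ascending numeric order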
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r 1d91e253734edaaafa8f97bb4382ce18fd02371d test-data/sort_in2.bed
--- /dev/null
+++ b/test-data/sort_in2.bed
@@ -0,0 +1,6 @@
+chr10 100 200 feature1 100.01 +
+chr20 800 900 feature2 1.1 +
+chr2 500 600 feature3 1000.1 +
+chr1 300 400 feature4 1.1e-05 +
+chr21 300 500 feature5 1.1e2 +
+chr15 700 800 feature6 1.1e4 +
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r 1d91e253734edaaafa8f97bb4382ce18fd02371d test-data/sort_out3.bed
--- /dev/null
+++ b/test-data/sort_out3.bed
@@ -0,0 +1,6 @@
+chr1 300 400 feature4 1.1e-05 +
+chr20 800 900 feature2 1.1 +
+chr10 100 200 feature1 100.01 +
+chr21 300 500 feature5 1.1e2 +
+chr2 500 600 feature3 1000.1 +
+chr15 700 800 feature6 1.1e4 +
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r 1d91e253734edaaafa8f97bb4382ce18fd02371d tools/filters/sorter.xml
--- a/tools/filters/sorter.xml
+++ b/tools/filters/sorter.xml
@@ -1,19 +1,37 @@
-<tool id="sort1" name="Sort" version="1.0.2">
+<tool id="sort1" name="Sort" version="1.0.3"><description>data in ascending or descending order</description><command interpreter="python">
sorter.py
- --input=$input
- --output=$out_file1
- #set $style = '' if (str($style) == 'alpha') else 'n'
+ --input=${input}
+ --output=${out_file1}
+
+ #if (str($style) == 'num'):
+ #set $style = 'n'
+ #elif (str($style) == 'gennum'):
+ #set $style = 'g'
+ #else:
+ #set $style = ''
+ #end if
+
#set $order = '' if (str($order) == 'ASC') else 'r'
- --key=$column,$column$style$order
+
+ --key=${column},${column}${style}${order}
+
#for $col in $column_set:
- #set $other_column = str($col.other_column)
- #set $other_style = '' if (str($col.other_style) == "alpha") else 'n'
- #set $other_order = '' if (str($col.other_order) == "ASC") else 'r'
- --key=$other_column,$other_column$other_style$other_order
+ #set $other_column = str($col.other_column)
+
+ #if (str($col.other_style) == 'num'):
+ #set $other_style = 'n'
+ #elif (str($col.other_style) == 'gennum'):
+ #set $other_style = 'g'
+ #else:
+ #set $other_style = ''
+ #end if
+
+ #set $other_order = '' if (str($col.other_order) == "ASC") else 'r'
+ --key=${other_column},${other_column}${other_style}${other_order}
#end for
</command><inputs>
@@ -21,6 +39,7 @@
<param name="column" label="on column" type="data_column" data_ref="input" accept_default="true"/><param name="style" type="select" label="with flavor"><option value="num">Numerical sort</option>
+ <option value="gennum">General numeric sort</option><option value="alpha">Alphabetical sort</option></param><param name="order" type="select" label="everything in">
@@ -31,6 +50,7 @@
<param name="other_column" label="on column" type="data_column" data_ref="input" accept_default="true" /><param name="other_style" type="select" label="with flavor"><option value="num">Numerical sort</option>
+ <option value="gennum">General numeric sort</option><option value="alpha">Alphabetical sort</option></param><param name="other_order" type="select" label="everything in">
@@ -63,6 +83,13 @@
<param name="other_order" value="ASC"/><output name="out_file1" file="sort_out2.bed"/></test>
+ <test>
+ <param name="input" value="sort_in2.bed"/>
+ <param name="column" value="5"/>
+ <param name="style" value="gennum"/>
+ <param name="order" value="ASC"/>
+ <output name="out_file1" file="sort_out3.bed"/>
+ </test></tests><help>
.. class:: infomark
@@ -75,8 +102,9 @@
This tool sorts the dataset on any number of columns in either ascending or descending order.
-* Numerical sort orders numbers by their magnitude, ignores all characters besides numbers, and evaluates a string of numbers to the value they signify.
-* Alphabetical sort is a phonebook type sort based on the conventional order of letters in an alphabet. Each nth letter is compared with the nth letter of other words in the list, starting at the first letter of each word and advancing to the second, third, fourth, and so on, until the order is established. Therefore, in an alphabetical sort, 2 comes after 100 (1 < 2).
+* **Numerical sort** orders numbers by their magnitude, ignores all characters besides numbers, and evaluates a string of numbers to the value they signify.
+* **General numeric sort** orders numbers by their general numerical value. Unlike the numerical sort option, it can handle numbers in scientific notation too.
+* **Alphabetical sort** is a phonebook type sort based on the conventional order of letters in an alphabet. Each nth letter is compared with the nth letter of other words in the list, starting at the first letter of each word and advancing to the second, third, fourth, and so on, until the order is established. Therefore, in an alphabetical sort, 2 comes after 100 (1 < 2).
-----
@@ -106,7 +134,7 @@
A g 10 H h 43
A g 4 I h 500
-on columns 1 (alpha), 3 (num), and 6 (num) in ascending order will yield::
+on columns 1 (alphabetical), 3 (numerical), and 6 (numerical) in ascending order will yield::
A kk 4 I h 111
A edf 4 tw b 234
@@ -127,5 +155,34 @@
Pd gf 7 Gthe de 567
Q d 7 II jhu 45
rS hty 90 YY LOp 89
+
+
+Sorting the following::
+
+ chr10 100 200 feature1 100.01 +
+ chr20 800 900 feature2 1.1 +
+ chr2 500 600 feature3 1000.1 +
+ chr1 300 400 feature4 1.1e-05 +
+ chr21 300 500 feature5 1.1e2 +
+ chr15 700 800 feature6 1.1e4 +
+
+on column 5 (numerical) in ascending order will yield::
+
+ chr1 300 400 feature4 1.1e-05 +
+ chr15 700 800 feature6 1.1e4 +
+ chr20 800 900 feature2 1.1 +
+ chr21 300 500 feature5 1.1e2 +
+ chr10 100 200 feature1 100.01 +
+ chr2 500 600 feature3 1000.1 +
+
+on column 5 (general numeric) in ascending order will yield::
+
+ chr1 300 400 feature4 1.1e-05 +
+ chr20 800 900 feature2 1.1 +
+ chr10 100 200 feature1 100.01 +
+ chr21 300 500 feature5 1.1e2 +
+ chr2 500 600 feature3 1000.1 +
+ chr15 700 800 feature6 1.1e4 +
+
</help></tool>
https://bitbucket.org/galaxy/galaxy-central/commits/f54e602ca233/
Changeset: f54e602ca233
User: dannon
Date: 2013-08-07 19:33:27
Summary: Merged in ghuls/galaxy-central/sort (pull request #172)
Fix sorting of numbers in scientific notation.
Affected #: 3 files
diff -r 3e6f22a221eb521dddcb13bb7c1a07b4b9551932 -r f54e602ca23310b8a237af1388d7bc2a37047d34 test-data/sort_in2.bed
--- /dev/null
+++ b/test-data/sort_in2.bed
@@ -0,0 +1,6 @@
+chr10 100 200 feature1 100.01 +
+chr20 800 900 feature2 1.1 +
+chr2 500 600 feature3 1000.1 +
+chr1 300 400 feature4 1.1e-05 +
+chr21 300 500 feature5 1.1e2 +
+chr15 700 800 feature6 1.1e4 +
diff -r 3e6f22a221eb521dddcb13bb7c1a07b4b9551932 -r f54e602ca23310b8a237af1388d7bc2a37047d34 test-data/sort_out3.bed
--- /dev/null
+++ b/test-data/sort_out3.bed
@@ -0,0 +1,6 @@
+chr1 300 400 feature4 1.1e-05 +
+chr20 800 900 feature2 1.1 +
+chr10 100 200 feature1 100.01 +
+chr21 300 500 feature5 1.1e2 +
+chr2 500 600 feature3 1000.1 +
+chr15 700 800 feature6 1.1e4 +
diff -r 3e6f22a221eb521dddcb13bb7c1a07b4b9551932 -r f54e602ca23310b8a237af1388d7bc2a37047d34 tools/filters/sorter.xml
--- a/tools/filters/sorter.xml
+++ b/tools/filters/sorter.xml
@@ -1,19 +1,37 @@
-<tool id="sort1" name="Sort" version="1.0.2">
+<tool id="sort1" name="Sort" version="1.0.3"><description>data in ascending or descending order</description><command interpreter="python">
sorter.py
- --input=$input
- --output=$out_file1
- #set $style = '' if (str($style) == 'alpha') else 'n'
+ --input=${input}
+ --output=${out_file1}
+
+ #if (str($style) == 'num'):
+ #set $style = 'n'
+ #elif (str($style) == 'gennum'):
+ #set $style = 'g'
+ #else:
+ #set $style = ''
+ #end if
+
#set $order = '' if (str($order) == 'ASC') else 'r'
- --key=$column,$column$style$order
+
+ --key=${column},${column}${style}${order}
+
#for $col in $column_set:
- #set $other_column = str($col.other_column)
- #set $other_style = '' if (str($col.other_style) == "alpha") else 'n'
- #set $other_order = '' if (str($col.other_order) == "ASC") else 'r'
- --key=$other_column,$other_column$other_style$other_order
+ #set $other_column = str($col.other_column)
+
+ #if (str($col.other_style) == 'num'):
+ #set $other_style = 'n'
+ #elif (str($col.other_style) == 'gennum'):
+ #set $other_style = 'g'
+ #else:
+ #set $other_style = ''
+ #end if
+
+ #set $other_order = '' if (str($col.other_order) == "ASC") else 'r'
+ --key=${other_column},${other_column}${other_style}${other_order}
#end for
</command><inputs>
@@ -21,6 +39,7 @@
<param name="column" label="on column" type="data_column" data_ref="input" accept_default="true"/><param name="style" type="select" label="with flavor"><option value="num">Numerical sort</option>
+ <option value="gennum">General numeric sort</option><option value="alpha">Alphabetical sort</option></param><param name="order" type="select" label="everything in">
@@ -31,6 +50,7 @@
<param name="other_column" label="on column" type="data_column" data_ref="input" accept_default="true" /><param name="other_style" type="select" label="with flavor"><option value="num">Numerical sort</option>
+ <option value="gennum">General numeric sort</option><option value="alpha">Alphabetical sort</option></param><param name="other_order" type="select" label="everything in">
@@ -63,6 +83,13 @@
<param name="other_order" value="ASC"/><output name="out_file1" file="sort_out2.bed"/></test>
+ <test>
+ <param name="input" value="sort_in2.bed"/>
+ <param name="column" value="5"/>
+ <param name="style" value="gennum"/>
+ <param name="order" value="ASC"/>
+ <output name="out_file1" file="sort_out3.bed"/>
+ </test></tests><help>
.. class:: infomark
@@ -75,8 +102,9 @@
This tool sorts the dataset on any number of columns in either ascending or descending order.
-* Numerical sort orders numbers by their magnitude, ignores all characters besides numbers, and evaluates a string of numbers to the value they signify.
-* Alphabetical sort is a phonebook type sort based on the conventional order of letters in an alphabet. Each nth letter is compared with the nth letter of other words in the list, starting at the first letter of each word and advancing to the second, third, fourth, and so on, until the order is established. Therefore, in an alphabetical sort, 2 comes after 100 (1 < 2).
+* **Numerical sort** orders numbers by their magnitude, ignores all characters besides numbers, and evaluates a string of numbers to the value they signify.
+* **General numeric sort** orders numbers by their general numerical value. Unlike the numerical sort option, it can handle numbers in scientific notation too.
+* **Alphabetical sort** is a phonebook type sort based on the conventional order of letters in an alphabet. Each nth letter is compared with the nth letter of other words in the list, starting at the first letter of each word and advancing to the second, third, fourth, and so on, until the order is established. Therefore, in an alphabetical sort, 2 comes after 100 (1 < 2).
-----
@@ -106,7 +134,7 @@
A g 10 H h 43
A g 4 I h 500
-on columns 1 (alpha), 3 (num), and 6 (num) in ascending order will yield::
+on columns 1 (alphabetical), 3 (numerical), and 6 (numerical) in ascending order will yield::
A kk 4 I h 111
A edf 4 tw b 234
@@ -127,5 +155,34 @@
Pd gf 7 Gthe de 567
Q d 7 II jhu 45
rS hty 90 YY LOp 89
+
+
+Sorting the following::
+
+ chr10 100 200 feature1 100.01 +
+ chr20 800 900 feature2 1.1 +
+ chr2 500 600 feature3 1000.1 +
+ chr1 300 400 feature4 1.1e-05 +
+ chr21 300 500 feature5 1.1e2 +
+ chr15 700 800 feature6 1.1e4 +
+
+on column 5 (numerical) in ascending order will yield::
+
+ chr1 300 400 feature4 1.1e-05 +
+ chr15 700 800 feature6 1.1e4 +
+ chr20 800 900 feature2 1.1 +
+ chr21 300 500 feature5 1.1e2 +
+ chr10 100 200 feature1 100.01 +
+ chr2 500 600 feature3 1000.1 +
+
+on column 5 (general numeric) in ascending order will yield::
+
+ chr1 300 400 feature4 1.1e-05 +
+ chr20 800 900 feature2 1.1 +
+ chr10 100 200 feature1 100.01 +
+ chr21 300 500 feature5 1.1e2 +
+ chr2 500 600 feature3 1000.1 +
+ chr15 700 800 feature6 1.1e4 +
+
</help></tool>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/caaf7db30954/
Changeset: caaf7db30954
User: jmchilton
Date: 2012-12-14 21:51:20
Summary: Allow display application parameters to be defined with allow_override="True". If parameter X is defined with allow_override="True", then if the dataset link contains the query parameter app_X="foo", the display parameter X will be set to foo regardless of the template text.
Affected #: 3 files
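In practice this means a display link URL can carry overrides as query parameters: anything of the form app_<name>=<value> is stripped of its app_ prefix and handed to the display application logic. A minimal sketch of that prefix-stripping (the parameter names are invented for illustration):

kwds = { 'app_GALAXY_URL': 'http://example.org/display', 'unrelated': 'kept' }

app_kwds = {}
for name, value in dict( kwds ).items():  # iterate over a copy; kwds is mutated
    if name.startswith( 'app_' ):
        app_kwds[ name[ len( 'app_' ): ] ] = value
        del kwds[ name ]

print( app_kwds )  # {'GALAXY_URL': 'http://example.org/display'}
print( kwds )      # {'unrelated': 'kept'}

Only parameters whose XML definition sets allow_override="True" actually accept the override; all others keep their template-derived value.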
diff -r eae248415389203907b5b951f139a200024ae069 -r caaf7db309547d9633e9619c89b33fb7e90a92c4 lib/galaxy/datatypes/display_applications/application.py
--- a/lib/galaxy/datatypes/display_applications/application.py
+++ b/lib/galaxy/datatypes/display_applications/application.py
@@ -57,14 +57,17 @@
rval[ key ] = value
rval[ DEFAULT_DATASET_NAME ] = data #always have the display dataset name available
return rval
- def build_parameter_dict( self, data, dataset_hash, user_hash, trans ):
+ def build_parameter_dict( self, data, dataset_hash, user_hash, trans, app_kwds ):
other_values = self.get_inital_values( data, trans )
other_values[ 'DATASET_HASH' ] = dataset_hash
other_values[ 'USER_HASH' ] = user_hash
for name, param in self.parameters.iteritems():
assert name not in other_values, "The display parameter '%s' has been defined more than once." % name
if param.ready( other_values ):
- other_values[ name ] = param.get_value( other_values, dataset_hash, user_hash, trans )#subsequent params can rely on this value
+ if name in app_kwds and param.allow_override:
+ other_values[ name ] = app_kwds[ name ]
+ else:
+ other_values[ name ] = param.get_value( other_values, dataset_hash, user_hash, trans )#subsequent params can rely on this value
else:
other_values[ name ] = None
return False, other_values #need to stop here, next params may need this value
@@ -120,13 +123,13 @@
return iter( self.links )
class PopulatedDisplayApplicationLink( object ):
- def __init__( self, display_application_link, data, dataset_hash, user_hash, trans ):
+ def __init__( self, display_application_link, data, dataset_hash, user_hash, trans, app_kwds ):
self.link = display_application_link
self.data = data
self.dataset_hash = dataset_hash
self.user_hash = user_hash
self.trans = trans
- self.ready, self.parameters = self.link.build_parameter_dict( self.data, self.dataset_hash, self.user_hash, trans )
+ self.ready, self.parameters = self.link.build_parameter_dict( self.data, self.dataset_hash, self.user_hash, trans, app_kwds )
def display_ready( self ):
return self.ready
def get_param_value( self, name ):
@@ -184,9 +187,9 @@
version = "1.0.0"
self.version = version
self.links = odict()
- def get_link( self, link_name, data, dataset_hash, user_hash, trans ):
+ def get_link( self, link_name, data, dataset_hash, user_hash, trans, app_kwds ):
#returns a link object with data knowledge to generate links
- return PopulatedDisplayApplicationLink( self.links[ link_name ], data, dataset_hash, user_hash, trans )
+ return PopulatedDisplayApplicationLink( self.links[ link_name ], data, dataset_hash, user_hash, trans, app_kwds )
def filter_by_dataset( self, data, trans ):
filtered = DisplayApplication( self.id, self.name, self.datatypes_registry, version = self.version )
for link_name, link_value in self.links.iteritems():
diff -r eae248415389203907b5b951f139a200024ae069 -r caaf7db309547d9633e9619c89b33fb7e90a92c4 lib/galaxy/datatypes/display_applications/parameters.py
--- a/lib/galaxy/datatypes/display_applications/parameters.py
+++ b/lib/galaxy/datatypes/display_applications/parameters.py
@@ -28,6 +28,7 @@
self.viewable = string_as_bool( elem.get( 'viewable', 'False' ) ) #only allow these to be viewed via direct url when explicitly set to viewable
self.strip = string_as_bool( elem.get( 'strip', 'False' ) )
self.strip_https = string_as_bool( elem.get( 'strip_https', 'False' ) )
+ self.allow_override = string_as_bool( elem.get( 'allow_override', 'False' ) ) # Passing query param app_<name>=<value> to dataset controller allows override if this is true.
def get_value( self, other_values, dataset_hash, user_hash, trans ):
raise Exception, 'Unimplemented'
def prepare( self, other_values, dataset_hash, user_hash, trans ):
@@ -126,7 +127,7 @@
def __init__( self, elem, link ):
DisplayApplicationParameter.__init__( self, elem, link )
- self.text = elem.text
+ self.text = elem.text or ''
def get_value( self, other_values, dataset_hash, user_hash, trans ):
value = fill_template( self.text, context = other_values )
if self.strip:
diff -r eae248415389203907b5b951f139a200024ae069 -r caaf7db309547d9633e9619c89b33fb7e90a92c4 lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -719,6 +719,12 @@
@web.expose
def display_application( self, trans, dataset_id=None, user_id=None, app_name = None, link_name = None, app_action = None, action_param = None, **kwds ):
"""Access to external display applications"""
+ # Build list of parameters to pass in to display application logic (app_kwds)
+ app_kwds = {}
+ for name, value in dict(kwds).iteritems(): # clone kwds because we remove stuff as we go.
+ if name.startswith( "app_" ):
+ app_kwds[ name[ len( "app_" ): ] ] = value
+ del kwds[ name ]
if kwds:
log.debug( "Unexpected Keywords passed to display_application: %s" % kwds ) #route memory?
#decode ids
@@ -742,7 +748,7 @@
display_app = trans.app.datatypes_registry.display_applications.get( app_name )
assert display_app, "Unknown display application has been requested: %s" % app_name
dataset_hash, user_hash = encode_dataset_user( trans, data, user )
- display_link = display_app.get_link( link_name, data, dataset_hash, user_hash, trans )
+ display_link = display_app.get_link( link_name, data, dataset_hash, user_hash, trans, app_kwds )
assert display_link, "Unknown display link has been requested: %s" % link_name
if data.state == data.states.ERROR:
msg.append( ( 'This dataset is in an error state, you cannot view it at an external display application.', 'error' ) )
https://bitbucket.org/galaxy/galaxy-central/commits/3e6f22a221eb/
Changeset: 3e6f22a221eb
User: dannon
Date: 2013-08-07 19:27:58
Summary: Merged in jmchilton/galaxy-central-display-application-parameters (pull request #99)
Display Application Parameter Enhancements
Affected #: 3 files
diff -r 6e13df4bc3f0e73b69ba510cc626c62145b0a81c -r 3e6f22a221eb521dddcb13bb7c1a07b4b9551932 lib/galaxy/datatypes/display_applications/application.py
--- a/lib/galaxy/datatypes/display_applications/application.py
+++ b/lib/galaxy/datatypes/display_applications/application.py
@@ -57,14 +57,17 @@
rval[ key ] = value
rval[ DEFAULT_DATASET_NAME ] = data #always have the display dataset name available
return rval
- def build_parameter_dict( self, data, dataset_hash, user_hash, trans ):
+ def build_parameter_dict( self, data, dataset_hash, user_hash, trans, app_kwds ):
other_values = self.get_inital_values( data, trans )
other_values[ 'DATASET_HASH' ] = dataset_hash
other_values[ 'USER_HASH' ] = user_hash
for name, param in self.parameters.iteritems():
assert name not in other_values, "The display parameter '%s' has been defined more than once." % name
if param.ready( other_values ):
- other_values[ name ] = param.get_value( other_values, dataset_hash, user_hash, trans )#subsequent params can rely on this value
+ if name in app_kwds and param.allow_override:
+ other_values[ name ] = app_kwds[ name ]
+ else:
+ other_values[ name ] = param.get_value( other_values, dataset_hash, user_hash, trans )#subsequent params can rely on this value
else:
other_values[ name ] = None
return False, other_values #need to stop here, next params may need this value
@@ -120,13 +123,13 @@
return iter( self.links )
class PopulatedDisplayApplicationLink( object ):
- def __init__( self, display_application_link, data, dataset_hash, user_hash, trans ):
+ def __init__( self, display_application_link, data, dataset_hash, user_hash, trans, app_kwds ):
self.link = display_application_link
self.data = data
self.dataset_hash = dataset_hash
self.user_hash = user_hash
self.trans = trans
- self.ready, self.parameters = self.link.build_parameter_dict( self.data, self.dataset_hash, self.user_hash, trans )
+ self.ready, self.parameters = self.link.build_parameter_dict( self.data, self.dataset_hash, self.user_hash, trans, app_kwds )
def display_ready( self ):
return self.ready
def get_param_value( self, name ):
@@ -184,9 +187,9 @@
version = "1.0.0"
self.version = version
self.links = odict()
- def get_link( self, link_name, data, dataset_hash, user_hash, trans ):
+ def get_link( self, link_name, data, dataset_hash, user_hash, trans, app_kwds ):
#returns a link object with data knowledge to generate links
- return PopulatedDisplayApplicationLink( self.links[ link_name ], data, dataset_hash, user_hash, trans )
+ return PopulatedDisplayApplicationLink( self.links[ link_name ], data, dataset_hash, user_hash, trans, app_kwds )
def filter_by_dataset( self, data, trans ):
filtered = DisplayApplication( self.id, self.name, self.datatypes_registry, version = self.version )
for link_name, link_value in self.links.iteritems():
diff -r 6e13df4bc3f0e73b69ba510cc626c62145b0a81c -r 3e6f22a221eb521dddcb13bb7c1a07b4b9551932 lib/galaxy/datatypes/display_applications/parameters.py
--- a/lib/galaxy/datatypes/display_applications/parameters.py
+++ b/lib/galaxy/datatypes/display_applications/parameters.py
@@ -28,6 +28,7 @@
self.viewable = string_as_bool( elem.get( 'viewable', 'False' ) ) #only allow these to be viewed via direct url when explicitly set to viewable
self.strip = string_as_bool( elem.get( 'strip', 'False' ) )
self.strip_https = string_as_bool( elem.get( 'strip_https', 'False' ) )
+ self.allow_override = string_as_bool( elem.get( 'allow_override', 'False' ) ) # Passing query param app_<name>=<value> to dataset controller allows override if this is true.
def get_value( self, other_values, dataset_hash, user_hash, trans ):
raise Exception, 'Unimplemented'
def prepare( self, other_values, dataset_hash, user_hash, trans ):
@@ -126,7 +127,7 @@
def __init__( self, elem, link ):
DisplayApplicationParameter.__init__( self, elem, link )
- self.text = elem.text
+ self.text = elem.text or ''
def get_value( self, other_values, dataset_hash, user_hash, trans ):
value = fill_template( self.text, context = other_values )
if self.strip:
diff -r 6e13df4bc3f0e73b69ba510cc626c62145b0a81c -r 3e6f22a221eb521dddcb13bb7c1a07b4b9551932 lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -720,6 +720,12 @@
@web.expose
def display_application( self, trans, dataset_id=None, user_id=None, app_name = None, link_name = None, app_action = None, action_param = None, **kwds ):
"""Access to external display applications"""
+ # Build list of parameters to pass in to display application logic (app_kwds)
+ app_kwds = {}
+ for name, value in dict(kwds).iteritems(): # clone kwds because we remove stuff as we go.
+ if name.startswith( "app_" ):
+ app_kwds[ name[ len( "app_" ): ] ] = value
+ del kwds[ name ]
if kwds:
log.debug( "Unexpected Keywords passed to display_application: %s" % kwds ) #route memory?
#decode ids
@@ -743,7 +749,7 @@
display_app = trans.app.datatypes_registry.display_applications.get( app_name )
assert display_app, "Unknown display application has been requested: %s" % app_name
dataset_hash, user_hash = encode_dataset_user( trans, data, user )
- display_link = display_app.get_link( link_name, data, dataset_hash, user_hash, trans )
+ display_link = display_app.get_link( link_name, data, dataset_hash, user_hash, trans, app_kwds )
assert display_link, "Unknown display link has been requested: %s" % link_name
if data.state == data.states.ERROR:
msg.append( ( 'This dataset is in an error state, you cannot view it at an external display application.', 'error' ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: compile_templates.py: add option to save intermediate 'handlebars' files when compiling multiple template files
by commits-noreply@bitbucket.org 07 Aug '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6e13df4bc3f0/
Changeset: 6e13df4bc3f0
User: carlfeberhard
Date: 2013-08-07 18:26:42
Summary: compile_templates.py: add option to save intermediate 'handlebars' files when compiling multiple template files
Affected #: 1 file
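With this change the default behavior is unchanged and the intermediate *.handlebars files are still removed; passing the new flag, presumably with an invocation along the lines of python compile_templates.py --no-remove, keeps them around for inspection.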
diff -r aa9aeb756031a63f4cd7dc4e1b2d84b12522ffef -r 6e13df4bc3f0e73b69ba510cc626c62145b0a81c static/scripts/templates/compile_templates.py
--- a/static/scripts/templates/compile_templates.py
+++ b/static/scripts/templates/compile_templates.py
@@ -208,13 +208,15 @@
print "\nCall this script with the '-h' for more help"
# delete multi template intermediate files
- print "\nCleaning up intermediate multi-template template files:"
- for filename in multi_template_template_filenames:
- try:
- print 'removing', filename
- os.remove( filename )
- except Exception, exc:
- print exc
+ if options.remove_intermediate:
+ print "\nCleaning up intermediate multi-template template files:"
+ for filename in multi_template_template_filenames:
+ try:
+ print 'removing', filename
+ os.remove( filename )
+ except Exception, exc:
+ print exc
+
# ------------------------------------------------------------------------------
if __name__ == '__main__':
@@ -226,7 +228,10 @@
help=( 'indicates that files ending with the given string contain multiple '
+ 'templates and the script should break those into individual '
+ 'handlebars templates (defaults to "%s")' ) % DEFAULT_MULTI_EXT )
-
- ( options, args ) = optparser.parse_args()
+ optparser.add_option( '--no-remove', action='store_false', dest='remove_intermediate', default=True,
+ help=( 'remove intermediate *.handlebars files when using multiple template'
+ + 'files (defaults to "True")' ) )
+
+ ( options, args ) = optparser.parse_args()
sys.exit( main( options, args ) )
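The command-line change here is a standard optparse "negative flag": --no-remove uses action='store_false' against a dest that defaults to True, so cleanup stays the default and the flag opts out. A small runnable sketch of that pattern, independent of compile_templates.py:

import optparse

optparser = optparse.OptionParser()
optparser.add_option('--no-remove', action='store_false',
                     dest='remove_intermediate', default=True,
                     help='keep intermediate *.handlebars files')

# Simulate invoking the script with and without the flag.
(options, _) = optparser.parse_args(['--no-remove'])
assert options.remove_intermediate is False
(options, _) = optparser.parse_args([])
assert options.remove_intermediate is True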
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: Visualization framework: add config files for built-in visualizations
by commits-noreply(a)bitbucket.org 07 Aug '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/aa9aeb756031/
Changeset: aa9aeb756031
User: carlfeberhard
Date: 2013-08-07 17:48:25
Summary: Visualization framework: add config files for built-in visualizations
Affected #: 4 files
diff -r 425272160a7f3764823e18ea395539c54c8e41fd -r aa9aeb756031a63f4cd7dc4e1b2d84b12522ffef config/plugins/visualizations/circster/config/circster.xml
--- /dev/null
+++ b/config/plugins/visualizations/circster/config/circster.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="circster">
+ <data_sources>
+ <data_source>
+ <model_class>HistoryDatasetAssociation</model_class>
+ <test type="isinstance" test_attr="datatype" result_type="datatype">data.Data</test>
+ <to_param param_attr="id">dataset_id</to_param>
+ <to_param assign="hda">hda_ldda</to_param>
+ </data_source>
+ <data_source>
+ <model_class>LibraryDatasetDatasetAssociation</model_class>
+ <test type="isinstance" test_attr="datatype" result_type="datatype">data.Data</test>
+ <to_param param_attr="id">dataset_id</to_param>
+ <to_param assign="ldda">hda_ldda</to_param>
+ </data_source>
+ </data_sources>
+ <params>
+ <param type="visualization">id</param>
+ <param type="hda_or_ldda">dataset_id</param>
+ <param_modifier type="string" modifies="dataset_id">hda_ldda</param_modifier>
+ <param type="dbkey">dbkey</param>
+ </params>
+ <!-- template_root and template are currently ignored for the 'built-in' visualizations -->
+ <template_root>webapps/galaxy/visualization</template_root>
+ <template>circster.mako</template>
+ <render_location>_top</render_location>
+</visualization>
diff -r 425272160a7f3764823e18ea395539c54c8e41fd -r aa9aeb756031a63f4cd7dc4e1b2d84b12522ffef config/plugins/visualizations/phyloviz/config/phyloviz.xml
--- /dev/null
+++ b/config/plugins/visualizations/phyloviz/config/phyloviz.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="phyloviz">
+ <data_sources>
+ <data_source>
+ <model_class>HistoryDatasetAssociation</model_class>
+ <test type="isinstance" test_attr="datatype" result_type="datatype">data.Newick</test>
+ <test type="isinstance" test_attr="datatype" result_type="datatype">data.Nexus</test>
+ <to_param param_attr="id">dataset_id</to_param>
+ </data_source>
+ </data_sources>
+ <params>
+ <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+ <param type="integer" default="0">tree_index</param>
+ </params>
+ <!-- template_root and template are currently ignored for the 'built-in' visualizations -->
+ <template_root>webapps/galaxy/visualization</template_root>
+ <template>phyloviz.mako</template>
+ <render_location>_top</render_location>
+</visualization>
diff -r 425272160a7f3764823e18ea395539c54c8e41fd -r aa9aeb756031a63f4cd7dc4e1b2d84b12522ffef config/plugins/visualizations/sweepster/config/sweepster.xml
--- /dev/null
+++ b/config/plugins/visualizations/sweepster/config/sweepster.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="sweepster">
+ <data_sources>
+ <data_source>
+ <model_class>HistoryDatasetAssociation</model_class>
+ <test type="isinstance" test_attr="datatype" result_type="datatype">data.Data</test>
+ <to_param param_attr="id">dataset_id</to_param>
+ <to_param assign="hda">hda_ldda</to_param>
+ </data_source>
+ <data_source>
+ <model_class>LibraryDatasetDatasetAssociation</model_class>
+ <test type="isinstance" test_attr="datatype" result_type="datatype">data.Data</test>
+ <to_param param_attr="id">dataset_id</to_param>
+ <to_param assign="ldda">hda_ldda</to_param>
+ </data_source>
+ </data_sources>
+ <params>
+ <param type="visualization" var_name_in_template="viz">visualization</param>
+ <param type="hda_or_ldda" var_name_in_template="dataset">dataset_id</param>
+ <param_modifier type="string" modifies="dataset_id">hda_ldda</param_modifier>
+ </params>
+ <!-- template_root and template are currently ignored for the 'built-in' visualizations -->
+ <template_root>webapps/galaxy/visualization</template_root>
+ <template>sweepster.mako</template>
+ <render_location>_top</render_location>
+</visualization>
diff -r 425272160a7f3764823e18ea395539c54c8e41fd -r aa9aeb756031a63f4cd7dc4e1b2d84b12522ffef config/plugins/visualizations/trackster/config/trackster.xml
--- /dev/null
+++ b/config/plugins/visualizations/trackster/config/trackster.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="trackster">
+ <!--not tested yet -->
+ <data_sources>
+ <data_source>
+ <model_class>HistoryDatasetAssociation</model_class>
+ <test type="isinstance" test_attr="datatype" result_type="datatype">data.Data</test>
+ <to_param param_attr="id">dataset_id</to_param>
+ <to_param assign="hda">hda_ldda</to_param>
+ <to_param param_attr="dbkey">dbkey</to_param>
+ </data_source>
+ <data_source>
+ <model_class>LibraryDatasetDatasetAssociation</model_class>
+ <test type="isinstance" test_attr="datatype" result_type="datatype">data.Data</test>
+ <to_param param_attr="id">dataset_id</to_param>
+ <to_param assign="ldda">hda_ldda</to_param>
+ </data_source>
+ </data_sources>
+ <params>
+ <param type="visualization">id</param>
+ <param type="dataset">dataset_id</param>
+ <param type="genome_region">genome_region</param>
+ <param type="dbkey">dbkey</param>
+ </params>
+ <!-- template_root and template are currently ignored for the 'built-in' visualizations -->
+ <template_root>webapps/galaxy/visualization/tracks</template_root>
+ <template>browser.mako</template>
+ <render_location>_top</render_location>
+</visualization>
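Galaxy's visualization registry has its own loader for these files; purely as an illustration of their shape, the sketch below reads a trimmed phyloviz-style config with the standard library. The XML string is abridged (DOCTYPE dropped, since ElementTree does not validate against the DTD anyway); element and attribute names match the configs above.

import xml.etree.ElementTree as ET

CONFIG = """\
<visualization name="phyloviz">
  <data_sources>
    <data_source>
      <model_class>HistoryDatasetAssociation</model_class>
      <test type="isinstance" test_attr="datatype" result_type="datatype">data.Newick</test>
      <to_param param_attr="id">dataset_id</to_param>
    </data_source>
  </data_sources>
  <params>
    <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
    <param type="integer" default="0">tree_index</param>
  </params>
</visualization>
"""

root = ET.fromstring(CONFIG)
print(root.get('name'))                                   # phyloviz
for test in root.iter('test'):
    print(test.get('type'), test.get('test_attr'), test.text)
for param in root.find('params'):
    print(param.tag, param.get('type'), param.text)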
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: Visualization framework: have 'has_dataprovider' test work on 'test_attr'
by commits-noreply(a)bitbucket.org 07 Aug '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/425272160a7f/
Changeset: 425272160a7f
User: carlfeberhard
Date: 2013-08-07 17:12:34
Summary: Visualization framework: have 'has_dataprovider' test work on 'test_attr'
Affected #: 1 file
diff -r f33c054d6d5b75ae545248d71ec559d74b4fa636 -r 425272160a7f3764823e18ea395539c54c8e41fd lib/galaxy/visualization/registry.py
--- a/lib/galaxy/visualization/registry.py
+++ b/lib/galaxy/visualization/registry.py
@@ -530,11 +530,10 @@
#TODO: wish we could take this further but it would mean passing in the datatypes_registry
test_fn = lambda o, result: isinstance( getter( o ), result )
- #TODO: needs cleanup - robustiosity-nessness
# does the object itself have a datatype attr and does that datatype have the given dataprovider
elif test_type == 'has_dataprovider':
- test_fn = lambda o, result: ( hasattr( o, 'datatype' )
- and o.datatype.has_dataprovider( result ) )
+ test_fn = lambda o, result: ( hasattr( getter( o ), 'has_dataprovider' )
+ and getter( o ).has_dataprovider( result ) )
# default to simple (string) equivalence (coercing the test_attr to a string)
else:
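The substance of the change is duck typing: rather than assuming the tested object has a .datatype attribute, the test asks whether whatever getter(o) resolves to actually responds to has_dataprovider(). A hedged sketch of that shape, with illustrative names (FakeDatatype and the identity getter are not Galaxy code):

class FakeDatatype(object):
    def has_dataprovider(self, name):
        return name == 'node'

# In the registry, getter resolves the configured test_attr on o;
# here the identity function stands in for it.
getter = lambda o: o

test_fn = lambda o, result: (hasattr(getter(o), 'has_dataprovider')
                             and getter(o).has_dataprovider(result))

print(test_fn(FakeDatatype(), 'node'))  # True
print(test_fn(object(), 'node'))        # False: no has_dataprovider attribute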
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: Dave Bouvier: Fix for the download_binary action type incorrectly processing downloaded compressed files.
by commits-noreply(a)bitbucket.org 06 Aug '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f33c054d6d5b/
Changeset: f33c054d6d5b
User: Dave Bouvier
Date: 2013-08-07 03:21:10
Summary: Fix for the download_binary action type incorrectly processing downloaded compressed files.
Affected #: 3 files
diff -r 773941fd26a43c8522c3ff9977cfda5968f65505 -r f33c054d6d5b75ae545248d71ec559d74b4fa636 lib/tool_shed/galaxy_install/tool_dependencies/common_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/common_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/common_util.py
@@ -71,23 +71,13 @@
__shellquote(env_shell_file_path))
return cmd
-def download_binary_from_url( url, work_dir, install_dir ):
+def download_binary( url, work_dir ):
'''
- Download a pre-compiled binary from the specified URL. If the downloaded file is an archive,
- extract it into install_dir and delete the archive.
+ Download a pre-compiled binary from the specified URL.
'''
downloaded_filename = os.path.split( url )[ -1 ]
- try:
- dir = url_download( work_dir, downloaded_filename, url, extract=True )
- downloaded_filepath = os.path.join( work_dir, downloaded_filename )
- if is_compressed( downloaded_filepath ):
- os.remove( downloaded_filepath )
- move_directory_files( current_dir=work_dir,
- source_dir=dir,
- destination_dir=install_dir )
- return True
- except HTTPError:
- return False
+ dir = url_download( work_dir, downloaded_filename, url, extract=False )
+ return downloaded_filename
def extract_tar( file_name, file_path ):
if isgzip( file_name ) or isbz2( file_name ):
diff -r 773941fd26a43c8522c3ff9977cfda5968f65505 -r f33c054d6d5b75ae545248d71ec559d74b4fa636 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -31,6 +31,15 @@
if int( version.split( "." )[ 0 ] ) < 1:
raise NotImplementedError( "Install Fabric version 1.0 or later." )
+def filter_actions_after_binary_installation( actions ):
+ '''Filter out actions that should not be processed if a binary download succeeded.'''
+ filtered_actions = []
+ for action in actions:
+ action_type, action_dict = action
+ if action_type in [ 'set_environment', 'chmod', 'download_binary' ]:
+ filtered_actions.append( action )
+ return filtered_actions
+
def handle_command( app, tool_dependency, install_dir, cmd, return_output=False ):
sa_session = app.model.context.current
with settings( warn_only=True ):
@@ -165,8 +174,6 @@
actions = actions_dict.get( 'actions', None )
filtered_actions = []
env_shell_file_paths = []
- # Default to false so that the install process will default to compiling.
- binary_found = False
if actions:
with make_tmp_dir() as work_dir:
with lcd( work_dir ):
@@ -174,20 +181,38 @@
# are currently only two supported processes; download_by_url and clone via a "shell_command" action type.
action_type, action_dict = actions[ 0 ]
if action_type == 'download_binary':
- # Eliminate the download_binary action so remaining actions can be processed correctly.
- filtered_actions = actions[ 1: ]
url = action_dict[ 'url' ]
+ # Get the target directory for this download, if the user has specified one. Default to the root of $INSTALL_DIR.
+ target_directory = action_dict.get( 'target_directory', None )
# Attempt to download a binary from the specified URL.
- log.debug( 'Attempting to download from %s', url )
- binary_found = common_util.download_binary_from_url( url, work_dir, install_dir )
- if binary_found:
- # If the attempt succeeded, set the action_type to binary_found, in order to skip any download_by_url or shell_command actions.
+ log.debug( 'Attempting to download from %s to %s', url, str( target_directory ) )
+ downloaded_filename = None
+ try:
+ downloaded_filename = common_util.download_binary( url, work_dir )
+ # Filter out any actions that are not download_binary, chmod, or set_environment.
+ filtered_actions = filter_actions_after_binary_installation( actions[ 1: ] )
+ # Set actions to the same, so that the current download_binary doesn't get re-run in the filtered actions below.
actions = filtered_actions
- action_type = 'binary_found'
- else:
+ except Exception, e:
+ log.exception( str( e ) )
# No binary exists, or there was an error downloading the binary from the generated URL. Proceed with the remaining actions.
- del actions[ 0 ]
- action_type, action_dict = actions[ 0 ]
+ filtered_actions = actions[ 1: ]
+ action_type, action_dict = filtered_actions[ 0 ]
+ # If the downloaded file exists, move it to $INSTALL_DIR. Put this outside the try/catch above so that
+ # any errors in the move step are correctly sent to the tool dependency error handler.
+ if downloaded_filename and os.path.exists( os.path.join( work_dir, downloaded_filename ) ):
+ if target_directory:
+ target_directory = os.path.realpath( os.path.normpath( os.path.join( install_dir, target_directory ) ) )
+ # Make sure the target directory is not outside of $INSTALL_DIR.
+ if target_directory.startswith( os.path.realpath( install_dir ) ):
+ full_path_to_dir = os.path.abspath( os.path.join( install_dir, target_directory ) )
+ else:
+ full_path_to_dir = os.path.abspath( install_dir )
+ else:
+ full_path_to_dir = os.path.abspath( install_dir )
+ common_util.move_file( current_dir=work_dir,
+ source=downloaded_filename,
+ destination_dir=full_path_to_dir )
if action_type == 'download_by_url':
# Eliminate the download_by_url action so remaining actions can be processed correctly.
filtered_actions = actions[ 1: ]
@@ -237,9 +262,6 @@
current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
with lcd( current_dir ):
action_type, action_dict = action_tup
- # If a binary was found, we only need to process environment variables, file permissions, and any other binary downloads.
- if binary_found and action_type not in [ 'set_environment', 'chmod', 'download_binary' ]:
- continue
if action_type == 'make_directory':
common_util.make_directory( full_path=action_dict[ 'full_path' ] )
elif action_type == 'move_directory_files':
@@ -374,12 +396,26 @@
os.chmod( target_file, mode )
elif action_type == 'download_binary':
url = action_dict[ 'url' ]
- binary_found = common_util.download_binary_from_url( url, work_dir, install_dir )
- if binary_found:
- log.debug( 'Successfully downloaded binary from %s', url )
- else:
- log.error( 'Unable to download binary from %s', url )
-
+ target_directory = action_dict.get( 'target_directory', None )
+ try:
+ downloaded_filename = common_util.download_binary( url, work_dir )
+ except Exception, e:
+ log.exception( str( e ) )
+ # If the downloaded file exists, move it to $INSTALL_DIR. Put this outside the try/catch above so that
+ # any errors in the move step are correctly sent to the tool dependency error handler.
+ if downloaded_filename and os.path.exists( os.path.join( work_dir, downloaded_filename ) ):
+ if target_directory:
+ target_directory = os.path.realpath( os.path.normpath( os.path.join( install_dir, target_directory ) ) )
+ # Make sure the target directory is not outside of $INSTALL_DIR.
+ if target_directory.startswith( os.path.realpath( install_dir ) ):
+ full_path_to_dir = os.path.abspath( os.path.join( install_dir, target_directory ) )
+ else:
+ full_path_to_dir = os.path.abspath( install_dir )
+ else:
+ full_path_to_dir = os.path.abspath( install_dir )
+ common_util.move_file( current_dir=work_dir,
+ source=downloaded_filename,
+ destination_dir=full_path_to_dir )
def log_results( command, fabric_AttributeString, file_path ):
"""
diff -r 773941fd26a43c8522c3ff9977cfda5968f65505 -r f33c054d6d5b75ae545248d71ec559d74b4fa636 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -395,6 +395,7 @@
else:
url_template_elem = url_template_elems[ 0 ]
action_dict[ 'url' ] = Template( url_template_elem.text ).safe_substitute( platform_info_dict )
+ action_dict[ 'target_directory' ] = action_elem.get( 'target_directory', None )
elif action_type == 'shell_command':
# <action type="shell_command">make</action>
action_elem_text = evaluate_template( action_elem.text )
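The path handling added in both fabric_util hunks is the same containment check: resolve the optional target_directory relative to install_dir with normpath/realpath, and fall back to install_dir itself if the result escapes it. A minimal standalone sketch under that reading (resolve_target and the example paths are hypothetical):

import os

def resolve_target(install_dir, target_directory=None):
    """Return a download destination inside install_dir, never outside it."""
    if target_directory:
        resolved = os.path.realpath(os.path.normpath(
            os.path.join(install_dir, target_directory)))
        # Reject paths that resolve outside of install_dir.
        if resolved.startswith(os.path.realpath(install_dir)):
            return resolved
    return os.path.abspath(install_dir)

print(resolve_target('/opt/deps', 'bin'))        # /opt/deps/bin
print(resolve_target('/opt/deps', '../escape'))  # /opt/deps

One caveat worth noting on the startswith comparison: a sibling such as /opt/deps-extra also starts with /opt/deps, so a stricter check would compare against install_dir plus os.sep.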
Repository URL: https://bitbucket.org/galaxy/galaxy-central/