galaxy-commits
July 2014
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/04a53a8605f1/
Changeset: 04a53a8605f1
Branch: stable
User: nsoranzo
Date: 2014-07-08 17:58:10
Summary: Fix seconds_to_str().
Affected #: 1 file
diff -r c4519bae84d32d638d8fe168c6fbcab40f4e8449 -r 04a53a8605f18d28e4e084bc8352885611a87876 lib/galaxy/jobs/metrics/formatting.py
--- a/lib/galaxy/jobs/metrics/formatting.py
+++ b/lib/galaxy/jobs/metrics/formatting.py
@@ -15,4 +15,4 @@
elif value < 3600:
return "%s minutes" % ( value / 60 )
else:
- return "%s days and %s minutes" % ( value / 3600, ( value % 3600 ) / 60 )
+ return "%s hours and %s minutes" % ( value / 3600, ( value % 3600 ) / 60 )
https://bitbucket.org/galaxy/galaxy-central/commits/b1441a96b840/
Changeset: b1441a96b840
Branch: stable
User: dannon
Date: 2014-07-08 19:47:15
Summary: Merged in nsoranzo/galaxy-central/stable (pull request #436)
Fix seconds_to_str().
Affected #: 1 file
diff -r d9ab8058b2748de6b4b1a17646c2d7456d709877 -r b1441a96b8401fd14ab3364c5d6dfa0733df263e lib/galaxy/jobs/metrics/formatting.py
--- a/lib/galaxy/jobs/metrics/formatting.py
+++ b/lib/galaxy/jobs/metrics/formatting.py
@@ -15,4 +15,4 @@
elif value < 3600:
return "%s minutes" % ( value / 60 )
else:
- return "%s days and %s minutes" % ( value / 3600, ( value % 3600 ) / 60 )
+ return "%s hours and %s minutes" % ( value / 3600, ( value % 3600 ) / 60 )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
6 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/21b53ebad211/
Changeset: 21b53ebad211
Branch: stable
User: jmchilton
Date: 2014-06-29 20:24:32
Summary: Spelling fix in method name.
Affected #: 2 files
diff -r 65e5e5b72893445eef5c4d2f43350df72e89b436 -r 21b53ebad2119dd9968c72f67b53d069ebc17902 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2874,7 +2874,7 @@
Find any additional datasets generated by a tool and attach (for
cases where number of outputs is not known in advance).
"""
- return output_collect.collect_primary_datatasets( self, output, job_working_directory )
+ return output_collect.collect_primary_datasets( self, output, job_working_directory )
def to_dict( self, trans, link_details=False, io_details=False ):
""" Returns dict of tool. """
diff -r 65e5e5b72893445eef5c4d2f43350df72e89b436 -r 21b53ebad2119dd9968c72f67b53d069ebc17902 lib/galaxy/tools/parameters/output_collect.py
--- a/lib/galaxy/tools/parameters/output_collect.py
+++ b/lib/galaxy/tools/parameters/output_collect.py
@@ -14,7 +14,7 @@
DEFAULT_EXTRA_FILENAME_PATTERN = r"primary_DATASET_ID_(?P<designation>[^_]+)_(?P<visible>[^_]+)_(?P<ext>[^_]+)(_(?P<dbkey>[^_]+))?"
-def collect_primary_datatasets( tool, output, job_working_directory ):
+def collect_primary_datasets( tool, output, job_working_directory ):
app = tool.app
sa_session = tool.sa_session
new_primary_datasets = {}
https://bitbucket.org/galaxy/galaxy-central/commits/c14bdcb9ae96/
Changeset: c14bdcb9ae96
Branch: stable
User: jmchilton
Date: 2014-06-29 20:24:32
Summary: Allow discovered datasets to use input data format in 'ext' definition.
Affected #: 4 files
diff -r 21b53ebad2119dd9968c72f67b53d069ebc17902 -r c14bdcb9ae9643525c0797090f293030ea899948 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -1065,6 +1065,13 @@
out_data = dict( [ ( da.name, da.dataset ) for da in job.output_datasets ] )
inp_data.update( [ ( da.name, da.dataset ) for da in job.input_library_datasets ] )
out_data.update( [ ( da.name, da.dataset ) for da in job.output_library_datasets ] )
+ input_ext = 'data'
+ for _, data in inp_data.items():
+ # For loop odd, but sort simulating behavior in galaxy.tools.actions
+ if not data:
+ continue
+ input_ext = data.ext
+
param_dict = dict( [ ( p.name, p.value ) for p in job.parameters ] ) # why not re-use self.param_dict here? ##dunno...probably should, this causes tools.parameters.basic.UnvalidatedValue to be used in following methods instead of validated and transformed values during i.e. running workflows
param_dict = self.tool.params_from_strings( param_dict, self.app )
# Check for and move associated_files
@@ -1075,7 +1082,7 @@
# Create generated output children and primary datasets and add to param_dict
collected_datasets = {
'children': self.tool.collect_child_datasets(out_data, self.working_directory),
- 'primary': self.tool.collect_primary_datasets(out_data, self.working_directory)
+ 'primary': self.tool.collect_primary_datasets(out_data, self.working_directory, input_ext)
}
param_dict.update({'__collected_datasets__': collected_datasets})
# Certain tools require tasks to be completed after job execution
diff -r 21b53ebad2119dd9968c72f67b53d069ebc17902 -r c14bdcb9ae9643525c0797090f293030ea899948 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2869,12 +2869,12 @@
self.sa_session.flush()
return children
- def collect_primary_datasets( self, output, job_working_directory ):
+ def collect_primary_datasets( self, output, job_working_directory, input_ext ):
"""
Find any additional datasets generated by a tool and attach (for
cases where number of outputs is not known in advance).
"""
- return output_collect.collect_primary_datasets( self, output, job_working_directory )
+ return output_collect.collect_primary_datasets( self, output, job_working_directory, input_ext )
def to_dict( self, trans, link_details=False, io_details=False ):
""" Returns dict of tool. """
diff -r 21b53ebad2119dd9968c72f67b53d069ebc17902 -r c14bdcb9ae9643525c0797090f293030ea899948 lib/galaxy/tools/parameters/output_collect.py
--- a/lib/galaxy/tools/parameters/output_collect.py
+++ b/lib/galaxy/tools/parameters/output_collect.py
@@ -14,7 +14,7 @@
DEFAULT_EXTRA_FILENAME_PATTERN = r"primary_DATASET_ID_(?P<designation>[^_]+)_(?P<visible>[^_]+)_(?P<ext>[^_]+)(_(?P<dbkey>[^_]+))?"
-def collect_primary_datasets( tool, output, job_working_directory ):
+def collect_primary_datasets( tool, output, job_working_directory, input_ext ):
app = tool.app
sa_session = tool.sa_session
new_primary_datasets = {}
@@ -66,6 +66,8 @@
designation = fields_match.designation
visible = fields_match.visible
ext = fields_match.ext
+ if ext == "input":
+ ext = input_ext
dbkey = fields_match.dbkey
# Create new primary dataset
primary_data = app.model.HistoryDatasetAssociation( extension=ext,
diff -r 21b53ebad2119dd9968c72f67b53d069ebc17902 -r c14bdcb9ae9643525c0797090f293030ea899948 test/functional/tools/multi_output_configured.xml
--- a/test/functional/tools/multi_output_configured.xml
+++ b/test/functional/tools/multi_output_configured.xml
@@ -8,19 +8,24 @@
echo "1" > subdir2/CUSTOM_1.txt;
echo "2" > subdir2/CUSTOM_2.tabular;
echo "3" > subdir2/CUSTOM_3.txt;
+ mkdir subdir3;
+ echo "Foo" > subdir3/Foo;
</command>
<inputs>
- <param name="input" type="integer" value="7" />
+ <param name="num_param" type="integer" value="7" />
+ <param name="input" type="data" /></inputs><outputs><data format="txt" name="report"><discover_datasets pattern="__designation_and_ext__" directory="subdir1" /><discover_datasets pattern="CUSTOM_(?P<designation>.+)\.(?P<ext>.+)" directory="subdir2" />
+ <discover_datasets pattern="__designation__" directory="subdir3" ext="input" /></data></outputs><tests><test>
- <param name="input" value="7" />
+ <param name="num_param" value="7" />
+ <param name="input" ftype="txt" value="simple_line.txt"/><output name="report"><assert_contents><has_line line="Hello" />
@@ -37,6 +42,9 @@
<discovered_dataset designation="2" ftype="tabular"><assert_contents><has_line line="2" /></assert_contents></discovered_dataset>
+ <discovered_dataset designation="Foo" ftype="txt">
+ <assert_contents><has_line line="Foo" /></assert_contents>
+ </discovered_dataset>
</output>
</test>
</tests>
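To make the new behaviour concrete, here is a hypothetical, self-contained sketch of how a discovered file whose ext field is "input" picks up the format of the tool's input dataset. It is not the actual output_collect implementation: the literal DATASET_ID placeholder is kept rather than substituted with a real dataset id, and the helper name resolve_ext is invented for illustration.

    import re

    # Simplified version of the default discovery pattern quoted in the diff above.
    DEFAULT_PATTERN = r"primary_DATASET_ID_(?P<designation>[^_]+)_(?P<visible>[^_]+)_(?P<ext>[^_]+)(_(?P<dbkey>[^_]+))?"

    def resolve_ext(filename, input_ext, pattern=DEFAULT_PATTERN):
        """Return the datatype extension for a discovered file.

        If the matched ext is the literal string "input", inherit the
        extension of the tool's input dataset instead.
        """
        match = re.match(pattern, filename)
        if match is None:
            return None
        ext = match.group("ext")
        return input_ext if ext == "input" else ext

    # With a txt input dataset, a file declaring ext "input" is discovered as txt,
    # while an explicit extension is kept as-is.
    print(resolve_ext("primary_DATASET_ID_Foo_visible_input", "txt"))    # -> txt
    print(resolve_ext("primary_DATASET_ID_Bar_visible_tabular", "txt"))  # -> tabular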
https://bitbucket.org/galaxy/galaxy-central/commits/b6fae2cd673c/
Changeset: b6fae2cd673c
User: davebgx
Date: 2014-07-08 19:17:22
Summary: Remove deprecated "data admin" code, since the functionality was implemented better with Dan's data manager framework.
Affected #: 14 files
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -12,7 +12,6 @@
from galaxy.visualization.data_providers.registry import DataProviderRegistry
from galaxy.visualization.registry import VisualizationsRegistry
from galaxy.tools.imp_exp import load_history_imp_exp_tools
-from galaxy.tools.genome_index import load_genome_index_tools
from galaxy.sample_tracking import external_service_types
from galaxy.openid.providers import OpenIDProviders
from galaxy.tools.data_manager.manager import DataManagers
@@ -93,8 +92,6 @@
self.datatypes_registry.load_external_metadata_tool( self.toolbox )
# Load history import/export tools.
load_history_imp_exp_tools( self.toolbox )
- # Load genome indexer tool.
- load_genome_index_tools( self.toolbox )
# visualizations registry: associates resources with visualizations, controls how to render
self.visualizations_registry = None
if self.config.visualization_plugins_directory:
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -1192,9 +1192,6 @@
param_dict = self.tool.params_from_strings( param_dict, self.app )
# Check for and move associated_files
self.tool.collect_associated_files(out_data, self.working_directory)
- gitd = self.sa_session.query( model.GenomeIndexToolData ).filter_by( job=job ).first()
- if gitd:
- self.tool.collect_associated_files({'': gitd}, self.working_directory)
# Create generated output children and primary datasets and add to param_dict
collected_datasets = {
'children': self.tool.collect_child_datasets(out_data, self.working_directory),
@@ -1248,7 +1245,6 @@
self.external_output_metadata.cleanup_external_metadata( self.sa_session )
galaxy.tools.imp_exp.JobExportHistoryArchiveWrapper( self.job_id ).cleanup_after_job( self.sa_session )
galaxy.tools.imp_exp.JobImportHistoryArchiveWrapper( self.app, self.job_id ).cleanup_after_job()
- galaxy.tools.genome_index.GenomeIndexToolWrapper( self.job_id ).postprocessing( self.sa_session, self.app )
if delete_files:
self.app.object_store.delete(self.get_job(), base_dir='job_work', entire_dir=True, dir_only=True, extra_dir=str(self.job_id))
except:
@@ -1351,10 +1347,8 @@
dataset_path_rewriter = self.dataset_path_rewriter
job = self.get_job()
- # Job output datasets are combination of history, library, jeha and gitd datasets.
+ # Job output datasets are combination of history, library, and jeha datasets.
special = self.sa_session.query( model.JobExportHistoryArchive ).filter_by( job=job ).first()
- if not special:
- special = self.sa_session.query( model.GenomeIndexToolData ).filter_by( job=job ).first()
false_path = None
results = []
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 lib/galaxy/jobs/deferred/genome_index.py
--- a/lib/galaxy/jobs/deferred/genome_index.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""
-Module for managing genome transfer jobs.
-"""
-from __future__ import with_statement
-
-import logging, shutil, gzip, bz2, zipfile, tempfile, tarfile, sys, os
-
-from galaxy import eggs
-from sqlalchemy import and_
-from data_transfer import *
-
-log = logging.getLogger( __name__ )
-
-__all__ = [ 'GenomeIndexPlugin' ]
-
-class GenomeIndexPlugin( DataTransfer ):
-
- def __init__( self, app ):
- super( GenomeIndexPlugin, self ).__init__( app )
- self.app = app
- self.tool = app.toolbox.tools_by_id['__GENOME_INDEX__']
- self.sa_session = app.model.context.current
-
- def create_job( self, trans, path, indexes, dbkey, intname ):
- params = dict( user=trans.user.id, path=path, indexes=indexes, dbkey=dbkey, intname=intname )
- deferred = trans.app.model.DeferredJob( state = self.app.model.DeferredJob.states.NEW, plugin = 'GenomeIndexPlugin', params = params )
- self.sa_session.add( deferred )
- self.sa_session.flush()
- log.debug( 'Job created, id %d' % deferred.id )
- return deferred.id
-
- def check_job( self, job ):
- log.debug( 'Job check' )
- return 'ready'
-
- def run_job( self, job ):
- incoming = dict( path=os.path.abspath( job.params[ 'path' ] ), indexer=job.params[ 'indexes' ][0], user=job.params[ 'user' ] )
- indexjob = self.tool.execute( self, set_output_hid=False, history=None, incoming=incoming, transfer=None, deferred=job )
- job.params[ 'indexjob' ] = indexjob[0].id
- job.state = self.app.model.DeferredJob.states.RUNNING
- self.sa_session.add( job )
- self.sa_session.flush()
- return self.app.model.DeferredJob.states.RUNNING
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 lib/galaxy/jobs/deferred/genome_transfer.py
--- a/lib/galaxy/jobs/deferred/genome_transfer.py
+++ /dev/null
@@ -1,250 +0,0 @@
-"""
-Module for managing genome transfer jobs.
-"""
-from __future__ import with_statement
-
-import logging, shutil, gzip, bz2, zipfile, tempfile, tarfile, sys
-
-from galaxy import eggs
-from sqlalchemy import and_
-
-from galaxy.util.odict import odict
-from galaxy.workflow.modules import module_factory
-from galaxy.jobs.actions.post import ActionBox
-
-from galaxy.tools.parameters import visit_input_values
-from galaxy.tools.parameters.basic import DataToolParameter
-from galaxy.tools.data import ToolDataTableManager
-
-from galaxy.datatypes.checkers import *
-from galaxy.datatypes.sequence import Fasta
-from data_transfer import *
-
-log = logging.getLogger( __name__ )
-
-__all__ = [ 'GenomeTransferPlugin' ]
-
-class GenomeTransferPlugin( DataTransfer ):
-
- locations = {}
-
- def __init__( self, app ):
- super( GenomeTransferPlugin, self ).__init__( app )
- self.app = app
- self.tool = app.toolbox.tools_by_id['__GENOME_INDEX__']
- self.sa_session = app.model.context.current
- tdtman = ToolDataTableManager( app.config.tool_data_path )
- xmltree = tdtman.load_from_config_file( app.config.tool_data_table_config_path, app.config.tool_data_path )
- for node in xmltree:
- table = node.get('name')
- location = node.findall('file')[0].get('path')
- self.locations[table] = location
-
- def create_job( self, trans, url, dbkey, intname, indexes ):
- job = trans.app.transfer_manager.new( protocol='http', url=url )
- params = dict( user=trans.user.id, transfer_job_id=job.id, protocol='http', type='init_transfer', url=url, dbkey=dbkey, indexes=indexes, intname=intname, liftover=None )
- deferred = trans.app.model.DeferredJob( state = self.app.model.DeferredJob.states.NEW, plugin = 'GenomeTransferPlugin', params = params )
- self.sa_session.add( deferred )
- self.sa_session.flush()
- return deferred.id
-
- def check_job( self, job ):
- if job.params['type'] == 'init_transfer':
- if not hasattr(job, 'transfer_job'):
- job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
- else:
- self.sa_session.refresh( job.transfer_job )
- if job.transfer_job.state == 'done':
- transfer = job.transfer_job
- transfer.state = 'downloaded'
- job.params['type'] = 'extract_transfer'
- self.sa_session.add( job )
- self.sa_session.add( transfer )
- self.sa_session.flush()
- return self.job_states.READY
- elif job.transfer_job.state == 'running':
- return self.job_states.WAIT
- elif job.transfer_job.state == 'new':
- assert job.params[ 'protocol' ] in [ 'http', 'ftp', 'https' ], 'Unknown protocol %s' % job.params[ 'protocol' ]
- self.app.transfer_manager.run( job.transfer_job )
- self.sa_session.add( job.transfer_job )
- self.sa_session.flush()
- return self.job_states.WAIT
- else:
- log.error( "An error occurred while downloading from %s" % job.params[ 'url' ] )
- return self.job_states.INVALID
- elif job.params[ 'type' ] == 'extract_transfer':
- return self.job_states.READY
-
- def get_job_status( self, jobid ):
- job = self.sa_session.query( self.app.model.DeferredJob ).get( int( jobid ) )
- if 'transfer_job_id' in job.params:
- if not hasattr( job, 'transfer_job' ):
- job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
- else:
- self.sa_session.refresh( job.transfer_job )
- return job
-
- def run_job( self, job ):
- params = job.params
- dbkey = params[ 'dbkey' ]
- if not hasattr( job, 'transfer_job' ):
- job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
- else:
- self.sa_session.refresh( job.transfer_job )
- transfer = job.transfer_job
- if params[ 'type' ] == 'extract_transfer':
- CHUNK_SIZE = 2**20
- destpath = os.path.join( self.app.config.get( 'genome_data_path', 'tool-data/genome' ), job.params[ 'dbkey' ], 'seq' )
- destfile = '%s.fa' % job.params[ 'dbkey' ]
- destfilepath = os.path.join( destpath, destfile )
- tmpprefix = '%s_%s_download_unzip_' % ( job.params['dbkey'], job.params[ 'transfer_job_id' ] )
- tmppath = os.path.dirname( os.path.abspath( transfer.path ) )
- if not os.path.exists( destpath ):
- os.makedirs( destpath )
- protocol = job.params[ 'protocol' ]
- data_type = self._check_compress( transfer.path )
- if data_type is None:
- sniffer = Fasta()
- if sniffer.sniff( transfer.path ):
- data_type = 'fasta'
- fd, uncompressed = tempfile.mkstemp( prefix=tmpprefix, dir=tmppath, text=False )
- if data_type in [ 'tar.gzip', 'tar.bzip' ]:
- fp = open( transfer.path, 'r' )
- tar = tarfile.open( mode = 'r:*', bufsize = CHUNK_SIZE, fileobj = fp )
- files = tar.getmembers()
- for filename in files:
- z = tar.extractfile(filename)
- while 1:
- try:
- chunk = z.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- log.error( 'Problem decompressing compressed data' )
- exit()
- if not chunk:
- break
- os.write( fd, chunk )
- os.write( fd, '\n' )
- os.close( fd )
- tar.close()
- fp.close()
- elif data_type == 'gzip':
- compressed = gzip.open( transfer.path, mode = 'rb' )
- while 1:
- try:
- chunk = compressed.read( CHUNK_SIZE )
- except IOError:
- compressed.close()
- log.error( 'Problem decompressing compressed data' )
- exit()
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- compressed.close()
- elif data_type == 'bzip':
- compressed = bz2.BZ2File( transfer.path, mode = 'r' )
- while 1:
- try:
- chunk = compressed.read( CHUNK_SIZE )
- except IOError:
- compressed.close()
- log.error( 'Problem decompressing compressed data' )
- exit()
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- compressed.close()
- elif data_type == 'zip':
- uncompressed_name = None
- unzipped = False
- z = zipfile.ZipFile( transfer.path )
- z.debug = 3
- for name in z.namelist():
- if name.endswith('/'):
- continue
- zipped_file = z.open( name )
- while 1:
- try:
- chunk = zipped_file.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- log.error( 'Problem decompressing zipped data' )
- return self.app.model.DeferredJob.states.INVALID
- if not chunk:
- break
- os.write( fd, chunk )
- zipped_file.close()
- os.close( fd )
- z.close()
- elif data_type == 'fasta':
- uncompressed = transfer.path
- else:
- job.state = self.app.model.DeferredJob.states.INVALID
- log.error( "Unrecognized compression format for file %s." % transfer.path )
- self.sa_session.add( job )
- self.sa_session.flush()
- return
- shutil.move( uncompressed, destfilepath )
- if os.path.exists( transfer.path ):
- os.remove( transfer.path )
- os.chmod( destfilepath, 0644 )
- fastaline = '\t'.join( [ dbkey, dbkey, params[ 'intname' ], os.path.abspath( destfilepath ) ] )
- self._add_line( 'all_fasta', fastaline )
- if params[ 'indexes' ] is not None:
- job.state = self.app.model.DeferredJob.states.WAITING
- job.params[ 'indexjobs' ] = []
- else:
- job.state = self.app.model.DeferredJob.states.OK
- job.params[ 'type' ] = 'finish_transfer'
- transfer.path = os.path.abspath(destfilepath)
- transfer.state = 'done'
- self.sa_session.add( job )
- self.sa_session.add( transfer )
- if transfer.state == 'done':
- if params[ 'indexes' ] is not None:
- for indexer in params[ 'indexes' ]:
- incoming = dict(indexer=indexer, dbkey=params[ 'dbkey' ], intname=params[ 'intname' ], path=transfer.path, user=params['user'] )
- deferred = self.tool.execute( self, set_output_hid=False, history=None, incoming=incoming, transfer=transfer, deferred=job )
- job.params[ 'indexjobs' ].append( deferred[0].id )
- else:
- job.state = self.app.model.DeferredJob.states.OK
- self.sa_session.add( job )
- self.sa_session.flush()
- return self.app.model.DeferredJob.states.OK
-
- def _check_compress( self, filepath ):
- retval = ''
- if tarfile.is_tarfile( filepath ):
- retval = 'tar.'
- if check_zip( filepath ):
- return 'zip'
- is_bzipped, is_valid = check_bz2( filepath )
- if is_bzipped and is_valid:
- return retval + 'bzip'
- is_gzipped, is_valid = check_gzip( filepath )
- if is_gzipped and is_valid:
- return retval + 'gzip'
- return None
-
- def _add_line( self, locfile, newline ):
- filepath = self.locations[ locfile ]
- origlines = []
- output = []
- comments = []
- with open( filepath, 'r' ) as destfile:
- for line in destfile:
- if line.startswith( '#' ):
- comments.append( line.strip() )
- else:
- origlines.append( line.strip() )
- if newline not in origlines:
- origlines.append( newline )
- output.extend( comments )
- origlines.sort()
- output.extend( origlines )
- with open( filepath, 'w+' ) as destfile:
- destfile.write( '\n'.join( output ) )
-
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 lib/galaxy/jobs/deferred/liftover_transfer.py
--- a/lib/galaxy/jobs/deferred/liftover_transfer.py
+++ /dev/null
@@ -1,158 +0,0 @@
-"""
-Module for managing genome transfer jobs.
-"""
-from __future__ import with_statement
-
-import logging, shutil, gzip, tempfile, sys
-
-from galaxy import eggs
-from sqlalchemy import and_
-
-from galaxy.util.odict import odict
-from galaxy.workflow.modules import module_factory
-from galaxy.jobs.actions.post import ActionBox
-
-from galaxy.tools.parameters import visit_input_values
-from galaxy.tools.parameters.basic import DataToolParameter
-
-from galaxy.datatypes.checkers import *
-
-from data_transfer import *
-
-log = logging.getLogger( __name__ )
-
-__all__ = [ 'LiftOverTransferPlugin' ]
-
-class LiftOverTransferPlugin( DataTransfer ):
-
- locations = {}
-
- def __init__( self, app ):
- super( LiftOverTransferPlugin, self ).__init__( app )
- self.app = app
- self.sa_session = app.model.context.current
-
- def create_job( self, trans, url, dbkey, from_genome, to_genome, destfile, parentjob ):
- job = trans.app.transfer_manager.new( protocol='http', url=url )
- params = dict( user=trans.user.id, transfer_job_id=job.id, protocol='http',
- type='init_transfer', dbkey=dbkey, from_genome=from_genome,
- to_genome=to_genome, destfile=destfile, parentjob=parentjob )
- deferred = trans.app.model.DeferredJob( state = self.app.model.DeferredJob.states.NEW, plugin = 'LiftOverTransferPlugin', params = params )
- self.sa_session.add( deferred )
- self.sa_session.flush()
- return deferred.id
-
- def check_job( self, job ):
- if job.params['type'] == 'init_transfer':
- if not hasattr(job, 'transfer_job'):
- job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
- else:
- self.sa_session.refresh( job.transfer_job )
- if job.transfer_job.state == 'done':
- transfer = job.transfer_job
- transfer.state = 'downloaded'
- job.params['type'] = 'extract_transfer'
- self.sa_session.add( job )
- self.sa_session.add( transfer )
- self.sa_session.flush()
- return self.job_states.READY
- elif job.transfer_job.state == 'running':
- return self.job_states.WAIT
- elif job.transfer_job.state == 'new':
- assert job.params[ 'protocol' ] in [ 'http', 'ftp', 'https' ], 'Unknown protocol %s' % job.params[ 'protocol' ]
- ready = True
- parent = self.sa_session.query( self.app.model.DeferredJob ).get( int( job.params[ 'parentjob' ] ) )
- if not hasattr( parent, 'transfer_job' ):
- parent.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( parent.params[ 'transfer_job_id' ] ) )
- if parent.transfer_job.state not in [ 'ok', 'error', 'done' ]:
- ready = False
- for lo_job in parent.params[ 'liftover' ]:
- liftoverjob = self.sa_session.query( self.app.model.TransferJob ).get( int( lo_job ) )
- if liftoverjob:
- if liftoverjob.state not in [ 'ok', 'error', 'new', 'done' ]:
- ready = False
- if ready:
- self.app.transfer_manager.run( job.transfer_job )
- self.sa_session.add( job.transfer_job )
- self.sa_session.flush()
- return self.job_states.WAIT
- else:
- log.error( "An error occurred while downloading from %s" % job.transfer_job.params[ 'url' ] )
- return self.job_states.INVALID
- elif job.params[ 'type' ] == 'extract_transfer':
- return self.job_states.READY
-
- def get_job_status( self, jobid ):
- job = self.sa_session.query( self.app.model.DeferredJob ).get( int( jobid ) )
- return job
-
- def run_job( self, job ):
- params = job.params
- dbkey = params[ 'dbkey' ]
- source = params[ 'from_genome' ]
- target = params[ 'to_genome' ]
- if not hasattr( job, 'transfer_job' ):
- job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
- else:
- self.sa_session.refresh( job.transfer_job )
- transfer = job.transfer_job
- if params[ 'type' ] == 'extract_transfer':
- CHUNK_SIZE = 2**20
- destpath = os.path.join( self.app.config.get( 'genome_data_path', 'tool-data/genome' ), source, 'liftOver' )
- if not os.path.exists( destpath ):
- os.makedirs( destpath )
- destfile = job.params[ 'destfile' ]
- destfilepath = os.path.join( destpath, destfile )
- tmpprefix = '%s_%s_download_unzip_' % ( job.params['dbkey'], job.params[ 'transfer_job_id' ] )
- tmppath = os.path.dirname( os.path.abspath( transfer.path ) )
- if not os.path.exists( destpath ):
- os.makedirs( destpath )
- fd, uncompressed = tempfile.mkstemp( prefix=tmpprefix, dir=tmppath, text=False )
- chain = gzip.open( transfer.path, 'rb' )
- while 1:
- try:
- chunk = chain.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- log.error( 'Problem decompressing compressed data' )
- exit()
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- chain.close()
- # Replace the gzipped file with the decompressed file if it's safe to do so
- shutil.move( uncompressed, destfilepath )
- os.remove( transfer.path )
- os.chmod( destfilepath, 0644 )
- locline = '\t'.join( [ source, target, os.path.abspath( destfilepath ) ] )
- self._add_line( locline )
- job.state = self.app.model.DeferredJob.states.OK
- job.params[ 'type' ] = 'finish_transfer'
- transfer.path = os.path.abspath(destfilepath)
- transfer.state = 'done'
- parentjob = self.sa_session.query( self.app.model.DeferredJob ).get( int( job.params[ 'parentjob' ] ) )
- finished = True
- for i in parentjob.params[ 'liftover' ]:
- sibling = self.sa_session.query( self.app.model.DeferredJob ).get( int( i ) )
- if sibling.state not in [ 'done', 'ok', 'error' ]:
- finished = False
- if finished:
- parentjob.state = self.app.model.DeferredJob.states.OK
- self.sa_session.add( parentjob )
- self.sa_session.add( job )
- self.sa_session.add( transfer )
- self.sa_session.flush()
- return self.app.model.DeferredJob.states.OK
-
- def _add_line( self, newline ):
- filepath = 'tool-data/liftOver.loc'
- origlines = []
- with open( filepath, 'r' ) as destfile:
- for line in destfile:
- origlines.append( line.strip() )
- if newline not in origlines:
- origlines.append( newline )
- with open( filepath, 'w+' ) as destfile:
- destfile.write( '\n'.join( origlines ) )
-
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 lib/galaxy/tools/actions/index_genome.py
--- a/lib/galaxy/tools/actions/index_genome.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import tempfile
-from __init__ import ToolAction
-from galaxy.util.odict import odict
-from galaxy.tools.genome_index import *
-
-import logging
-log = logging.getLogger( __name__ )
-
-class GenomeIndexToolAction( ToolAction ):
- """Tool action used for exporting a history to an archive. """
-
- def execute( self, tool, trans, *args, **kwargs ):
- #
- # Get genome to index.
- #
- incoming = kwargs['incoming']
- #
- # Create the job and output dataset objects
- #
- job = trans.app.model.Job()
- job.tool_id = tool.id
- job.user_id = incoming['user']
- start_job_state = job.state # should be job.states.NEW
- job.state = job.states.WAITING # we need to set job state to something other than NEW,
- # or else when tracking jobs in db it will be picked up
- # before we have added input / output parameters
- trans.sa_session.add( job )
-
- # Create dataset that will serve as archive.
- temp_dataset = trans.app.model.Dataset( state=trans.app.model.Dataset.states.NEW )
- trans.sa_session.add( temp_dataset )
-
- trans.sa_session.flush() # ensure job.id and archive_dataset.id are available
- trans.app.object_store.create( temp_dataset ) # set the object store id, create dataset (because galaxy likes having datasets)
-
- #
- # Setup job and job wrapper.
- #
-
- # Add association for keeping track of index jobs, transfer jobs, and so on.
- user = trans.sa_session.query( trans.app.model.User ).get( int( incoming['user'] ) )
- assoc = trans.app.model.GenomeIndexToolData( job=job, dataset=temp_dataset, fasta_path=incoming['path'], \
- indexer=incoming['indexer'], user=user, \
- deferred_job=kwargs['deferred'], transfer_job=kwargs['transfer'] )
- trans.sa_session.add( assoc )
-
- job_wrapper = GenomeIndexToolWrapper( job )
- cmd_line = job_wrapper.setup_job( assoc )
-
- #
- # Add parameters to job_parameter table.
- #
- incoming[ '__GENOME_INDEX_COMMAND__' ] = cmd_line
- for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
- job.add_parameter( name, value )
-
- job.state = start_job_state # job inputs have been configured, restore initial job state
- job.set_handler(tool.get_job_handler(None))
- trans.sa_session.flush()
-
-
- # Queue the job for execution
- trans.app.job_queue.put( job.id, tool.id )
- log.info( "Added genome index job to the job queue, id: %s" % str( job.id ) )
-
- return job, odict()
-
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 lib/galaxy/tools/genome_index/__init__.py
--- a/lib/galaxy/tools/genome_index/__init__.py
+++ /dev/null
@@ -1,243 +0,0 @@
-from __future__ import with_statement
-
-import json
-import logging
-import os
-import shutil
-import tarfile
-import tempfile
-
-from galaxy import model, util
-from galaxy.web.framework.helpers import to_unicode
-from galaxy.model.item_attrs import UsesAnnotations
-from galaxy.util.json import *
-from galaxy.web.base.controller import UsesHistoryMixin
-from galaxy.tools.data import ToolDataTableManager
-
-
-log = logging.getLogger(__name__)
-
-def load_genome_index_tools( toolbox ):
- """ Adds tools for indexing genomes via the main job runner. """
- # Create XML for loading the tool.
- tool_xml_text = """
- <tool id="__GENOME_INDEX__" name="Index Genome" version="0.1" tool_type="genome_index">
- <type class="GenomeIndexTool" module="galaxy.tools"/>
- <action module="galaxy.tools.actions.index_genome" class="GenomeIndexToolAction"/>
- <command>$__GENOME_INDEX_COMMAND__ $output_file $output_file.files_path "$__app__.config.rsync_url" "$__app__.config.tool_data_path"</command>
- <inputs>
- <param name="__GENOME_INDEX_COMMAND__" type="hidden"/>
- </inputs>
- <outputs>
- <data format="txt" name="output_file"/>
- </outputs>
- <stdio>
- <exit_code range="1:" err_level="fatal" />
- </stdio>
- </tool>
- """
-
- # Load index tool.
- tmp_name = tempfile.NamedTemporaryFile()
- tmp_name.write( tool_xml_text )
- tmp_name.flush()
- genome_index_tool = toolbox.load_tool( tmp_name.name )
- toolbox.tools_by_id[ genome_index_tool.id ] = genome_index_tool
- log.debug( "Loaded genome index tool: %s", genome_index_tool.id )
-
-class GenomeIndexToolWrapper( object ):
- """ Provides support for performing jobs that index a genome. """
- def __init__( self, job_id ):
- self.locations = dict()
- self.job_id = job_id
-
- def setup_job( self, genobj ):
- """ Perform setup for job to index a genome and return an archive. Method generates
- attribute files, sets the corresponding attributes in the associated database
- object, and returns a command line for running the job. The command line
- includes the command, inputs, and options; it does not include the output
- file because it must be set at runtime. """
-
- #
- # Create and return command line for running tool.
- #
- scriptpath = os.path.join( os.path.abspath( os.getcwd() ), "lib/galaxy/tools/genome_index/index_genome.py" )
- return "python %s %s %s" % ( scriptpath, genobj.indexer, genobj.fasta_path )
-
- def postprocessing( self, sa_session, app ):
- """ Finish the job, move the finished indexes to their final resting place,
- and update the .loc files where applicable. """
- gitd = sa_session.query( model.GenomeIndexToolData ).filter_by( job_id=self.job_id ).first()
- indexdirs = dict( bfast='bfast_index', bowtie='bowtie_index', bowtie2='bowtie2_index',
- bwa='bwa_index', perm='perm_%s_index', picard='srma_index', sam='sam_index' )
-
-
- if gitd:
- fp = open( gitd.dataset.get_file_name(), 'r' )
- deferred = sa_session.query( model.DeferredJob ).filter_by( id=gitd.deferred_job_id ).first()
- try:
- logloc = json.load( fp )
- except ValueError:
- deferred.state = app.model.DeferredJob.states.ERROR
- sa_session.add( deferred )
- sa_session.flush()
- log.debug( 'Indexing job failed, setting deferred job state to error.' )
- return False
- finally:
- fp.close()
- destination = None
- tdtman = ToolDataTableManager( app.config.tool_data_path )
- xmltree = tdtman.load_from_config_file( app.config.tool_data_table_config_path, app.config.tool_data_path )
- for node in xmltree:
- table = node.get('name')
- location = node.findall('file')[0].get('path')
- self.locations[table] = os.path.abspath( location )
- locbase = os.path.abspath( os.path.split( self.locations['all_fasta'] )[0] )
- params = deferred.params
- dbkey = params[ 'dbkey' ]
- basepath = os.path.join( os.path.abspath( app.config.genome_data_path ), dbkey )
- intname = params[ 'intname' ]
- indexer = gitd.indexer
- workingdir = os.path.abspath( gitd.dataset.extra_files_path )
- location = []
- indexdata = gitd.dataset.extra_files_path
- if indexer == '2bit':
- indexdata = os.path.join( workingdir, '%s.2bit' % dbkey )
- destination = os.path.join( basepath, 'seq', '%s.2bit' % dbkey )
- location.append( dict( line='\t'.join( [ 'seq', dbkey, destination ] ), file= os.path.join( locbase, 'alignseq.loc' ) ) )
- elif indexer == 'bowtie':
- self._ex_tar( workingdir, 'cs.tar' )
- destination = os.path.join( basepath, 'bowtie_index' )
- for var in [ 'nt', 'cs' ]:
- for line in logloc[ var ]:
- idx = line
- if var == 'nt':
- locfile = self.locations[ 'bowtie_indexes' ]
- locdir = os.path.join( destination, idx )
- else:
- locfile = self.locations[ 'bowtie_indexes_color' ]
- locdir = os.path.join( destination, var, idx )
- location.append( dict( line='\t'.join( [ dbkey, dbkey, intname, locdir ] ), file=locfile ) )
- elif indexer == 'bowtie2':
- destination = os.path.join( basepath, 'bowtie2_index' )
- for line in logloc[ 'nt' ]:
- idx = line
- locfile = self.locations[ 'bowtie2_indexes' ]
- locdir = os.path.join( destination, idx )
- location.append( dict( line='\t'.join( [ dbkey, dbkey, intname, locdir ] ), file=locfile ) )
- elif indexer == 'bwa':
- self._ex_tar( workingdir, 'cs.tar' )
- destination = os.path.join( basepath, 'bwa_index' )
- for var in [ 'nt', 'cs' ]:
- for line in logloc[ var ]:
- idx = line
- if var == 'nt':
- locfile = self.locations[ 'bwa_indexes' ]
- locdir = os.path.join( destination, idx )
- else:
- locfile = self.locations[ 'bwa_indexes_color' ]
- locdir = os.path.join( destination, var, idx )
- location.append( dict( line='\t'.join( [ dbkey, dbkey, intname, locdir ] ), file=locfile ) )
- elif indexer == 'perm':
- self._ex_tar( workingdir, 'cs.tar' )
- destination = os.path.join( basepath, 'perm_index' )
- for var in [ 'nt', 'cs' ]:
- for line in logloc[ var ]:
- idx = line.pop()
- if var == 'nt':
- locfile = self.locations[ 'perm_base_indexes' ]
- locdir = os.path.join( destination, idx )
- else:
- locfile = self.locations[ 'perm_color_indexes' ]
- locdir = os.path.join( destination, var, idx )
- line.append( locdir )
- location.append( dict( line='\t'.join( line ), file=locfile ) )
- elif indexer == 'picard':
- destination = os.path.join( basepath, 'srma_index' )
- for var in [ 'nt' ]:
- for line in logloc[ var ]:
- idx = line
- locfile = self.locations[ 'picard_indexes' ]
- locdir = os.path.join( destination, idx )
- location.append( dict( line='\t'.join( [ dbkey, dbkey, intname, locdir ] ), file=locfile ) )
- elif indexer == 'sam':
- destination = os.path.join( basepath, 'sam_index' )
- for var in [ 'nt' ]:
- for line in logloc[ var ]:
- locfile = self.locations[ 'sam_fa_indexes' ]
- locdir = os.path.join( destination, line )
- location.append( dict( line='\t'.join( [ 'index', dbkey, locdir ] ), file=locfile ) )
-
- if destination is not None and os.path.exists( os.path.split( destination )[0] ) and not os.path.exists( destination ):
- log.debug( 'Moving %s to %s' % ( indexdata, destination ) )
- shutil.move( indexdata, destination )
- if indexer not in [ '2bit' ]:
- genome = '%s.fa' % dbkey
- target = os.path.join( destination, genome )
- fasta = os.path.abspath( os.path.join( basepath, 'seq', genome ) )
- self._check_link( fasta, target )
- if os.path.exists( os.path.join( destination, 'cs' ) ):
- target = os.path.join( destination, 'cs', genome )
- fasta = os.path.abspath( os.path.join( basepath, 'seq', genome ) )
- self._check_link( fasta, target )
- for line in location:
- self._add_line( line[ 'file' ], line[ 'line' ] )
- deferred.state = app.model.DeferredJob.states.OK
- sa_session.add( deferred )
- sa_session.flush()
-
-
- def _check_link( self, targetfile, symlink ):
- target = os.path.relpath( targetfile, os.path.dirname( symlink ) )
- filename = os.path.basename( targetfile )
- if not os.path.exists( targetfile ): # this should never happen.
- raise Exception, "%s not found. Unable to proceed without a FASTA file. Aborting." % targetfile
- if os.path.exists( symlink ) and os.path.islink( symlink ):
- if os.path.realpath( symlink ) == os.path.abspath( targetfile ): # symlink exists, points to the correct FASTA file.
- return
- else: # no it doesn't. Make a new one, and this time do it right.
- os.remove( symlink )
- os.symlink( target, symlink )
- return
- elif not os.path.exists( symlink ): # no symlink to the FASTA file. Create one.
- os.symlink( target, symlink )
- return
- elif os.path.exists( symlink ) and not os.path.islink( symlink ):
- if self._hash_file( targetfile ) == self._hash_file( symlink ): # files are identical. No need to panic.
- return
- else:
- if os.path.getsize( symlink ) == 0: # somehow an empty file got copied instead of the symlink. Delete with extreme prejudice.
- os.remove( symlink )
- os.symlink( target, symlink )
- return
- else:
- raise Exception, "Regular file %s exists, is not empty, contents do not match %s." % ( symlink, targetfile )
-
- def _hash_file( self, filename ):
- import hashlib
- md5 = hashlib.md5()
- with open( filename, 'rb' ) as f:
- for chunk in iter( lambda: f.read( 8192 ), '' ):
- md5.update( chunk )
- return md5.digest()
-
-
- def _ex_tar( self, directory, filename ):
- fh = tarfile.open( os.path.join( directory, filename ) )
- fh.extractall( path=directory )
- fh.close()
- os.remove( os.path.join( directory, filename ) )
-
- def _add_line( self, locfile, newline ):
- filepath = locfile
- origlines = []
- output = []
- comments = []
- with open( filepath, 'r' ) as destfile:
- for line in destfile:
- origlines.append( line.strip() )
- if newline not in origlines:
- origlines.append( newline )
- with open( filepath, 'w+' ) as destfile:
- origlines.append( '' )
- destfile.write( '\n'.join( origlines ) )
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 lib/galaxy/tools/genome_index/index_genome.py
--- a/lib/galaxy/tools/genome_index/index_genome.py
+++ /dev/null
@@ -1,327 +0,0 @@
-#!/usr/bin/env python
-"""
-Export a history to an archive file using attribute files.
-
-usage: %prog history_attrs dataset_attrs job_attrs out_file
- -G, --gzip: gzip archive file
-"""
-from __future__ import with_statement
-
-import json
-import optparse
-import os
-import shlex
-import shutil
-import subprocess
-import sys
-import tarfile
-import tempfile
-import time
-
-
-class ManagedIndexer():
- def __init__( self, output_file, infile, workingdir, rsync_url, tooldata ):
- self.tooldatapath = os.path.abspath( tooldata )
- self.workingdir = os.path.abspath( workingdir )
- self.outfile = open( os.path.abspath( output_file ), 'w' )
- self.basedir = os.path.split( self.workingdir )[0]
- self.fasta = os.path.abspath( infile )
- self.locations = dict( nt=[], cs=[] )
- self.log = []
- self.rsync_opts = '-aclSzq'
- self.rsync_url = rsync_url
- self.indexers = {
- 'bwa': '_bwa',
- 'bowtie': '_bowtie',
- 'bowtie2': '_bowtie2',
- '2bit': '_twobit',
- 'perm': '_perm',
- 'bfast': '_bfast',
- 'picard': '_picard',
- 'sam': '_sam'
- }
- if not os.path.exists( self.workingdir ):
- os.makedirs( self.workingdir )
- self.logfile = open( os.path.join( self.workingdir, 'ManagedIndexer.log' ), 'w+' )
-
- def run_indexer( self, indexer ):
- self.fapath = self.fasta
- self.fafile = os.path.basename( self.fapath )
- self.genome = os.path.splitext( self.fafile )[0]
- with WithChDir( self.basedir ):
- if indexer not in self.indexers:
- sys.stderr.write( 'The requested indexing function does not exist' )
- exit(127)
- else:
- with WithChDir( self.workingdir ):
- self._log( 'Running indexer %s.' % indexer )
- result = getattr( self, self.indexers[ indexer ] )()
- if result in [ None, False ]:
- sys.stderr.write( 'Error running indexer %s, %s' % ( indexer, result ) )
- self._flush_files()
- exit(1)
- else:
- self._log( self.locations )
- self._log( 'Indexer %s completed successfully.' % indexer )
- self._flush_files()
- exit(0)
-
- def _check_link( self ):
- self._log( 'Checking symlink to %s' % self.fafile )
- if not os.path.exists( self.fafile ):
- self._log( 'Symlink not found, creating' )
- os.symlink( os.path.relpath( self.fapath ), self.fafile )
-
- def _do_rsync( self, idxpath ):
- self._log( 'Trying rsync at %s/%s%s' % ( self.rsync_url, self.genome, idxpath ) )
- result = subprocess.call( shlex.split( 'rsync %s %s/%s%s .' % ( self.rsync_opts, self.rsync_url, self.genome, idxpath ) ), stderr=self.logfile )
- if result != 0:
- self._log( 'Rsync failed or index not found. Generating.' )
- else:
- self._log( 'Rsync succeeded.' )
- return result
-
- def _flush_files( self ):
- json.dump( self.locations, self.outfile )
- self.outfile.close()
- self.logfile.close()
-
- def _log( self, stuff ):
- timestamp = time.strftime('%Y-%m-%d %H:%M:%S %z')
- self.logfile.write( "[%s] %s\n" % (timestamp, stuff) )
-
- def _bwa( self ):
- result = self._do_rsync( '/bwa_index/' )
- if result == 0:
- self.locations[ 'nt' ].append( self.fafile )
- return self._bwa_cs()
- else:
- self._check_link()
- command = shlex.split( 'bwa index -a bwtsw %s' % self.fafile )
- result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
- if result != 0:
- newcommand = shlex.split( 'bwa index -c %s' % self.fafile )
- result = call( newcommand, stderr=self.logfile, stdout=self.logfile )
- if result == 0:
- self.locations[ 'nt' ].append( self.fafile )
- os.remove( self.fafile )
- return self._bwa_cs()
- else:
- self._log( 'BWA (base) exited with code %s' % result )
- return False
-
- def _bwa_cs( self ):
- if not os.path.exists( os.path.join( self.workingdir, 'cs' ) ):
- os.makedirs( 'cs' )
- with WithChDir( 'cs' ):
- self._check_link()
- command = shlex.split( 'bwa index -a bwtsw -c %s' % self.fafile )
- result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
- if result != 0:
- newcommand = shlex.split( 'bwa index -c %s' % self.fafile )
- result = call( newcommand, stderr=self.logfile, stdout=self.logfile )
- if result == 0:
- self.locations[ 'cs' ].append( self.fafile )
- os.remove( self.fafile )
- else:
- self._log( 'BWA (color) exited with code %s' % result )
- return False
- else:
- self.locations[ 'cs' ].append( self.fafile )
- os.remove( self.fafile )
- else:
- self.locations[ 'cs' ].append( self.fafile )
- temptar = tarfile.open( 'cs.tar', 'w' )
- temptar.add( 'cs' )
- temptar.close()
- shutil.rmtree( 'cs' )
- return True
-
-
- def _bowtie( self ):
- result = self._do_rsync( '/bowtie_index/' )
- if result == 0:
- self.locations[ 'nt' ].append( self.genome )
- return self._bowtie_cs()
- else:
- self._check_link()
- command = shlex.split( 'bowtie-build -f %s %s' % ( self.fafile, self.genome ) )
- result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
- if result == 0:
- self.locations[ 'nt' ].append( self.genome )
- os.remove( self.fafile )
- return self._bowtie_cs()
- else:
- self._log( 'Bowtie (base) exited with code %s' % result )
- return False
-
- def _bowtie_cs( self ):
- indexdir = os.path.join( os.getcwd(), 'cs' )
- if not ( os.path.exists( indexdir ) ):
- os.makedirs( indexdir )
- with WithChDir( indexdir ):
- self._check_link()
- command = shlex.split( 'bowtie-build -C -f %s %s' % ( self.fafile, self.genome ) )
- result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
- if result == 0:
- self.locations[ 'cs' ].append( self.genome )
- else:
- self._log( 'Bowtie (color) exited with code %s' % result )
- return False
- os.remove( os.path.join( indexdir, self.fafile ) )
- else:
- self.locations[ 'cs' ].append( self.genome )
- temptar = tarfile.open( 'cs.tar', 'w' )
- temptar.add( 'cs' )
- temptar.close()
- shutil.rmtree( 'cs' )
- return True
-
-
- def _bowtie2( self ):
- result = self._do_rsync( '/bowtie2_index/' )
- if result == 0:
- self.locations[ 'nt' ].append( self.fafile )
- return True
- ref_base = os.path.splitext(self.fafile)[0]
- self._check_link()
- command = shlex.split( 'bowtie2-build %s %s' % ( self.fafile, ref_base ) )
- result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
- if result == 0:
- self.locations[ 'nt' ].append( ref_base )
- os.remove( self.fafile )
- return True
- else:
- self._log( 'Bowtie2 exited with code %s' % result )
- return False
-
- def _twobit( self ):
- """Index reference files using 2bit for random access.
- """
- result = self._do_rsync( '/seq/%s.2bit' % self.genome )
- if result == 0:
- self.locations['nt'].append( "%s.2bit" % self.genome )
- return True
- else:
- out_file = "%s.2bit" % self.genome
- self._check_link()
- command = shlex.split( 'faToTwoBit %s %s' % ( self.fafile, out_file ) )
- result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
- if result == 0:
- self.locations['nt'].append( out_file )
- os.remove( self.fafile )
- return True
- else:
- self._log( 'faToTwoBit exited with code %s' % result )
- return False
-
- def _perm( self ):
- result = self._do_rsync( '/perm_index/' )
- self._check_link()
- genome = self.genome
- read_length = 50
- for seed in [ 'F3', 'F4' ]:
- key = '%s_%s_%s' % (self.genome, seed, read_length)
- desc = '%s: seed=%s, read length=%s' % (self.genome, seed, read_length)
- index = "%s_base_%s_%s.index" % (self.genome, seed, read_length)
- if not os.path.exists( index ):
- command = shlex.split("PerM %s %s --readFormat fastq --seed %s -m -s %s" % (self.fafile, read_length, seed, index))
- result = subprocess.call( command )
- if result != 0:
- self._log( 'PerM (base) exited with code %s' % result )
- return False
- self.locations[ 'nt' ].append( [ key, desc, index ] )
- os.remove( self.fafile )
- return self._perm_cs()
-
- def _perm_cs( self ):
- genome = self.genome
- read_length = 50
- if not os.path.exists( 'cs' ):
- os.makedirs( 'cs' )
- with WithChDir( 'cs' ):
- self._check_link()
- for seed in [ 'F3', 'F4' ]:
- key = '%s_%s_%s' % (genome, seed, read_length)
- desc = '%s: seed=%s, read length=%s' % (genome, seed, read_length)
- index = "%s_color_%s_%s.index" % (genome, seed, read_length)
- if not os.path.exists( index ):
- command = shlex.split("PerM %s %s --readFormat csfastq --seed %s -m -s %s" % (self.fafile, read_length, seed, index))
- result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
- if result != 0:
- self._log( 'PerM (color) exited with code %s' % result )
- return False
- self.locations[ 'cs' ].append( [ key, desc, index ] )
- os.remove( self.fafile )
- temptar = tarfile.open( 'cs.tar', 'w' )
- temptar.add( 'cs' )
- temptar.close()
- shutil.rmtree( 'cs' )
- return True
-
- def _picard( self ):
- result = self._do_rsync( '/srma_index/' )
- if result == 0 and os.path.exists( '%s.dict' % self.genome):
- self.locations[ 'nt' ].append( self.fafile )
- return True
- local_ref = self.fafile
- srma = os.path.abspath( os.path.join( self.tooldatapath, 'shared/jars/picard/CreateSequenceDictionary.jar' ) )
- genome = os.path.splitext( self.fafile )[0]
- self._check_link()
- if not os.path.exists( '%s.fai' % self.fafile ) and not os.path.exists( '%s.fai' % self.genome ):
- command = shlex.split( 'samtools faidx %s' % self.fafile )
- subprocess.call( command, stderr=self.logfile )
- command = shlex.split( "java -jar %s R=%s O=%s.dict URI=%s" \
- % ( srma, local_ref, genome, local_ref ) )
- if not os.path.exists( '%s.dict' % self.genome ):
- result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
- self._log( ' '.join( command ) )
- if result != 0:
- self._log( 'Picard exited with code %s' % result )
- return False
- self.locations[ 'nt' ].append( self.fafile )
- os.remove( self.fafile )
- return True
-
- def _sam( self ):
- local_ref = self.fafile
- local_file = os.path.splitext( self.fafile )[ 0 ]
- print 'Trying rsync'
- result = self._do_rsync( '/sam_index/' )
- if result == 0 and ( os.path.exists( '%s.fai' % self.fafile ) or os.path.exists( '%s.fai' % self.genome ) ):
- self.locations[ 'nt' ].append( '%s.fai' % local_ref )
- return True
- self._check_link()
- print 'Trying indexer'
- command = shlex.split("samtools faidx %s" % local_ref)
- result = subprocess.call( command, stderr=self.logfile, stdout=self.logfile )
- if result != 0:
- self._log( 'SAM exited with code %s' % result )
- return False
- else:
- self.locations[ 'nt' ].append( '%s.fai' % local_ref )
- os.remove( local_ref )
- return True
-
-class WithChDir():
- def __init__( self, target ):
- self.working = target
- self.previous = os.getcwd()
- def __enter__( self ):
- os.chdir( self.working )
- def __exit__( self, *args ):
- os.chdir( self.previous )
-
-
-if __name__ == "__main__":
- # Parse command line.
- parser = optparse.OptionParser()
- (options, args) = parser.parse_args()
- indexer, infile, outfile, working_dir, rsync_url, tooldata = args
-
- # Create archive.
- idxobj = ManagedIndexer( outfile, infile, working_dir, rsync_url, tooldata )
- returncode = idxobj.run_indexer( indexer )
- if not returncode:
- exit(1)
- exit(0)
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 lib/galaxy/webapps/galaxy/controllers/data_admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/data_admin.py
+++ /dev/null
@@ -1,299 +0,0 @@
-import ftplib
-import json
-import sys
-from galaxy import model, util
-from galaxy.jobs import transfer_manager
-from galaxy.model.orm import *
-from galaxy.web.base.controller import *
-from galaxy.web.framework.helpers import grids, iff, time_ago
-from library_common import get_comptypes, lucene_search, whoosh_search
-
-
-# Older py compatibility
-try:
- set()
-except:
- from sets import Set as set
-
-import logging
-log = logging.getLogger( __name__ )
-
-class DataAdmin( BaseUIController ):
- jobstyles = dict(
- done='panel-done-message',
- waiting='state-color-waiting',
- running='state-color-running',
- downloaded='state-color-running',
- new='state-color-new',
- ok='panel-done-message',
- error='panel-error-message',
- queued='state-color-waiting'
- )
-
- @web.expose
- @web.require_admin
- def manage_data( self, trans, **kwd ):
- if trans.app.config.get_bool( 'enable_beta_job_managers', False ) == False:
- return trans.fill_template( '/admin/data_admin/generic_error.mako', message='This feature requires that enable_beta_job_managers be set to True in your Galaxy configuration.' )
- if 'all_fasta' not in trans.app.tool_data_tables.data_tables:
- return trans.fill_template( '/admin/data_admin/generic_error.mako', message='The local data manager requires that an all_fasta entry exists in your tool_data_table_conf.xml.' )
- indextable = {}
- dbkeys = []
- labels = { 'bowtie_indexes': 'Bowtie', 'bowtie2_indexes': 'Bowtie 2', 'bwa_indexes': 'BWA', 'srma_indexes': 'Picard', 'sam_fa_indexes': 'SAM', 'perm_base_indexes': 'PerM' }
- tablenames = { 'Bowtie': 'bowtie_indexes', 'Bowtie 2': 'bowtie2_indexes', 'BWA': 'bwa_indexes', 'Picard': 'srma_indexes', 'SAM': 'sam_fa_indexes', 'PerM': 'perm_base_indexes' }
- indexfuncs = dict( bowtie_indexes='bowtie', bowtie2_indexes='bowtie2', bwa_indexes='bwa', srma_indexes='picard', sam_fa_indexes='sam', perm_base_indexes='perm' )
- for genome in trans.app.tool_data_tables.data_tables[ 'all_fasta' ].data:
- dbkey = genome[0]
- dbkeys.append( dbkey )
- indextable[ dbkey ] = dict( indexes=dict(), name=genome[2], path=genome[3] )
- for genome in indextable:
- for label in labels:
- indextable[ genome ][ 'indexes' ][ label ] = 'Generate'
- if label not in trans.app.tool_data_tables.data_tables:
- indextable[ genome ][ 'indexes' ][ label ] = 'Disabled'
- else:
- for row in trans.app.tool_data_tables.data_tables[ label ].data:
- if genome in row or row[0].startswith( genome ):
- indextable[ genome ][ 'indexes' ][ label ] = 'Generated'
- jobgrid = []
- sa_session = trans.app.model.context.current
- jobs = sa_session.query( model.GenomeIndexToolData ).order_by( model.GenomeIndexToolData.created_time.desc() ).filter_by( user_id=trans.get_user().id ).group_by( model.GenomeIndexToolData.deferred ).limit( 20 ).all()
- prevjobid = 0
- for job in jobs:
- if prevjobid == job.deferred.id:
- continue
- prevjobid = job.deferred.id
- state = job.deferred.state
- params = job.deferred.params
- if job.transfer is not None:
- jobtype = 'download'
- else:
- jobtype = 'index'
- indexers = ', '.join( params['indexes'] )
- jobgrid.append( dict( jobtype=jobtype, indexers=indexers, rowclass=state, deferred=job.deferred.id, state=state, intname=job.deferred.params[ 'intname' ], dbkey=job.deferred.params[ 'dbkey' ] ) )
- styles = dict( Generate=self.jobstyles['new'], Generated=self.jobstyles['ok'], Disabled=self.jobstyles['error'] )
- return trans.fill_template( '/admin/data_admin/local_data.mako', jobgrid=jobgrid, indextable=indextable, labels=labels, dbkeys=dbkeys, styles=styles, indexfuncs=indexfuncs )
-
- @web.expose
- @web.require_admin
- def add_genome( self, trans, **kwd ):
- if trans.app.config.get_bool( 'enable_beta_job_managers', False ) == False:
- return trans.fill_template( '/admin/data_admin/generic_error.mako', message='This feature requires that enable_beta_job_managers be set to True in your Galaxy configuration.' )
- dbkeys = trans.ucsc_builds
- ensemblkeys = trans.ensembl_builds
- ncbikeys = trans.ncbi_builds
- return trans.fill_template( '/admin/data_admin/data_form.mako', dbkeys=dbkeys, ensembls=ensemblkeys, ncbi=ncbikeys )
-
- @web.expose
- @web.require_admin
- def genome_search( self, trans, **kwd ):
- results = list()
- ncbikeys = trans.ncbi_builds
- params = util.Params( kwd )
- search = params.get( 'q', None )
- limit = params.get( 'limit', None )
- if search is not None:
- query = search.lower()
- for row in ncbikeys:
- if query in row[ 'name' ].lower() or query in row[ 'dbkey' ].lower():
- result = '|'.join( [ ': '.join( [ row[ 'dbkey' ], row[ 'name' ] ] ), row[ 'dbkey' ] ] )
- results.append( result )
- if len( results ) >= limit:
- break
- return trans.fill_template( '/admin/data_admin/ajax_status.mako', json='\n'.join( results ) )
-
- @web.expose
- @web.require_admin
- def index_build( self, trans, **kwd ):
- """Index a previously downloaded genome."""
- params = util.Params( kwd )
- path = os.path.abspath( params.get( 'path', None ) )
- indexes = [ params.get( 'indexes', None ) ]
- dbkey = params.get( 'dbkey', None )
- intname = params.get( 'longname', None )
- indexjob = trans.app.job_manager.deferred_job_queue.plugins['GenomeIndexPlugin'].create_job( trans, path, indexes, dbkey, intname )
- return indexjob
-
- @web.expose
- @web.require_admin
- def download_build( self, trans, **kwd ):
- """Download a genome from a remote source and add it to the library."""
- params = util.Params( kwd )
- paramdict = build_param_dict( params, trans )
- if paramdict[ 'status' ] == 'error':
- return trans.fill_template( '/admin/data_admin/generic_error.mako', message=paramdict[ 'message' ] )
- url = paramdict[ 'url' ]
- liftover = paramdict[ 'liftover' ]
- dbkey = paramdict[ 'dbkey' ]
- indexers = paramdict[ 'indexers' ]
- longname = paramdict[ 'longname' ]
- dbkeys = dict()
- protocol = 'http'
- if url is None:
- return trans.fill_template( '/admin/data_admin/generic_error.mako', message='Unable to generate a valid URL with the specified parameters.' )
- jobid = trans.app.job_manager.deferred_job_queue.plugins['GenomeTransferPlugin'].create_job( trans, url, dbkey, longname, indexers )
- chainjob = []
- if liftover is not None:
- for chain in liftover:
- liftover_url = u'ftp://hgdownload.cse.ucsc.edu%s' % chain[0]
- from_genome = chain[1]
- to_genome = chain[2]
- destfile = liftover_url.split('/')[-1].replace('.gz', '')
- lochain = trans.app.job_manager.deferred_job_queue.plugins['LiftOverTransferPlugin'].create_job( trans, liftover_url, dbkey, from_genome, to_genome, destfile, jobid )
- chainjob.append( lochain )
- job = trans.app.job_manager.deferred_job_queue.plugins['GenomeTransferPlugin'].get_job_status( jobid )
- job.params['liftover'] = chainjob
- trans.app.model.context.current.add( job )
- trans.app.model.context.current.flush()
- return trans.response.send_redirect( web.url_for( controller='data_admin',
- action='monitor_status',
- job=jobid ) )
-
- @web.expose
- @web.require_admin
- def monitor_status( self, trans, **kwd ):
- params = util.Params( kwd )
- jobid = params.get( 'job', '' )
- deferred = trans.app.model.context.current.query( model.DeferredJob ).filter_by( id=jobid ).first()
- if deferred is None:
- return trans.fill_template( '/admin/data_admin/generic_error.mako', message='Invalid genome downloader job specified.' )
- gname = deferred.params[ 'intname' ]
- indexers = ', '.join( deferred.params[ 'indexes' ] )
- jobs = self._get_jobs( deferred, trans )
- jsonjobs = json.dumps( jobs )
- return trans.fill_template( '/admin/data_admin/download_status.mako', name=gname, indexers=indexers, mainjob=jobid, jobs=jobs, jsonjobs=jsonjobs )
-
- @web.expose
- @web.require_admin
- def get_jobs( self, trans, **kwd ):
- sa_session = trans.app.model.context.current
- jobs = []
- params = util.Params( kwd )
- jobid = params.get( 'jobid', '' )
- job = sa_session.query( model.DeferredJob ).filter_by( id=jobid ).first()
- jobs = self._get_jobs( job, trans )
- return trans.fill_template( '/admin/data_admin/ajax_status.mako', json=json.dumps( jobs ) )
-
- def _get_job( self, jobid, jobtype, trans ):
- sa = trans.app.model.context.current
- if jobtype == 'liftover':
- liftoverjob = sa.query( model.DeferredJob ).filter_by( id=jobid ).first()
- job = sa.query( model.TransferJob ).filter_by( id=liftoverjob.params[ 'transfer_job_id' ] ).first()
- joblabel = 'Download liftOver (%s to %s)' % ( liftoverjob.params[ 'from_genome' ], liftoverjob.params[ 'to_genome' ] )
- elif jobtype == 'transfer':
- job = sa.query( model.TransferJob ).filter_by( id=jobid ).first()
- joblabel = 'Download Genome'
- elif jobtype == 'deferred':
- job = sa.query( model.DeferredJob ).filter_by( id=jobid ).first()
- joblabel = 'Main Controller'
- elif jobtype == 'index':
- job = sa.query( model.Job ).filter_by( id=jobid.job_id ).first()
- joblabel = 'Index Genome (%s)' % jobid.indexer
- return dict( status=job.state, jobid=job.id, style=self.jobstyles[job.state], type=jobtype, label=joblabel )
-
- def _get_jobs( self, deferredjob, trans ):
- jobs = []
- idxjobs = []
- sa_session = trans.app.model.context.current
- job = sa_session.query( model.GenomeIndexToolData ).filter_by( deferred=deferredjob ).first()
- jobs.append( self._get_job( deferredjob.id, 'deferred', trans ) )
- if 'transfer_job_id' in deferredjob.params: #hasattr( job, 'transfer' ) and job.transfer is not None: # This is a transfer job, check for indexers
- jobs.append( self._get_job( deferredjob.params[ 'transfer_job_id' ], 'transfer', trans ) )
- if hasattr( job, 'deferred' ):
- idxjobs = sa_session.query( model.GenomeIndexToolData ).filter_by( deferred=job.deferred, transfer=job.transfer ).all()
- if deferredjob.params.has_key( 'liftover' ) and deferredjob.params[ 'liftover' ] is not None:
- for jobid in deferredjob.params[ 'liftover' ]:
- jobs.append( self._get_job( jobid, 'liftover', trans ) )
- for idxjob in idxjobs:
- jobs.append( self._get_job( idxjob, 'index', trans ) )
- return jobs
-
-def build_param_dict( params, trans ):
-
- source = params.get('source', '')
- longname = params.get('longname', None)
- if not isinstance( params.get( 'indexers', None ), list ):
- indexers = [ params.get( 'indexers', None ) ]
- else:
- indexers = params.get( 'indexers', None )
- if indexers is not None:
- if indexers == [None]:
- indexers = None
- url = None
- liftover = None
- newlift = []
- dbkey = params.get( 'dbkey', None )
- dbkeys = dict()
- protocol = 'http'
-
- if source == 'NCBI':
- build = params.get('ncbi_name', '')
- dbkey = build.split( ': ' )[0]
- longname = build.split( ': ' )[-1]
- url = 'http://togows.dbcls.jp/entry/ncbi-nucleotide/%s.fasta' % dbkey
- elif source == 'URL':
- dbkey = params.get( 'url_dbkey', '' )
- url = params.get( 'url', None )
- longname = params.get( 'longname', None )
- elif source == 'UCSC':
- longname = None
- for build in trans.ucsc_builds:
- if dbkey == build[0]:
- dbkey = build[0]
- longname = build[1]
- break
- if dbkey == '?':
- return dict( status='error', message='An invalid build was specified.' )
- ftp = ftplib.FTP('hgdownload.cse.ucsc.edu')
- ftp.login('anonymous', trans.get_user().email)
- checker = []
- liftover = []
- newlift = []
- ftp.retrlines('NLST /goldenPath/%s/liftOver/*.chain.gz' % dbkey, liftover.append)
- try:
- for chain in liftover:
- lifts = []
- fname = chain.split( '/' )[-1]
- organisms = fname.replace( '.over.chain.gz', '' ).split( 'To' )
- lifts.append( [ organisms[0], organisms[1][0].lower() + organisms[1][1:] ] )
- lifts.append( [ organisms[1][0].lower() + organisms[1][1:], organisms[0] ] )
- for organism in lifts:
- remotepath = '/goldenPath/%s/liftOver/%sTo%s.over.chain.gz' % ( organism[0], organism[0], organism[1][0].upper() + organism[1][1:] )
- localfile = '%sTo%s.over.chain' % ( organism[0], organism[1][0].upper() + organism[1][1:] )
- localpath = os.path.join( trans.app.config.get( 'genome_data_path', 'tool-data/genome' ), organism[0], 'liftOver', localfile )
- if not os.path.exists( localpath ) or os.path.getsize( localpath ) == 0:
- newlift.append( [ remotepath, organism[0], organism[1] ] )
- except:
- newlift = None
- pass
- ftp.retrlines('NLST /goldenPath/%s/bigZips/' % dbkey, checker.append)
- ftp.quit()
- for filename in [ dbkey, 'chromFa' ]:
- for extension in [ '.tar.gz', '.tar.bz2', '.zip', '.fa.gz', '.fa.bz2' ]:
- testfile = '/goldenPath/%s/bigZips/%s%s' % ( dbkey, filename, extension )
- if testfile in checker:
- url = 'ftp://hgdownload.cse.ucsc.edu%s' % testfile
- break;
- else:
- continue
- if url is None:
- message = 'The genome %s was not found on the UCSC server.' % dbkey
- status = 'error'
- return dict( status=status, message=message )
-
- elif source == 'Ensembl':
- dbkey = params.get( 'ensembl_dbkey', None )
- if dbkey == '?':
- return dict( status='error', message='An invalid build was specified.' )
- for build in trans.ensembl_builds:
- if build[ 'dbkey' ] == dbkey:
- dbkey = build[ 'dbkey' ]
- release = build[ 'release' ]
- pathname = '_'.join( build[ 'name' ].split(' ')[0:2] )
- longname = build[ 'name' ].replace('_', ' ')
- break
- url = 'ftp://ftp.ensembl.org/pub/release-%s/fasta/%s/dna/%s.%s.%s.dna.toplevel.fa.…' % ( release, pathname.lower(), pathname, dbkey, release )
-
- params = dict( status='ok', dbkey=dbkey, datatype='fasta', url=url, user=trans.user.id, liftover=newlift, longname=longname, indexers=indexers )
-
- return params
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 templates/admin/data_admin/ajax_status.mako
--- a/templates/admin/data_admin/ajax_status.mako
+++ /dev/null
@@ -1,1 +0,0 @@
-${json}
\ No newline at end of file
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 templates/admin/data_admin/data_form.mako
--- a/templates/admin/data_admin/data_form.mako
+++ /dev/null
@@ -1,178 +0,0 @@
-<%inherit file="/base.mako"/>
-<%namespace file="/message.mako" import="render_msg" />
-<%namespace file="/library/common/common.mako" import="common_javascripts" />
-
-<%!
- def inherit(context):
- if context.get('use_panels'):
- return '/webapps/galaxy/base_panels.mako'
- else:
- return '/base.mako'
-%>
-<%inherit file="${inherit(context)}"/>
-
-<%def name="init()">
-<%
- self.has_left_panel=False
- self.has_right_panel=False
- self.message_box_visible=False
- self.active_view="user"
- self.overlay_visible=False
- self.has_accessible_datasets = False
-%>
-</%def>
-<%def name="stylesheets()">
- ${parent.stylesheets()}
- ${h.css( "autocomplete_tagging" )}
-</%def>
-<%def name="javascripts()">
- ${parent.javascripts()}
- ${h.js("libs/jquery/jquery.autocomplete", "galaxy.autocom_tagging" )}
-</%def>
-##
-## Override methods from base.mako and base_panels.mako
-##
-<%def name="center_panel()">
- <div style="overflow: auto; height: 100%;">
- <div class="page-container" style="padding: 10px;">
- ${render_content()}
- </div>
- </div>
-</%def>
-<style type="text/css">
- .params-block { display: none; }
-</style>
-<div class="toolForm">
- %if message:
- <div class="${status}">${message}</div>
- %endif
- <div class="toolFormTitle">Get build from a remote server</div>
- <div class="toolFormBody">
- <form name="download_build" action="${h.url_for( controller='data_admin', action='download_build' )}" enctype="multipart/form-data" method="post">
- <div class="form-row">
- <label for="source">Data Source</label>
- <select id="datasource" name="source" label="Data Source">
- <option value="UCSC">UCSC</option>
- <option value="URL">Direct Link</option>
- <option value="NCBI">NCBI</option>
- <option value="Ensembl">EnsemblGenome</option>
- </select>
- <div style="clear: both;"> </div>
- </div>
- <div class="form-row">
- <label for="indexers">Indexers</label>
- <select name="indexers" multiple style="width: 200px; height: 125px;">
- <option value="2bit" selected>TwoBit</option>
- <option value="bowtie">Bowtie</option>
- <option value="bowtie2">Bowtie 2</option>
- <option value="bwa">BWA</option>
- <option value="perm">PerM</option>
- <option value="picard">Picard</option>
- <option value="sam">sam</option>
- </select>
- <div class="toolParamHelp" style="clear: both;">
- Select the indexers you want to run on the FASTA file after downloading.
- </div>
- </div>
- <h2>Parameters</h2>
- <div id="params_URL" class="params-block">
- <div class="form-row">
- <label for="longname">Long Name</label>
- <input name="longname" type="text" label="Long Name" />
- <div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- A descriptive name for this build.
- </div>
- </div>
- <div class="form-row">
- <label for="url_dbkey">DB Key</label>
- <input name="url_dbkey" type="text" label="DB Key" />
- <div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- The internal DB key for this build. WARNING: Using a value that already exists in one or more .loc files may have unpredictable results.
- </div>
- </div>
- <div id="dlparams">
- <div class="form-row">
- <label for="url">URL</label>
- <input name="url" type="text" label="URL" />
- <div style="clear: both;"> </div>
- <div class="toolParamHelp" style="clear: both;">
- The URL to download this build from.
- </div>
- </div>
- </div>
- </div>
- <div id="params_NCBI" class="params-block">
- <div class="form-row">
- <label>Genome:</label>
- <div class="form-row-input">
- <input type="text" class="text-and-autocomplete-select ac_input" size="40" name="ncbi_name" id="ncbi_name" value="" />
- </div>
- <div class="toolParamHelp" style="clear: both;">
- If you can't find the build you want in this list, open a terminal and execute
- <pre>sh cron/updatencbi.sh</pre>
- in your galaxy root directory.
- </div>
- </div>
- </div>
- <div id="params_Ensembl" class="params-block">
- <div class="form-row">
- <label>Genome:</label>
- <div class="form-row-input">
- <select name="ensembl_dbkey" last_selected_value="?">
- %for dbkey in ensembls:
- <option value="${dbkey['dbkey']}">${dbkey['dbkey']} - ${dbkey['name']}</option>
- %endfor
- </select>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- If you can't find the build you want in this list, open a terminal and execute
- <pre>sh cron/updateensembl.sh</pre>
- in your galaxy root directory.
- </div>
- </div>
- </div>
- <div id="params_UCSC" class="params-block">
- <div class="form-row">
- <label>Genome:</label>
- <div class="form-row-input">
- <select name="dbkey" last_selected_value="?">
- %for dbkey in dbkeys:
- %if dbkey[0] == last_used_build:
- <option value="${dbkey[0]}" selected>${dbkey[1]}</option>
- %else:
- <option value="${dbkey[0]}">${dbkey[1]}</option>
- %endif
- %endfor
- </select>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- If you can't find the build you want in this list, open a terminal and execute
- <pre>sh cron/updateucsc.sh</pre>
- in your galaxy root directory.
- </div>
- </div>
- </div>
- <div class="form-row">
- <input type="submit" class="primary-button" name="runtool_btn" value="Download and index"/>
- </div>
- <script type="text/javascript">
- $(document).ready(function() {
- checkDataSource();
- });
- $('#datasource').change(function() {
- checkDataSource();
- });
- function checkDataSource() {
- var ds = $('#datasource').val();
- $('.params-block').each(function() {
- $(this).hide();
- });
- $('#params_' + ds).show();
- };
-
- var ac = $('#ncbi_name').autocomplete( $('#ncbi_name'), { minChars: 3, max: 100, url: '${h.url_for( controller='data_admin', action='genome_search' )}' } );
- </script>
- </form>
-</div>
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 templates/admin/data_admin/download_status.mako
--- a/templates/admin/data_admin/download_status.mako
+++ /dev/null
@@ -1,110 +0,0 @@
-<%namespace file="/library/common/library_item_info.mako" import="render_library_item_info" />
-<%namespace file="/library/common/common.mako" import="render_actions_on_multiple_items" />
-<%namespace file="/library/common/common.mako" import="render_compression_types_help" />
-<%namespace file="/library/common/common.mako" import="common_javascripts" />
-
-<%!
- def inherit(context):
- if context.get('use_panels'):
- return '/webapps/galaxy/base_panels.mako'
- else:
- return '/base.mako'
-%>
-<%inherit file="${inherit(context)}"/>
-
-<%def name="init()">
-<%
- self.has_left_panel=False
- self.has_right_panel=False
- self.message_box_visible=False
- self.active_view="user"
- self.overlay_visible=False
- self.has_accessible_datasets = False
-%>
-</%def>
-
-##
-## Override methods from base.mako and base_panels.mako
-##
-<%def name="center_panel()">
- <div style="overflow: auto; height: 100%;">
- <div class="page-container" style="padding: 10px;">
- ${render_content()}
- </div>
- </div>
-</%def>
-<p>${name} been added to the job queue
- %if indexers:
- to be indexed with ${indexers}
- %endif
- </p>
-<table id="jobStatus">
-</table>
-<p><a href="${h.url_for( controller='data_admin', action='manage_data' )}">Overview</a>.</p>
-<p><a href="${h.url_for( controller='data_admin', action='add_genome' )}">Download form</a>.</p>
-<script type="text/javascript">
- jobs = ${jsonjobs}
- finalstates = new Array('done', 'error', 'ok');
-
- function makeHTML(jobrow) {
- jc = 'jobrow ' + jobrow['style'];
- djid = jobrow['jobid'];
- jt = jobrow['type'];
- idval = jt + '-job-' + djid;
- return '<tr id="' + idval + '" class="' + jc + '" data-status="' + jobrow['status'] + '" data-jobid="' + djid + '" data-jobtype="' + jt + '">' +
- '<td style="padding: 0px 5px 0px 30px;">' + jobrow['label'] + '</td>' +
- '<td style="padding: 0px 5px;">' + jobrow['status'] + '</td></tr>';
- }
-
- function checkJobs() {
- var alldone = true;
- var mainjob;
- $('.jobrow').each(function() {
- status = $(this).attr('data-status');
- if ($(this).attr('data-jobtype') == 'deferred') {
- mainjob = $(this).attr('data-jobid');
- }
- if ($.inArray(status, finalstates) == -1) {
- alldone = false;
- }
- });
- if (!alldone) {
- checkForNewJobs(mainjob);
- $('#jobStatus').delay(3000).queue(function(n) {
- checkJobs();
- n();
- });
- }
- }
-
- function checkForNewJobs(mainjob) {
- $.get('${h.url_for( controller='data_admin', action='get_jobs' )}', { jobid: mainjob }, function(data) {
- jsondata = JSON.parse(data);
- for (i in jsondata) {
- currentjob = jsondata[i]
- if (jobs[i] == undefined) {
- $('#jobStatus').append(makeHTML(jsondata[i]));
- jobs.push(jsondata[i]);
- }
- $('#' + currentjob['type'] + '-job-' + currentjob['jobid']).replaceWith(makeHTML(currentjob));
- }
- });
- }
-
- $(document).ready(function() {
- for (job in jobs) {
- jobrow = jobs[job];
- $('#jobStatus').append(makeHTML(jobrow));
- if (jobrow['type'] == 'deferred') {
- $('#jobStatus').delay(5000).queue(function(n) {
- checkForNewJobs(jobrow['jobid']);
- n();
- }).fadeIn();
- }
- }
- $('#jobStatus').delay(3000).queue(function(n) {
- checkJobs();
- n();
- }).fadeIn();
- });
-</script>
\ No newline at end of file
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 templates/admin/data_admin/generic_error.mako
--- a/templates/admin/data_admin/generic_error.mako
+++ /dev/null
@@ -1,35 +0,0 @@
-<%inherit file="/base.mako"/>
-<%namespace file="/message.mako" import="render_msg" />
-<%namespace file="/library/common/common.mako" import="common_javascripts" />
-
-<%!
- def inherit(context):
- if context.get('use_panels'):
- return '/webapps/galaxy/base_panels.mako'
- else:
- return '/base.mako'
-%>
-<%inherit file="${inherit(context)}"/>
-
-<%def name="init()">
-<%
- self.has_left_panel=False
- self.has_right_panel=False
- self.message_box_visible=False
- self.active_view="user"
- self.overlay_visible=False
- self.has_accessible_datasets = False
-%>
-</%def>
-<%def name="stylesheets()">
- ${parent.stylesheets()}
- ${h.css( "autocomplete_tagging" )}
-</%def>
-<%def name="javascripts()">
- ${parent.javascripts()}
- ${h.js("libs/jquery/jquery.autocomplete", "galaxy.autocom_tagging" )}
-</%def>
-##
-## Override methods from base.mako and base_panels.mako
-##
-<p class="panel-error-message">${message}</p>
\ No newline at end of file
diff -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 -r b6fae2cd673c6a286138b9e4628f7c4ef0abb099 templates/admin/data_admin/local_data.mako
--- a/templates/admin/data_admin/local_data.mako
+++ /dev/null
@@ -1,169 +0,0 @@
-<%inherit file="/base.mako"/>
-<%namespace file="/message.mako" import="render_msg" />
-<%namespace file="/library/common/common.mako" import="common_javascripts" />
-
-<%!
- def inherit(context):
- if context.get('use_panels'):
- return '/webapps/galaxy/base_panels.mako'
- else:
- return '/base.mako'
-%>
-<%inherit file="${inherit(context)}"/>
-
-<%def name="init()">
-<%
- self.has_left_panel=False
- self.has_right_panel=False
- self.message_box_visible=False
- self.active_view="user"
- self.overlay_visible=False
- self.has_accessible_datasets = False
-%>
-</%def>
-<%def name="stylesheets()">
- ${parent.stylesheets()}
- ${h.css( "autocomplete_tagging" )}
-</%def>
-<%def name="javascripts()">
- ${parent.javascripts()}
- ${h.js("libs/jquery/jquery.autocomplete", "galaxy.autocom_tagging" )}
-</%def>
-##
-## Override methods from base.mako and base_panels.mako
-##
-<%def name="center_panel()">
- <div style="overflow: auto; height: 100%;">
- <div class="page-container" style="padding: 10px;">
- ${render_content()}
- </div>
- </div>
-</%def>
-<style type="text/css">
- .params-block { display: none; }
- td, th { padding-left: 10px; padding-right: 10px; }
- td.state-color-new { text-decoration: underline; }
- td.panel-done-message { background-image: none; padding: 0px 10px 0px 10px; }
- td.panel-error-message { background-image: none; padding: 0px 10px 0px 10px; }
-</style>
-<div class="toolForm">
- %if message:
- <div class="${status}">${message}</div>
- %endif
- <div class="toolFormTitle">Currently tracked builds <a class="action-button" href="${h.url_for( controller='data_admin', action='add_genome' )}">Add new</a></div>
- <div class="toolFormBody">
- <h2>Locally cached data:</h2>
- <h3>NOTE: Indexes generated here will not be reflected in the table until Galaxy is restarted.</h3>
- <table id="locfiles">
- <tr>
- <th>DB Key</th>
- <th>Name</th>
- %for label in labels:
- <th>${labels[label]}</th>
- %endfor
- </tr>
- %for dbkey in sorted(dbkeys):
- <tr>
- <td>${dbkey}</td>
- <td>${indextable[dbkey]['name']}</td>
- %for label in labels:
- <td id="${dbkey}-${indexfuncs[label]}" class="indexcell ${styles[indextable[dbkey]['indexes'][label]]}" data-fapath="${indextable[dbkey]['path']}" data-longname="${indextable[dbkey]['name']}" data-index="${indexfuncs[label]}" data-dbkey="${dbkey}">${indextable[dbkey]['indexes'][label]}</td>
- %endfor
-
- </tr>
- %endfor
- </table>
- <h2>Recent jobs:</h2>
- <p>Click the job ID to see job details and the status of any individual sub-jobs. Note that this list only shows jobs initiated by your account.</p>
- <div id="recentJobs">
- %for job in jobgrid:
- <div id="job-${job['deferred']}" data-dbkey="${job['dbkey']}" data-name="${job['intname']}" data-indexes="${job['indexers']}" data-jobid="${job['deferred']}" data-state="${job['state']}" class="historyItem-${job['state']} historyItemWrapper historyItem">
- <p>Job ID <a href="${h.url_for( controller='data_admin', action='monitor_status', job=job['deferred'] )}">${job['deferred']}</a>:
- %if job['jobtype'] == 'download':
- Download <em>${job['intname']}</em>
- %if job['indexers']:
- and index with ${job['indexers']}
- %endif
- %else:
- Index <em>${job['intname']}</em> with ${job['indexers']}
- %endif
- </p>
- </div>
- %endfor
- </div>
-</div>
-<script type="text/javascript">
- finalstates = new Array('done', 'error', 'ok');
- $('.indexcell').click(function() {
- status = $(this).html();
- elem = $(this);
- if (status != 'Generate') {
- return;
- }
- longname = $(this).attr('data-longname');
- dbkey = $(this).attr('data-dbkey');
- indexes = $(this).attr('data-index');
- path = $(this).attr('data-fapath');
- $.post('${h.url_for( controller='data_admin', action='index_build' )}', { longname: longname, dbkey: dbkey, indexes: indexes, path: path }, function(data) {
- if (data == 'ERROR') {
- alert('There was an error.');
- }
- else {
- elem.html('Generating');
- elem.attr('class', 'indexcell state-color-running');
- }
- newhtml = '<div data-dbkey="' + dbkey + '" data-name="' + longname + '" data-indexes="' + indexes + '" id="job-' + data + '" class="historyItem-new historyItemWrapper historyItem">' +
- '<p>Job ID <a href="${h.url_for( controller='data_admin', action='monitor_status')}?job=' + data + '">' + data + '</a>: ' +
- 'Index <em>' + longname + '</em> with ' + indexes + '</p></div>';
- $('#recentJobs').prepend(newhtml);
- $('#job-' + data).delay(3000).queue(function(n) {
- checkJob(data);
- n();
- });
- });
- });
-
- function checkJob(jobid) {
- $.get('${h.url_for( controller='data_admin', action='get_jobs' )}', { jobid: jobid }, function(data) {
- jsondata = JSON.parse(data)[0];
- jsondata["name"] = $('#job-' + jobid).attr('data-name');
- jsondata["dbkey"] = $('#job-' + jobid).attr('data-dbkey');
- jsondata["indexes"] = $('#job-' + jobid).attr('data-indexes');
- tdid = jq(jsondata["dbkey"] + '-' + jsondata["indexes"]);
- newhtml = makeNewHTML(jsondata);
- $('#job-' + jobid).replaceWith(newhtml);
- if ($.inArray(jsondata["status"], finalstates) == -1) {
- $('#job-' + jobid).delay(3000).queue(function(n) {
- checkJob(jobid);
- n();
- });
- }
- if (jsondata["status"] == 'done' || jsondata["status"] == 'ok') {
- elem = $(tdid);
- elem.html('Generated');
- elem.attr('class', 'indexcell panel-done-message');
- }
- });
- }
-
- function makeNewHTML(jsondata) {
- newhtml = '<div data-dbkey="' + jsondata["dbkey"] + '" data-name="' + jsondata["name"] + '" data-indexes="' + jsondata["indexes"] + '" id="job-' + jsondata["jobid"] + '" class="historyItem-' + jsondata["status"] + ' historyItemWrapper historyItem">' +
- '<p>Job ID <a href="${h.url_for( controller='data_admin', action='monitor_status')}?job=' + jsondata["jobid"] + '">' + jsondata["jobid"] + '</a>: ' +
- 'Index <em>' + jsondata["name"] + '</em> with ' + jsondata["indexes"] + '</p></div>';
- return newhtml;
- }
-
- $(document).ready(function() {
- $('.historyItem').each(function() {
- state = $(this).attr('data-state');
- jobid = $(this).attr('data-jobid');
- if ($.inArray(state, finalstates) == -1) {
- checkJob(jobid);
- }
- });
- });
-
- function jq(id) {
- return '#' + id.replace(/(:|\.)/g,'\\$1');
- }
-</script>
\ No newline at end of file
https://bitbucket.org/galaxy/galaxy-central/commits/d9ab8058b274/
Changeset: d9ab8058b274
Branch: stable
User: davebgx
Date: 2014-07-08 19:21:48
Summary: Merge stables.
Affected #: 4 files
diff -r c4519bae84d32d638d8fe168c6fbcab40f4e8449 -r d9ab8058b2748de6b4b1a17646c2d7456d709877 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -1065,6 +1065,13 @@
out_data = dict( [ ( da.name, da.dataset ) for da in job.output_datasets ] )
inp_data.update( [ ( da.name, da.dataset ) for da in job.input_library_datasets ] )
out_data.update( [ ( da.name, da.dataset ) for da in job.output_library_datasets ] )
+ input_ext = 'data'
+ for _, data in inp_data.items():
+ # For loop odd, but sort simulating behavior in galaxy.tools.actions
+ if not data:
+ continue
+ input_ext = data.ext
+
param_dict = dict( [ ( p.name, p.value ) for p in job.parameters ] ) # why not re-use self.param_dict here? ##dunno...probably should, this causes tools.parameters.basic.UnvalidatedValue to be used in following methods instead of validated and transformed values during i.e. running workflows
param_dict = self.tool.params_from_strings( param_dict, self.app )
# Check for and move associated_files
@@ -1075,7 +1082,7 @@
# Create generated output children and primary datasets and add to param_dict
collected_datasets = {
'children': self.tool.collect_child_datasets(out_data, self.working_directory),
- 'primary': self.tool.collect_primary_datasets(out_data, self.working_directory)
+ 'primary': self.tool.collect_primary_datasets(out_data, self.working_directory, input_ext)
}
param_dict.update({'__collected_datasets__': collected_datasets})
# Certain tools require tasks to be completed after job execution
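For context, here is a minimal, self-contained sketch (not part of the changeset) of the extension-picking loop added above; FakeDataset is an invented stand-in for an input dataset association:

from collections import namedtuple

FakeDataset = namedtuple("FakeDataset", "ext")  # stand-in for an input dataset, illustration only

inp_data = {"input1": None, "input2": FakeDataset(ext="fastq")}
input_ext = "data"
for _, data in inp_data.items():
    if not data:          # skip unset/optional inputs
        continue
    input_ext = data.ext  # whichever non-empty input the loop visits last wins
# input_ext is now "fastq"; it stays at the "data" default when every input is empty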
diff -r c4519bae84d32d638d8fe168c6fbcab40f4e8449 -r d9ab8058b2748de6b4b1a17646c2d7456d709877 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2869,12 +2869,12 @@
self.sa_session.flush()
return children
- def collect_primary_datasets( self, output, job_working_directory ):
+ def collect_primary_datasets( self, output, job_working_directory, input_ext ):
"""
Find any additional datasets generated by a tool and attach (for
cases where number of outputs is not known in advance).
"""
- return output_collect.collect_primary_datatasets( self, output, job_working_directory )
+ return output_collect.collect_primary_datasets( self, output, job_working_directory, input_ext )
def to_dict( self, trans, link_details=False, io_details=False ):
""" Returns dict of tool. """
diff -r c4519bae84d32d638d8fe168c6fbcab40f4e8449 -r d9ab8058b2748de6b4b1a17646c2d7456d709877 lib/galaxy/tools/parameters/output_collect.py
--- a/lib/galaxy/tools/parameters/output_collect.py
+++ b/lib/galaxy/tools/parameters/output_collect.py
@@ -14,7 +14,7 @@
DEFAULT_EXTRA_FILENAME_PATTERN = r"primary_DATASET_ID_(?P<designation>[^_]+)_(?P<visible>[^_]+)_(?P<ext>[^_]+)(_(?P<dbkey>[^_]+))?"
-def collect_primary_datatasets( tool, output, job_working_directory ):
+def collect_primary_datasets( tool, output, job_working_directory, input_ext ):
app = tool.app
sa_session = tool.sa_session
new_primary_datasets = {}
@@ -66,6 +66,8 @@
designation = fields_match.designation
visible = fields_match.visible
ext = fields_match.ext
+ if ext == "input":
+ ext = input_ext
dbkey = fields_match.dbkey
# Create new primary dataset
primary_data = app.model.HistoryDatasetAssociation( extension=ext,
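To see how a discovered filename maps onto the pattern above, here is a rough sketch; the filename, dataset id and input extension are invented, and the DATASET_ID placeholder is assumed to be replaced with the real output dataset id before matching:

import re

pattern = r"primary_101_(?P<designation>[^_]+)_(?P<visible>[^_]+)_(?P<ext>[^_]+)(_(?P<dbkey>[^_]+))?"
fields = re.match(pattern, "primary_101_hits_visible_input_hg19").groupdict()
# fields == {'designation': 'hits', 'visible': 'visible', 'ext': 'input', 'dbkey': 'hg19'}

input_ext = "txt"  # extension derived from the job's inputs, see the jobs/__init__.py hunk above
ext = fields["ext"]
if ext == "input":
    ext = input_ext  # the substitution this changeset introduces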
diff -r c4519bae84d32d638d8fe168c6fbcab40f4e8449 -r d9ab8058b2748de6b4b1a17646c2d7456d709877 test/functional/tools/multi_output_configured.xml
--- a/test/functional/tools/multi_output_configured.xml
+++ b/test/functional/tools/multi_output_configured.xml
@@ -8,19 +8,24 @@
echo "1" > subdir2/CUSTOM_1.txt;
echo "2" > subdir2/CUSTOM_2.tabular;
echo "3" > subdir2/CUSTOM_3.txt;
+ mkdir subdir3;
+ echo "Foo" > subdir3/Foo;
</command><inputs>
- <param name="input" type="integer" value="7" />
+ <param name="num_param" type="integer" value="7" />
+ <param name="input" type="data" /></inputs><outputs><data format="txt" name="report"><discover_datasets pattern="__designation_and_ext__" directory="subdir1" /><discover_datasets pattern="CUSTOM_(?P<designation>.+)\.(?P<ext>.+)" directory="subdir2" />
+ <discover_datasets pattern="__designation__" directory="subdir3" ext="input" /></data></outputs><tests><test>
- <param name="input" value="7" />
+ <param name="num_param" value="7" />
+ <param name="input" ftype="txt" value="simple_line.txt"/><output name="report"><assert_contents><has_line line="Hello" />
@@ -37,6 +42,9 @@
<discovered_dataset designation="2" ftype="tabular"><assert_contents><has_line line="2" /></assert_contents></discovered_dataset>
+ <discovered_dataset designation="Foo" ftype="txt">
+ <assert_contents><has_line line="Foo" /></assert_contents>
+ </discovered_dataset></output></test></tests>
https://bitbucket.org/galaxy/galaxy-central/commits/5874f6bc02f9/
Changeset: 5874f6bc02f9
User: davebgx
Date: 2014-07-08 19:22:06
Summary: Merge stable.
Affected #: 2 files
https://bitbucket.org/galaxy/galaxy-central/commits/6b1415cbc145/
Changeset: 6b1415cbc145
Branch: daniel_blanchard/update-drmaa-python-version-in-eggsini-t-1402925026891
User: davebgx
Date: 2014-07-08 19:22:35
Summary: Close branch daniel_blanchard/update-drmaa-python-version-in-eggsini-t-1402925026891
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Expose OutputParameterJSONTool (tool_type='output_parameter_json') for direct usage. Previously used as inherited by data source and data manager tools.
by commits-noreply@bitbucket.org 07 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4eeb41f2fa75/
Changeset: 4eeb41f2fa75
User: dan
Date: 2014-07-07 17:11:30
Summary: Expose OutputParameterJSONTool (tool_type='output_parameter_json') for direct usage. Previously used as inherited by data source and data manager tools.
Affected #: 1 file
diff -r 86308479b291e2295ecba6bcb3c33864abc18860 -r 4eeb41f2fa7569e1d8652d9a0be3a1f1d5c6c2c0 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -3213,7 +3213,9 @@
# Populate tool_type to ToolClass mappings
tool_types = {}
-for tool_class in [ Tool, DataDestinationTool, SetMetadataTool, DataSourceTool, AsyncDataSourceTool, DataManagerTool ]:
+for tool_class in [ Tool, SetMetadataTool, OutputParameterJSONTool,
+ DataManagerTool, DataSourceTool, AsyncDataSourceTool,
+ DataDestinationTool ]:
tool_types[ tool_class.tool_type ] = tool_class
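As a rough illustration of what this mapping enables (stand-in classes below, not Galaxy's real ones), a tool_type string taken from a tool's XML can now resolve directly to OutputParameterJSONTool:

class Tool(object):
    tool_type = 'default'

class OutputParameterJSONTool(Tool):
    tool_type = 'output_parameter_json'

tool_types = {}
for tool_class in [Tool, OutputParameterJSONTool]:
    tool_types[tool_class.tool_type] = tool_class

assert tool_types['output_parameter_json'] is OutputParameterJSONTool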
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0b0f39f80bb9/
Changeset: 0b0f39f80bb9
User: jmchilton
Date: 2014-07-07 04:08:38
Summary: Docker - Extract helper method defining prefix for Docker CLIs.
Affected #: 2 files
diff -r 6d50cd22380253129121e3e7359a6286ee9b1663 -r 0b0f39f80bb916669ddaac3f17054b26525f906f lib/galaxy/tools/deps/containers.py
--- a/lib/galaxy/tools/deps/containers.py
+++ b/lib/galaxy/tools/deps/containers.py
@@ -191,6 +191,14 @@
# TODO: Remove redundant volumes...
volumes = docker_util.DockerVolume.volumes_from_str(volumes_raw)
volumes_from = self.destination_info.get("docker_volumes_from", docker_util.DEFAULT_VOLUMES_FROM)
+
+ docker_host_props = dict(
+ docker_cmd=prop("cmd", docker_util.DEFAULT_DOCKER_COMMAND),
+ sudo=asbool(prop("sudo", docker_util.DEFAULT_SUDO)),
+ sudo_cmd=prop("sudo_cmd", docker_util.DEFAULT_SUDO_COMMAND),
+ host=prop("host", docker_util.DEFAULT_HOST),
+ )
+
return docker_util.build_docker_run_command(
command,
self.container_id,
@@ -198,11 +206,8 @@
volumes_from=volumes_from,
env_directives=env_directives,
working_directory=working_directory,
- docker_cmd=prop("cmd", docker_util.DEFAULT_DOCKER_COMMAND),
- sudo=asbool(prop("sudo", docker_util.DEFAULT_SUDO)),
- sudo_cmd=prop("sudo_cmd", docker_util.DEFAULT_SUDO_COMMAND),
- host=prop("host", docker_util.DEFAULT_HOST),
- net=prop("net", "none") # By default, docker instance has networking disabled
+ net=prop("net", "none"), # By default, docker instance has networking disabled
+ **docker_host_props
)
def __expand_str(self, value):
diff -r 6d50cd22380253129121e3e7359a6286ee9b1663 -r 0b0f39f80bb916669ddaac3f17054b26525f906f lib/galaxy/tools/deps/docker_util.py
--- a/lib/galaxy/tools/deps/docker_util.py
+++ b/lib/galaxy/tools/deps/docker_util.py
@@ -54,24 +54,19 @@
container_command,
image,
tag=None,
- docker_cmd=DEFAULT_DOCKER_COMMAND,
volumes=[],
volumes_from=DEFAULT_VOLUMES_FROM,
memory=DEFAULT_MEMORY,
env_directives=[],
working_directory=DEFAULT_WORKING_DIRECTORY,
+ name=None,
+ net=DEFAULT_NET,
+ docker_cmd=DEFAULT_DOCKER_COMMAND,
sudo=DEFAULT_SUDO,
sudo_cmd=DEFAULT_SUDO_COMMAND,
- name=None,
host=DEFAULT_HOST,
- net=DEFAULT_NET,
):
- command_parts = []
- if sudo:
- command_parts.append(sudo_cmd)
- command_parts.append(docker_cmd)
- if host:
- command_parts.append(["-H", host])
+ command_parts = __docker_prefix(docker_cmd, sudo, sudo_cmd, host)
command_parts.append("run")
for env_directive in env_directives:
command_parts.extend(["-e", env_directive])
@@ -93,3 +88,15 @@
command_parts.append(full_image)
command_parts.append(container_command)
return " ".join(command_parts)
+
+
+def __docker_prefix(docker_cmd, sudo, sudo_cmd, host):
+ """ Prefix to issue a docker command.
+ """
+ command_parts = []
+ if sudo:
+ command_parts.append(sudo_cmd)
+ command_parts.append(docker_cmd)
+ if host:
+ command_parts.append(["-H", host])
+ return command_parts
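A small runnable sketch of what the extracted prefix amounts to; the helper below mirrors the one above but flattens everything to plain strings, and the sudo/docker values are assumptions rather than Galaxy defaults:

def docker_prefix(docker_cmd="docker", sudo=True, sudo_cmd="sudo", host=None):
    # compose the command prefix: optional sudo, the docker binary, optional remote host
    parts = []
    if sudo:
        parts.append(sudo_cmd)
    parts.append(docker_cmd)
    if host:
        parts.extend(["-H", host])
    return parts

print(" ".join(docker_prefix() + ["run", "busybox", "echo", "hi"]))
# -> sudo docker run busybox echo hi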
https://bitbucket.org/galaxy/galaxy-central/commits/86308479b291/
Changeset: 86308479b291
User: jmchilton
Date: 2014-07-07 04:08:38
Summary: Docker - Explicitly search for and pull images (if needed) prior to use.
This should prevent successful docker caching from interfering with job outputs on the first execution of tools.
This needs to be better - the output should be captured in such a way that admins can view it but it doesn't interfere with the job, and there should be an option to cache on the handler (instead of in the job) for shared clusters without web access.
Affected #: 2 files
diff -r 0b0f39f80bb916669ddaac3f17054b26525f906f -r 86308479b291e2295ecba6bcb3c33864abc18860 lib/galaxy/tools/deps/containers.py
--- a/lib/galaxy/tools/deps/containers.py
+++ b/lib/galaxy/tools/deps/containers.py
@@ -199,7 +199,8 @@
host=prop("host", docker_util.DEFAULT_HOST),
)
- return docker_util.build_docker_run_command(
+ cache_command = docker_util.build_docker_cache_command(self.container_id, **docker_host_props)
+ run_command = docker_util.build_docker_run_command(
command,
self.container_id,
volumes=volumes,
@@ -209,6 +210,7 @@
net=prop("net", "none"), # By default, docker instance has networking disabled
**docker_host_props
)
+ return "%s\n%s" % (cache_command, run_command)
def __expand_str(self, value):
if not value:
diff -r 0b0f39f80bb916669ddaac3f17054b26525f906f -r 86308479b291e2295ecba6bcb3c33864abc18860 lib/galaxy/tools/deps/docker_util.py
--- a/lib/galaxy/tools/deps/docker_util.py
+++ b/lib/galaxy/tools/deps/docker_util.py
@@ -50,6 +50,24 @@
return ":".join([self.from_path, self.to_path, self.how])
+def build_docker_cache_command(
+ image,
+ docker_cmd=DEFAULT_DOCKER_COMMAND,
+ sudo=DEFAULT_SUDO,
+ sudo_cmd=DEFAULT_SUDO_COMMAND,
+ host=DEFAULT_HOST,
+):
+ inspect_command_parts = __docker_prefix(docker_cmd, sudo, sudo_cmd, host)
+ inspect_command_parts.extend(["inspect", image])
+ inspect_image_command = " ".join(inspect_command_parts)
+
+ pull_command_parts = __docker_prefix(docker_cmd, sudo, sudo_cmd, host)
+ pull_command_parts.extend(["pull", image])
+ pull_image_command = " ".join(pull_command_parts)
+ cache_command = "%s > /dev/null 2>&1\n[ $? -ne 0 ] && %s > /dev/null 2>&1\n" % (inspect_image_command, pull_image_command)
+ return cache_command
+
+
def build_docker_run_command(
container_command,
image,
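The cache command is intended to expand into a short check-then-pull shell snippet; the sketch below rebuilds that string with invented values (image name, sudo prefix) just to show its shape:

inspect_image_command = "sudo docker inspect busybox"
pull_image_command = "sudo docker pull busybox"
cache_command = "%s > /dev/null 2>&1\n[ $? -ne 0 ] && %s > /dev/null 2>&1\n" % (
    inspect_image_command, pull_image_command)
print(cache_command)
# sudo docker inspect busybox > /dev/null 2>&1
# [ $? -ne 0 ] && sudo docker pull busybox > /dev/null 2>&1

In other words, the image is pulled only when docker inspect reports it is not already available locally.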
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jmchilton: Close branch BjoernGruening/add-empty_field-validator-to-gtf2bedgrap-1403907588881
by commits-noreply@bitbucket.org 06 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7ca8920eaf69/
Changeset: 7ca8920eaf69
Branch: BjoernGruening/add-empty_field-validator-to-gtf2bedgrap-1403907588881
User: jmchilton
Date: 2014-07-07 02:18:04
Summary: Close branch BjoernGruening/add-empty_field-validator-to-gtf2bedgrap-1403907588881
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jmchilton: Merged in BjoernGruening/galaxy-central-1/BjoernGruening/add-empty_field-validator-to-gtf2bedgrap-1403907588881 (pull request #427)
by commits-noreply@bitbucket.org 06 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6d50cd223802/
Changeset: 6d50cd223802
User: jmchilton
Date: 2014-07-07 02:18:04
Summary: Merged in BjoernGruening/galaxy-central-1/BjoernGruening/add-empty_field-validator-to-gtf2bedgrap-1403907588881 (pull request #427)
Add empty_field validator to gtf2bedgraph.xml
Affected #: 1 file
diff -r feecd55a179edf7c9c8021110aa125f1854bef2d -r 6d50cd22380253129121e3e7359a6286ee9b1663 tools/filters/gtf2bedgraph.xml
--- a/tools/filters/gtf2bedgraph.xml
+++ b/tools/filters/gtf2bedgraph.xml
@@ -3,7 +3,9 @@
<command interpreter="python">gtf_to_bedgraph_converter.py $input $out_file1 $attribute_name</command><inputs><param format="gtf" name="input" type="data" label="Convert this query"/>
- <param name="attribute_name" type="text" label="Attribute to Use for Value"/>
+ <param name="attribute_name" type="text" label="Attribute to Use for Value">
+ <validator type="empty_field" />
+ </param></inputs><outputs><data format="bedgraph" name="out_file1" />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/72114674b59f/
Changeset: 72114674b59f
Branch: BjoernGruening/add-empty_field-validator-to-gtf2bedgrap-1403907588881
User: BjoernGruening
Date: 2014-06-28 00:21:37
Summary: Add empty_field validator to gtf2bedgraph.xml
Affected #: 1 file
diff -r 2577b386dbbe421e80c816abf7683359ab64fe1f -r 72114674b59f516b635d120c2d8b1f69fd6de651 tools/filters/gtf2bedgraph.xml
--- a/tools/filters/gtf2bedgraph.xml
+++ b/tools/filters/gtf2bedgraph.xml
@@ -3,7 +3,9 @@
<command interpreter="python">gtf_to_bedgraph_converter.py $input $out_file1 $attribute_name</command><inputs><param format="gtf" name="input" type="data" label="Convert this query"/>
- <param name="attribute_name" type="text" label="Attribute to Use for Value"/>
+ <param name="attribute_name" type="text" label="Attribute to Use for Value">
+ <validator type="empty_field" />
+ </param></inputs><outputs><data format="bedgraph" name="out_file1" />
https://bitbucket.org/galaxy/galaxy-central/commits/6d50cd223802/
Changeset: 6d50cd223802
User: jmchilton
Date: 2014-07-07 02:18:04
Summary: Merged in BjoernGruening/galaxy-central-1/BjoernGruening/add-empty_field-validator-to-gtf2bedgrap-1403907588881 (pull request #427)
Add empty_field validator to gtf2bedgraph.xml
Affected #: 1 file
diff -r feecd55a179edf7c9c8021110aa125f1854bef2d -r 6d50cd22380253129121e3e7359a6286ee9b1663 tools/filters/gtf2bedgraph.xml
--- a/tools/filters/gtf2bedgraph.xml
+++ b/tools/filters/gtf2bedgraph.xml
@@ -3,7 +3,9 @@
<command interpreter="python">gtf_to_bedgraph_converter.py $input $out_file1 $attribute_name</command><inputs><param format="gtf" name="input" type="data" label="Convert this query"/>
- <param name="attribute_name" type="text" label="Attribute to Use for Value"/>
+ <param name="attribute_name" type="text" label="Attribute to Use for Value">
+ <validator type="empty_field" />
+ </param></inputs><outputs><data format="bedgraph" name="out_file1" />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Trackster: cleanup and better efficiency for 0932660.
by commits-noreply@bitbucket.org 06 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/feecd55a179e/
Changeset: feecd55a179e
User: jgoecks
Date: 2014-07-06 17:55:03
Summary: Trackster: cleanup and better efficiency for 0932660.
Affected #: 1 file
diff -r 4139740da48e24d96a475da67613be92fbdba478 -r feecd55a179edf7c9c8021110aa125f1854bef2d static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -2952,12 +2952,6 @@
* drawn/fetched and shown.
*/
postdraw_actions: function(tiles, width, w_scale, clear_after) {
- // If reference track is visible, adjust viewport to be smaller so that bottom content
- // is visible.
- if (this.view.reference_track.tiles_div.is(':visible')) {
- this.view.resize_viewport();
- }
-
var line_track_tiles = _.filter(tiles, function(tile) {
return (tile instanceof LineTrackTile);
});
@@ -3639,14 +3633,30 @@
* Draws and shows tile if reference data can be displayed; otherwise track is hidden.
*/
draw_helper: function(region, w_scale, options) {
+ var cur_visible = this.tiles_div.is(':visible'),
+ new_visible,
+ tile = null;
if (w_scale > this.view.canvas_manager.char_width_px) {
this.tiles_div.show();
- return TiledTrack.prototype.draw_helper.call(this, region, w_scale, options);
+ new_visible = true;
+ tile = TiledTrack.prototype.draw_helper.call(this, region, w_scale, options);
}
else {
+ new_visible = false;
this.tiles_div.hide();
- return null;
}
+
+ // NOTE: viewport resizing conceptually belongs in postdraw_actions(), but currently
+ // postdraw_actions is not called when reference track not shown due to no tiles. If
+ // it is moved to postdraw_actions, resize must be called each time because cannot
+ // easily detect showing/hiding.
+
+ // If showing or hiding reference track, resize viewport.
+ if (cur_visible !== new_visible) {
+ this.view.resize_viewport();
+ }
+
+ return tile;
},
can_subset: function(entry) { return true; },
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/503392692e9b/
Changeset: 503392692e9b
User: jmchilton
Date: 2014-07-04 03:41:09
Summary: Fix unit tests broken with 4de240e.
Affected #: 1 file
diff -r 63a7243c12dad6c9bf8ab1c58213405e38a5c17e -r 503392692e9b8145fe17886c7f2bec934fa7ebc0 test/unit/tools/test_collect_primary_datasets.py
--- a/test/unit/tools/test_collect_primary_datasets.py
+++ b/test/unit/tools/test_collect_primary_datasets.py
@@ -191,7 +191,7 @@
def _collect( self, job_working_directory=None ):
if not job_working_directory:
job_working_directory = self.test_directory
- return self.tool.collect_primary_datasets( self.outputs, job_working_directory )
+ return self.tool.collect_primary_datasets( self.outputs, job_working_directory, "txt" )
def _replace_output_collectors( self, xml_str ):
# Rewrite tool as if it had been created with output containing
https://bitbucket.org/galaxy/galaxy-central/commits/4139740da48e/
Changeset: 4139740da48e
User: jmchilton
Date: 2014-07-04 03:41:09
Summary: Fix tool multi-run inside conditionals and repeats during state updates.
Fixes multirun inside of conditionals and repeats if the tool form state is updated (e.g. because a conditional param was updated or a repeat block was added).
More tests - ugly tests - but tests.
Affected #: 2 files
diff -r 503392692e9b8145fe17886c7f2bec934fa7ebc0 -r 4139740da48e24d96a475da67613be92fbdba478 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2507,13 +2507,16 @@
incoming_value = get_incoming_value( incoming, key, None )
value, error = check_param( trans, input, incoming_value, context, source=source )
# If a callback was provided, allow it to process the value
+ input_name = input.name
if item_callback:
- old_value = state.get( input.name, None )
+ old_value = state.get( input_name, None )
value, error = item_callback( trans, key, input, value, error, old_value, context )
if error:
- errors[ input.name ] = error
- state[ input.name ] = value
- state.update( self.__meta_properties_for_state( key, incoming, incoming_value, value ) )
+ errors[ input_name ] = error
+
+ state[ input_name ] = value
+ meta_properties = self.__meta_properties_for_state( key, incoming, incoming_value, value, input_name )
+ state.update( meta_properties )
return errors
def __remove_meta_properties( self, incoming ):
@@ -2527,12 +2530,17 @@
del result[ key ]
return result
- def __meta_properties_for_state( self, key, incoming, incoming_val, state_val ):
+ def __meta_properties_for_state( self, key, incoming, incoming_val, state_val, input_name ):
meta_properties = {}
- multirun_key = "%s|__multirun__" % key
- if multirun_key in incoming:
- multi_value = incoming[ multirun_key ]
- meta_properties[ multirun_key ] = multi_value
+ meta_property_suffixes = [
+ "__multirun__",
+ "__collection_multirun__",
+ ]
+ for meta_property_suffix in meta_property_suffixes:
+ multirun_key = "%s|%s" % ( key, meta_property_suffix )
+ if multirun_key in incoming:
+ multi_value = incoming[ multirun_key ]
+ meta_properties[ "%s|%s" % ( input_name, meta_property_suffix ) ] = multi_value
return meta_properties
@property
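For context, a small runnable sketch of the keying change, using invented names that match the repeat case in the new unit test: the multirun marker arrives keyed by the fully prefixed form-field name, but is now stored under the bare input name so it lands inside the repeat element's state:

key = "repeat1_0|param2"          # fully prefixed form-field key
input_name = "param2"             # bare parameter name inside the repeat group
incoming = {"repeat1_0|param2|__multirun__": [1, 2]}

meta_properties = {}
for suffix in ("__multirun__", "__collection_multirun__"):
    multirun_key = "%s|%s" % (key, suffix)
    if multirun_key in incoming:
        meta_properties["%s|%s" % (input_name, suffix)] = incoming[multirun_key]

# meta_properties == {'param2|__multirun__': [1, 2]}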
diff -r 503392692e9b8145fe17886c7f2bec934fa7ebc0 -r 4139740da48e24d96a475da67613be92fbdba478 test/unit/tools/test_execution.py
--- a/test/unit/tools/test_execution.py
+++ b/test/unit/tools/test_execution.py
@@ -38,6 +38,7 @@
self.setup_app()
self.history = galaxy.model.History()
self.trans = MockTrans( self.app, self.history )
+ self.app.dataset_collections_service = MockCollectionService()
self.tool_action = MockAction( self.trans )
def tearDown(self):
@@ -180,6 +181,39 @@
self.__assert_state_serializable( state )
self.assertEquals( state.inputs[ "param1|__multirun__" ], [ 1, 2 ] )
+ def test_simple_collection_multirun_state_update( self ):
+ hdca = self.__setup_collection_multirun_job()
+ encoded_id = self.app.security.encode_id(hdca.id)
+ template, template_vars = self.__handle_with_incoming( **{
+ "param1|__collection_multirun__": encoded_id,
+ } )
+ state = self.__assert_rerenders_tool_without_errors( template, template_vars )
+ self.__assert_state_serializable( state )
+ self.assertEquals( state.inputs[ "param1|__collection_multirun__" ], encoded_id )
+
+ def test_repeat_multirun_state_updates( self ):
+ self._init_tool( REPEAT_TOOL_CONTENTS )
+
+ # Fresh state contains no repeat elements
+ self.__handle_with_incoming()
+ # Hitting add button adds repeat element
+ template, template_vars = self.__handle_with_incoming(**{
+ "param1|__multirun__": [ 1, 2 ],
+ "repeat1_add": "dummy",
+ })
+ state = self.__assert_rerenders_tool_without_errors( template, template_vars )
+ self.assertEquals( state.inputs[ "param1|__multirun__" ], [ 1, 2 ] )
+ assert len( state.inputs[ "repeat1" ] ) == 1
+
+ # Hitting add button again adds another repeat element
+ template, template_vars = self.__handle_with_incoming( state, **{
+ "repeat1_0|param2|__multirun__": [ 1, 2 ],
+ "repeat1_add": "dummy",
+ } )
+ state = self.__assert_rerenders_tool_without_errors( template, template_vars )
+ self.assertEquals( state.inputs[ "param1|__multirun__" ], [ 1, 2 ] )
+ self.assertEquals( state.inputs[ "repeat1" ][0][ "param2|__multirun__" ], [ 1, 2 ] )
+
def test_simple_multirun_execution( self ):
hda1, hda2 = self.__setup_multirun_job()
template, template_vars = self.__handle_with_incoming( **{
@@ -276,6 +310,11 @@
hda1, hda2 = self.__add_dataset( 1 ), self.__add_dataset( 2 )
return hda1, hda2
+ def __setup_collection_multirun_job( self ):
+ self._init_tool( tools_support.SIMPLE_CAT_TOOL_CONTENTS )
+ hdca = self.__add_collection_dataset( 1 )
+ return hdca
+
def __handle_with_incoming( self, previous_state=None, **kwds ):
""" Execute tool.handle_input with incoming specified by kwds
(optionally extending a previous state).
@@ -310,6 +349,17 @@
self.history.datasets.append( hda )
return hda
+ def __add_collection_dataset( self, id, *hdas ):
+ hdca = galaxy.model.HistoryDatasetCollectionAssociation()
+ hdca.id = id
+ collection = galaxy.model.DatasetCollection()
+ hdca.collection = collection
+ collection.elements = [ galaxy.model.DatasetCollectionElement(element=self.__add_dataset( 1 )) ]
+
+ self.trans.sa_session.model_objects[ galaxy.model.HistoryDatasetCollectionAssociation ][ id ] = hdca
+ self.history.dataset_collections.append( hdca )
+ return hdca
+
def __assert_rerenders_tool_without_errors( self, template, template_vars ):
assert template == "tool_form.mako"
self.__assert_no_errors( template_vars )
@@ -392,3 +442,12 @@
def get_history( self ):
return self.history
+
+
+class MockCollectionService( object ):
+
+ def __init__( self ):
+ self.collection_info = object()
+
+ def match_collections( self, collections_to_match ):
+ return self.collection_info
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: guerler: Fix trackster embedded grids
by commits-noreply@bitbucket.org 02 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/63a7243c12da/
Changeset: 63a7243c12da
User: guerler
Date: 2014-07-02 22:24:33
Summary: Fix trackster embedded grids
Affected #: 2 files
diff -r 58baf1eb2489ea11c595ec5d7f8fe3c707a9e3aa -r 63a7243c12dad6c9bf8ab1c58213405e38a5c17e static/scripts/mvc/grid/grid-view.js
--- a/static/scripts/mvc/grid/grid-view.js
+++ b/static/scripts/mvc/grid/grid-view.js
@@ -568,6 +568,12 @@
}
// refresh grid
+ if (href) {
+ this.go_to(inbound, href);
+ return false;
+ }
+
+ // refresh grid
if (this.grid.get('async')) {
this.update_grid();
} else {
diff -r 58baf1eb2489ea11c595ec5d7f8fe3c707a9e3aa -r 63a7243c12dad6c9bf8ab1c58213405e38a5c17e static/scripts/packed/mvc/grid/grid-view.js
--- a/static/scripts/packed/mvc/grid/grid-view.js
+++ b/static/scripts/packed/mvc/grid/grid-view.js
@@ -1,1 +1,1 @@
-jQuery.ajaxSettings.traditional=true;define(["mvc/grid/grid-model","mvc/grid/grid-template","mvc/ui"],function(b,a){return Backbone.View.extend({grid:null,initialize:function(c){this.setElement("#grid-container");if(c.use_panels){$("#center").css({padding:"10px",overflow:"auto"})}this.init_grid(c)},handle_refresh:function(c){if(c){if($.inArray("history",c)>-1){if(top.Galaxy&&top.Galaxy.currHistoryPanel){top.Galaxy.currHistoryPanel.loadCurrentHistory()}}}},init_grid:function(f){this.grid=new b(f);var e=this.grid.attributes;this.handle_refresh(e.refresh_frames);var d=this.grid.get("url_base");d=d.replace(/^.*\/\/[^\/]+/,"");this.grid.set("url_base",d);this.$el.html(a.grid(e));this.$el.find("#grid-table-header").html(a.header(e));this.$el.find("#grid-table-body").html(a.body(e));this.$el.find("#grid-table-footer").html(a.footer(e));if(e.message){this.$el.find("#grid-message").html(a.message(e));var c=this;if(e.use_hide_message){setTimeout(function(){c.$el.find("#grid-message").html("")},5000)}}this.init_grid_elements();this.init_grid_controls();init_refresh_on_change()},init_grid_controls:function(){var c=this;this.$el.find(".operation-button").each(function(){$(this).off();$(this).click(function(){c.submit_operation(this,operation.confirm);return false})});this.$el.find("input[type=text]").each(function(){$(this).off();$(this).click(function(){$(this).select()}).keyup(function(){$(this).css("font-style","normal")})});this.$el.find(".sort-link").each(function(){$(this).off();$(this).click(function(){c.set_sort_condition($(this).attr("sort_key"));return false})});this.$el.find(".text-filter-form").each(function(){$(this).off();$(this).submit(function(){var g=$(this).attr("column_key");var f=$("#input-"+g+"-filter");var h=f.val();f.val("");c.add_filter_condition(g,h);return false})});this.$el.find(".text-filter-val > a").each(function(){$(this).off();$(this).click(function(){$(this).parent().remove();c.remove_filter_condition($(this).attr("filter_key"),$(this).attr("filter_val"));return false})});this.$el.find(".categorical-filter > a").each(function(){$(this).off();$(this).click(function(){c.set_categorical_filter($(this).attr("filter_key"),$(this).attr("filter_val"));return false})});var e=this.$el.find("#input-tags-filter");if(e.length){e.autocomplete(this.grid.history_tag_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}var d=this.$el.find("#input-name-filter");if(d.length){d.autocomplete(this.grid.history_name_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}this.$el.find(".advanced-search-toggle").each(function(){$(this).off();$(this).click(function(){c.$el.find("#standard-search").slideToggle("fast");c.$el.find("#advanced-search").slideToggle("fast");return false})});this.$el.find("#check_all").off();this.$el.find("#check_all").on("click",function(){c.check_all_items()})},init_grid_elements:function(){this.$el.find(".grid").each(function(){var j=$(this).find("input.grid-row-select-checkbox");var i=$(this).find("span.grid-selected-count");var r=function(){i.text($(j).filter(":checked").length)};$(j).each(function(){$(this).change(r)});r()});if(this.$el.find(".community_rating_star").length!==0){this.$el.find(".community_rating_star").rating({})}var q=this.grid.attributes;var p=this;this.$el.find(".page-link > a").each(function(){$(this).click(function(){p.set_page($(this).attr("page_num"));return 
false})});this.$el.find(".use-inbound").each(function(){$(this).click(function(i){p.execute({href:$(this).attr("href"),inbound:true});return false})});this.$el.find(".use-outbound").each(function(){$(this).click(function(i){p.execute({href:$(this).attr("href")});return false})});var f=q.items.length;if(f==0){return}for(var k in q.items){var o=q.items[k];var l=this.$el.find("#grid-"+k+"-popup");l.off();var d=new PopupMenu(l);for(var h in q.operations){var e=q.operations[h];var m=e.label;var c=o.operation_config[m];var g=o.encode_id;if(c.allowed&&e.allow_popup){var n={html:e.label,href:c.url_args,target:c.target,confirmation_text:e.confirm,inbound:e.inbound};n.func=function(r){r.preventDefault();var j=$(r.target).html();var i=this.findItemByHtml(j);p.execute(i)};d.addItem(n)}}}},add_filter_condition:function(e,g){if(g===""){return false}this.grid.add_filter(e,g,true);var f=$(a.filter_element(e,g));var d=this;f.click(function(){$(this).remove();d.remove_filter_condition(e,g)});var c=this.$el.find("#"+e+"-filtering-criteria");c.append(f);this.go_page_one();this.execute()},remove_filter_condition:function(c,d){this.grid.remove_filter(c,d);this.go_page_one();this.execute()},set_sort_condition:function(g){var f=this.grid.get("sort_key");var e=g;if(f.indexOf(g)!==-1){if(f.substring(0,1)!=="-"){e="-"+g}}this.$el.find(".sort-arrow").remove();var d=(e.substring(0,1)=="-")?"↑":"↓";var c=$("<span>"+d+"</span>").addClass("sort-arrow");this.$el.find("#"+g+"-header").append(c);this.grid.set("sort_key",e);this.go_page_one();this.execute()},set_categorical_filter:function(e,g){var d=this.grid.get("categorical_filters")[e],f=this.grid.get("filters")[e];var c=this;this.$el.find("."+e+"-filter").each(function(){var k=$.trim($(this).text());var i=d[k];var j=i[e];if(j==g){$(this).empty();$(this).addClass("current-filter");$(this).append(k)}else{if(j==f){$(this).empty();var h=$('<a href="#">'+k+"</a>");h.click(function(){c.set_categorical_filter(e,j)});$(this).removeClass("current-filter");$(this).append(h)}}});this.grid.add_filter(e,g);this.go_page_one();this.execute()},set_page:function(c){var d=this;this.$el.find(".page-link").each(function(){var i=$(this).attr("id"),g=parseInt(i.split("-")[2],10),e=d.grid.get("cur_page"),h;if(g===c){h=$(this).children().text();$(this).empty();$(this).addClass("inactive-link");$(this).text(h)}else{if(g===e){h=$(this).text();$(this).empty();$(this).removeClass("inactive-link");var f=$('<a href="#">'+h+"</a>");f.click(function(){d.set_page(g)});$(this).append(f)}}});if(c==="all"){this.grid.set("cur_page",c)}else{this.grid.set("cur_page",parseInt(c,10))}this.execute()},submit_operation:function(d,g){var e=$(d).val();var f=this.$el.find('input[name="id"]:checked').length;if(!f>0){return false}var c=[];this.$el.find("input[name=id]:checked").each(function(){c.push($(this).val())});this.execute({operation:e,id:c,confirmation_text:g});return true},check_all_items:function(){var c=document.getElementById("check_all"),d=document.getElementsByTagName("input"),f=0,e;if(c.checked===true){for(e=0;e<d.length;e++){if(d[e].name.indexOf("id")!==-1){d[e].checked=true;f++}}}else{for(e=0;e<d.length;e++){if(d[e].name.indexOf("id")!==-1){d[e].checked=false}}}this.init_grid_elements()},go_page_one:function(){var c=this.grid.get("cur_page");if(c!==null&&c!==undefined&&c!=="all"){this.grid.set("cur_page",1)}},execute:function(l){var f=null;var e=null;var g=null;var c=null;var k=null;if(l){e=l.href;g=l.operation;f=l.id;c=l.confirmation_text;k=l.inbound;if(e!==undefined&&e.indexOf("operation=")!=-1){var 
j=e.split("?");if(j.length>1){var i=j[1];var d=i.split("&");for(var h=0;h<d.length;h++){if(d[h].indexOf("operation")!=-1){g=d[h].split("=")[1];g=g.replace(/\+/g," ")}else{if(d[h].indexOf("id")!=-1){f=d[h].split("=")[1]}}}}}}if(g&&f){if(c&&c!=""&&c!="None"&&c!="null"){if(!confirm(c)){return false}}g=g.toLowerCase();this.grid.set({operation:g,item_ids:f});if(this.grid.can_async_op(g)){this.update_grid()}else{this.go_to(k,e)}return false}if(this.grid.get("async")){this.update_grid()}else{this.go_to(k,e)}return false},go_to:function(f,d){var e=this.grid.get("async");this.grid.set("async",false);advanced_search=this.$el.find("#advanced-search").is(":visible");this.grid.set("advanced_search",advanced_search);if(!d){d=this.grid.get("url_base")+"?"+$.param(this.grid.get_url_data())}this.grid.set({operation:undefined,item_ids:undefined,async:e});if(f){var c=$(".grid-header").closest(".inbound");if(c.length!==0){c.load(d);return}}window.location=d},update_grid:function(){var d=(this.grid.get("operation")?"POST":"GET");this.$el.find(".loading-elt-overlay").show();var c=this;$.ajax({type:d,url:c.grid.get("url_base"),data:c.grid.get_url_data(),error:function(e){alert("Grid refresh failed")},success:function(e){var g=c.grid.get("embedded");var h=c.grid.get("insert");var f=$.parseJSON(e);f.embedded=g;f.insert=h;c.init_grid(f);c.$el.find(".loading-elt-overlay").hide()},complete:function(){c.grid.set({operation:undefined,item_ids:undefined})}})}})});
\ No newline at end of file
+jQuery.ajaxSettings.traditional=true;define(["mvc/grid/grid-model","mvc/grid/grid-template","mvc/ui"],function(b,a){return Backbone.View.extend({grid:null,initialize:function(c){this.setElement("#grid-container");if(c.use_panels){$("#center").css({padding:"10px",overflow:"auto"})}this.init_grid(c)},handle_refresh:function(c){if(c){if($.inArray("history",c)>-1){if(top.Galaxy&&top.Galaxy.currHistoryPanel){top.Galaxy.currHistoryPanel.loadCurrentHistory()}}}},init_grid:function(f){this.grid=new b(f);var e=this.grid.attributes;this.handle_refresh(e.refresh_frames);var d=this.grid.get("url_base");d=d.replace(/^.*\/\/[^\/]+/,"");this.grid.set("url_base",d);this.$el.html(a.grid(e));this.$el.find("#grid-table-header").html(a.header(e));this.$el.find("#grid-table-body").html(a.body(e));this.$el.find("#grid-table-footer").html(a.footer(e));if(e.message){this.$el.find("#grid-message").html(a.message(e));var c=this;if(e.use_hide_message){setTimeout(function(){c.$el.find("#grid-message").html("")},5000)}}this.init_grid_elements();this.init_grid_controls();init_refresh_on_change()},init_grid_controls:function(){var c=this;this.$el.find(".operation-button").each(function(){$(this).off();$(this).click(function(){c.submit_operation(this,operation.confirm);return false})});this.$el.find("input[type=text]").each(function(){$(this).off();$(this).click(function(){$(this).select()}).keyup(function(){$(this).css("font-style","normal")})});this.$el.find(".sort-link").each(function(){$(this).off();$(this).click(function(){c.set_sort_condition($(this).attr("sort_key"));return false})});this.$el.find(".text-filter-form").each(function(){$(this).off();$(this).submit(function(){var g=$(this).attr("column_key");var f=$("#input-"+g+"-filter");var h=f.val();f.val("");c.add_filter_condition(g,h);return false})});this.$el.find(".text-filter-val > a").each(function(){$(this).off();$(this).click(function(){$(this).parent().remove();c.remove_filter_condition($(this).attr("filter_key"),$(this).attr("filter_val"));return false})});this.$el.find(".categorical-filter > a").each(function(){$(this).off();$(this).click(function(){c.set_categorical_filter($(this).attr("filter_key"),$(this).attr("filter_val"));return false})});var e=this.$el.find("#input-tags-filter");if(e.length){e.autocomplete(this.grid.history_tag_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}var d=this.$el.find("#input-name-filter");if(d.length){d.autocomplete(this.grid.history_name_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}this.$el.find(".advanced-search-toggle").each(function(){$(this).off();$(this).click(function(){c.$el.find("#standard-search").slideToggle("fast");c.$el.find("#advanced-search").slideToggle("fast");return false})});this.$el.find("#check_all").off();this.$el.find("#check_all").on("click",function(){c.check_all_items()})},init_grid_elements:function(){this.$el.find(".grid").each(function(){var j=$(this).find("input.grid-row-select-checkbox");var i=$(this).find("span.grid-selected-count");var r=function(){i.text($(j).filter(":checked").length)};$(j).each(function(){$(this).change(r)});r()});if(this.$el.find(".community_rating_star").length!==0){this.$el.find(".community_rating_star").rating({})}var q=this.grid.attributes;var p=this;this.$el.find(".page-link > a").each(function(){$(this).click(function(){p.set_page($(this).attr("page_num"));return 
false})});this.$el.find(".use-inbound").each(function(){$(this).click(function(i){p.execute({href:$(this).attr("href"),inbound:true});return false})});this.$el.find(".use-outbound").each(function(){$(this).click(function(i){p.execute({href:$(this).attr("href")});return false})});var f=q.items.length;if(f==0){return}for(var k in q.items){var o=q.items[k];var l=this.$el.find("#grid-"+k+"-popup");l.off();var d=new PopupMenu(l);for(var h in q.operations){var e=q.operations[h];var m=e.label;var c=o.operation_config[m];var g=o.encode_id;if(c.allowed&&e.allow_popup){var n={html:e.label,href:c.url_args,target:c.target,confirmation_text:e.confirm,inbound:e.inbound};n.func=function(r){r.preventDefault();var j=$(r.target).html();var i=this.findItemByHtml(j);p.execute(i)};d.addItem(n)}}}},add_filter_condition:function(e,g){if(g===""){return false}this.grid.add_filter(e,g,true);var f=$(a.filter_element(e,g));var d=this;f.click(function(){$(this).remove();d.remove_filter_condition(e,g)});var c=this.$el.find("#"+e+"-filtering-criteria");c.append(f);this.go_page_one();this.execute()},remove_filter_condition:function(c,d){this.grid.remove_filter(c,d);this.go_page_one();this.execute()},set_sort_condition:function(g){var f=this.grid.get("sort_key");var e=g;if(f.indexOf(g)!==-1){if(f.substring(0,1)!=="-"){e="-"+g}}this.$el.find(".sort-arrow").remove();var d=(e.substring(0,1)=="-")?"↑":"↓";var c=$("<span>"+d+"</span>").addClass("sort-arrow");this.$el.find("#"+g+"-header").append(c);this.grid.set("sort_key",e);this.go_page_one();this.execute()},set_categorical_filter:function(e,g){var d=this.grid.get("categorical_filters")[e],f=this.grid.get("filters")[e];var c=this;this.$el.find("."+e+"-filter").each(function(){var k=$.trim($(this).text());var i=d[k];var j=i[e];if(j==g){$(this).empty();$(this).addClass("current-filter");$(this).append(k)}else{if(j==f){$(this).empty();var h=$('<a href="#">'+k+"</a>");h.click(function(){c.set_categorical_filter(e,j)});$(this).removeClass("current-filter");$(this).append(h)}}});this.grid.add_filter(e,g);this.go_page_one();this.execute()},set_page:function(c){var d=this;this.$el.find(".page-link").each(function(){var i=$(this).attr("id"),g=parseInt(i.split("-")[2],10),e=d.grid.get("cur_page"),h;if(g===c){h=$(this).children().text();$(this).empty();$(this).addClass("inactive-link");$(this).text(h)}else{if(g===e){h=$(this).text();$(this).empty();$(this).removeClass("inactive-link");var f=$('<a href="#">'+h+"</a>");f.click(function(){d.set_page(g)});$(this).append(f)}}});if(c==="all"){this.grid.set("cur_page",c)}else{this.grid.set("cur_page",parseInt(c,10))}this.execute()},submit_operation:function(d,g){var e=$(d).val();var f=this.$el.find('input[name="id"]:checked').length;if(!f>0){return false}var c=[];this.$el.find("input[name=id]:checked").each(function(){c.push($(this).val())});this.execute({operation:e,id:c,confirmation_text:g});return true},check_all_items:function(){var c=document.getElementById("check_all"),d=document.getElementsByTagName("input"),f=0,e;if(c.checked===true){for(e=0;e<d.length;e++){if(d[e].name.indexOf("id")!==-1){d[e].checked=true;f++}}}else{for(e=0;e<d.length;e++){if(d[e].name.indexOf("id")!==-1){d[e].checked=false}}}this.init_grid_elements()},go_page_one:function(){var c=this.grid.get("cur_page");if(c!==null&&c!==undefined&&c!=="all"){this.grid.set("cur_page",1)}},execute:function(l){var f=null;var e=null;var g=null;var c=null;var k=null;if(l){e=l.href;g=l.operation;f=l.id;c=l.confirmation_text;k=l.inbound;if(e!==undefined&&e.indexOf("operation=")!=-1){var 
j=e.split("?");if(j.length>1){var i=j[1];var d=i.split("&");for(var h=0;h<d.length;h++){if(d[h].indexOf("operation")!=-1){g=d[h].split("=")[1];g=g.replace(/\+/g," ")}else{if(d[h].indexOf("id")!=-1){f=d[h].split("=")[1]}}}}}}if(g&&f){if(c&&c!=""&&c!="None"&&c!="null"){if(!confirm(c)){return false}}g=g.toLowerCase();this.grid.set({operation:g,item_ids:f});if(this.grid.can_async_op(g)){this.update_grid()}else{this.go_to(k,e)}return false}if(e){this.go_to(k,e);return false}if(this.grid.get("async")){this.update_grid()}else{this.go_to(k,e)}return false},go_to:function(f,d){var e=this.grid.get("async");this.grid.set("async",false);advanced_search=this.$el.find("#advanced-search").is(":visible");this.grid.set("advanced_search",advanced_search);if(!d){d=this.grid.get("url_base")+"?"+$.param(this.grid.get_url_data())}this.grid.set({operation:undefined,item_ids:undefined,async:e});if(f){var c=$(".grid-header").closest(".inbound");if(c.length!==0){c.load(d);return}}window.location=d},update_grid:function(){var d=(this.grid.get("operation")?"POST":"GET");this.$el.find(".loading-elt-overlay").show();var c=this;$.ajax({type:d,url:c.grid.get("url_base"),data:c.grid.get_url_data(),error:function(e){alert("Grid refresh failed")},success:function(e){var g=c.grid.get("embedded");var h=c.grid.get("insert");var f=$.parseJSON(e);f.embedded=g;f.insert=h;c.init_grid(f);c.$el.find(".loading-elt-overlay").hide()},complete:function(){c.grid.set({operation:undefined,item_ids:undefined})}})}})});
\ No newline at end of file
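Because the file above is packed, the actual change is easy to miss: the only functional difference between the two blobs appears to be one new branch near the end of the grid view's execute() method, which lets a plain href (one from which no operation/id pair was parsed) trigger direct navigation instead of falling through to the async-refresh check. A minimal unpacked sketch of that tail follows, assuming readable names (view, options, href, operation, id, inbound) in place of the packed single-letter variables; the confirmation prompt and grid.set() bookkeeping of the real method are omitted so only the branch order is visible.

    // Sketch (not the shipped source) of the tail of GridView.execute()
    // after this changeset. `view` stands in for the Backbone view instance.
    function executeTail( view, options ) {
        var href      = options && options.href;
        var operation = options && options.operation;
        var id        = options && options.id;
        var inbound   = options && options.inbound;

        if ( operation && id ) {
            // Unchanged path: run the operation asynchronously when the grid
            // supports it, otherwise navigate to the target URL.
            if ( view.grid.can_async_op( operation.toLowerCase() ) ) {
                view.update_grid();
            } else {
                view.go_to( inbound, href );
            }
            return false;
        }

        // NEW in this commit: a bare href with no operation/id now navigates
        // directly instead of falling through to the async-refresh check below.
        if ( href ) {
            view.go_to( inbound, href );
            return false;
        }

        // Unchanged fallback: async refresh when enabled, else full navigation.
        if ( view.grid.get( 'async' ) ) {
            view.update_grid();
        } else {
            view.go_to( inbound, href );
        }
        return false;
    }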
Repository URL: https://bitbucket.org/galaxy/galaxy-central/