commit/galaxy-central: 3 new changesets
3 new commits in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/7637cfbea32e/ Changeset: 7637cfbea32e Branch: job-search User: Kyle Ellrott Date: 2014-01-29 21:51:37 Summary: Default Merge Affected #: 429 files diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 buildbot_setup.sh --- a/buildbot_setup.sh +++ b/buildbot_setup.sh @@ -4,28 +4,6 @@ : ${HOSTTYPE:=`uname -m`} -# link to HYPHY is arch-dependent -case "$OSTYPE" in - linux-gnu) - kernel=`uname -r | cut -f1,2 -d.` - HYPHY="/galaxy/software/linux$kernel-$HOSTTYPE/hyphy" - ;; - darwin*) - this_minor=`uname -r | awk -F. '{print ($1-4)}'` - machine=`machine` - for minor in `jot - 3 $this_minor 1`; do - HYPHY="/galaxy/software/macosx10.$minor-$machine/hyphy" - [ -d "$HYPHY" ] && break - done - [ ! -d "$HYPHY" ] && unset HYPHY - ;; - solaris2.10) - # For the psu-production builder which is Solaris, but jobs run on a - # Linux cluster - HYPHY="/galaxy/software/linux2.6-x86_64/hyphy" - ;; -esac - LINKS=" /galaxy/data/location/add_scores.loc /galaxy/data/location/all_fasta.loc @@ -121,12 +99,6 @@ ln -sf $link tool-data done - if [ -d "$HYPHY" ]; then - echo "Linking $HYPHY" - rm -f tool-data/HYPHY - ln -sf $HYPHY tool-data/HYPHY - fi - if [ -d "$JARS" ]; then echo "Linking $JARS" rm -f tool-data/shared/jars diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 config/plugins/visualizations/charts/static/app.js --- a/config/plugins/visualizations/charts/static/app.js +++ b/config/plugins/visualizations/charts/static/app.js @@ -37,17 +37,27 @@ this.chart_view = new ChartView(this); this.viewport_view = new ViewportView(this); - // portlet - this.portlet = new Portlet({icon : 'fa-bar-chart-o', label : 'Charts'}); + // append view port to charts viewer + this.charts_view.append(this.viewport_view.$el); + + // create portlet + if (!this.options.config.widget) { + this.portlet = new Portlet({icon : 'fa-bar-chart-o', label : 'Charts'}); + } else { + this.portlet = $('<div></div>'); + } + + // append views this.portlet.append(this.charts_view.$el); this.portlet.append(this.group_view.$el); this.portlet.append(this.chart_view.$el); - // append main - this.charts_view.append(this.viewport_view.$el); - - // set elements - this.setElement(this.portlet.$el); + // set element + if (!this.options.config.widget) { + this.setElement(this.portlet.$el); + } else { + this.setElement(this.portlet); + } // hide views this.group_view.$el.hide(); diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 config/plugins/visualizations/charts/static/library/portlet.js --- a/config/plugins/visualizations/charts/static/library/portlet.js +++ b/config/plugins/visualizations/charts/static/library/portlet.js @@ -116,21 +116,38 @@ showOperation: function(id) { this.$operations.find('#' + id).show(); }, - + + // set operation + setOperation: function(id, callback) { + var $el = this.$operations.find('#' + id); + $el.off('click'); + $el.on('click', callback); + }, + + // label + label: function(new_label) { + var $el = this.$el.find('#label'); + if (new_label) { + $el.html(new_label); + } + return $el.html(); + }, + // fill regular modal template template: function(options) { var tmpl = '<div class="toolForm">'; - if (options.label) { - tmpl += '<div id="title" class="toolFormTitle" style="padding-bottom: 7px;">' + + if (options.label || options.icon) { + tmpl += '<div id="title" class="toolFormTitle" style="overflow:hidden;">' + '<div 
id="operations" style="float: right;"></div>' + - '<div>'; + '<div style="overflow: hidden">'; if (options.icon) - tmpl += '<i style="font-size: 1.2em" class="icon fa ' + options.icon + '"> </i>'; + tmpl += '<i style="padding-top: 3px; float: left; font-size: 1.2em" class="icon fa ' + options.icon + '"> </i>'; - tmpl += options.label + - '</div>' + + tmpl += '<div id="label" style="padding-top: 2px; float: left;">' + options.label + '</div>'; + + tmpl += '</div>' + '</div>'; } tmpl += '<div id="body" class="toolFormBody">'; diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 config/plugins/visualizations/charts/static/models/datasets.js --- a/config/plugins/visualizations/charts/static/models/datasets.js +++ b/config/plugins/visualizations/charts/static/models/datasets.js @@ -6,9 +6,9 @@ { // options optionsDefault : { - limit : 20, + limit : 500, pace : 1000, - max : 5 + max : 2 }, // list diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 config/plugins/visualizations/charts/static/views/charts.js --- a/config/plugins/visualizations/charts/static/views/charts.js +++ b/config/plugins/visualizations/charts/static/views/charts.js @@ -45,25 +45,6 @@ self.app.chart_view.$el.show(); } }), - 'edit' : new Ui.ButtonIcon({ - icon : 'fa-pencil', - tooltip: 'Edit', - onclick: function() { - // check if element has been selected - var chart_id = self.table.value(); - if (!chart_id) { - return; - } - - // get chart - var chart = self.app.charts.get(chart_id); - self.app.chart.copy(chart); - - // show edit - self.$el.hide(); - self.app.chart_view.$el.show(); - } - }), 'delete' : new Ui.ButtonIcon({ icon : 'fa-minus', tooltip: 'Delete', @@ -100,7 +81,9 @@ this.portlet.append(this.table.$el); // append to main - this.$el.append(this.portlet.$el); + if (!this.app.options.config.widget) { + this.$el.append(this.portlet.$el); + } // events var self = this; diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 config/plugins/visualizations/charts/static/views/viewport.js --- a/config/plugins/visualizations/charts/static/views/viewport.js +++ b/config/plugins/visualizations/charts/static/views/viewport.js @@ -25,8 +25,16 @@ // add table to portlet this.portlet = new Portlet({ + label : '', + icon : 'fa-signal', height : this.options.height, - overflow : 'hidden' + overflow : 'hidden', + operations : { + 'edit' : new Ui.ButtonIcon({ + icon : 'fa-gear', + tooltip : 'Configure' + }) + } }); // set this element @@ -53,15 +61,30 @@ }, // show - show: function(id) { + show: function(chart_id) { // hide all this.$el.find('svg').hide(); - var item = this.list[id]; + // identify selected item from list + var item = this.list[chart_id]; if (item) { // show selected chart this.$el.find(item.svg_id).show(); + // get chart + var chart = self.app.charts.get(chart_id); + + // update portlet + this.portlet.label(chart.get('title')); + this.portlet.setOperation('edit', function() { + // get chart + self.app.chart.copy(chart); + + // show edit + self.app.charts_view.$el.hide(); + self.app.chart_view.$el.show(); + }); + // this trigger d3 update events $(window).trigger('resize'); } @@ -69,11 +92,17 @@ // add _addChart: function(chart) { + // link this + var self = this; + + // backup chart details + var chart_id = chart.id; + // make sure that svg does not exist already - this._removeChart(chart.id); + this._removeChart(chart_id); // create id - var svg_id = '#svg_' + chart.id; + var svg_id = '#svg_' + 
chart_id; // create element var chart_el = $(this._template({id: svg_id, height : this.options.height})); @@ -82,7 +111,7 @@ this.portlet.append(chart_el); // backup id - this.list[chart.id] = { + this.list[chart_id] = { svg_id : svg_id } @@ -129,8 +158,8 @@ chart_index++; }); - // add view - self.list[chart.id].view = view; + // show + self.show(chart_id); }); }, diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 doc/source/lib/galaxy.tools.util.rst --- a/doc/source/lib/galaxy.tools.util.rst +++ b/doc/source/lib/galaxy.tools.util.rst @@ -9,14 +9,6 @@ :undoc-members: :show-inheritance: -:mod:`hyphy_util` Module ------------------------- - -.. automodule:: galaxy.tools.util.hyphy_util - :members: - :undoc-members: - :show-inheritance: - :mod:`maf_utilities` Module --------------------------- diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 install_and_test_tool_shed_repositories.sh --- a/install_and_test_tool_shed_repositories.sh +++ b/install_and_test_tool_shed_repositories.sh @@ -48,6 +48,14 @@ fi fi +if [ -z $GALAXY_INSTALL_TEST_SHED_TOOL_PATH ] ; then + export GALAXY_INSTALL_TEST_SHED_TOOL_PATH='/tmp/shed_tools' +fi + +if [ ! -d $GALAXY_INSTALL_TEST_SHED_TOOL_PATH ] ; then + mkdir -p $GALAXY_INSTALL_TEST_SHED_TOOL_PATH +fi + test_tool_dependency_definitions () { # Test installation of repositories of type tool_dependency_definition. if [ -f $GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR/stage_1_complete ] ; then diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/config.py --- a/lib/galaxy/config.py +++ b/lib/galaxy/config.py @@ -58,7 +58,7 @@ self.openid_consumer_cache_path = resolve_path( kwargs.get( "openid_consumer_cache_path", "database/openid_consumer_cache" ), self.root ) self.cookie_path = kwargs.get( "cookie_path", "/" ) self.genome_data_path = kwargs.get( "genome_data_path", "tool-data/genome" ) - self.rsync_url = kwargs.get( "rsync_url", "rsync://scofield.bx.psu.edu/indexes" ) + self.rsync_url = kwargs.get( "rsync_url", "rsync://datacache.galaxyproject.org/indexes" ) # Galaxy OpenID settings self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) ) self.openid_config = kwargs.get( 'openid_config_file', 'openid_conf.xml' ) @@ -122,6 +122,7 @@ self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" 
) self.retry_metadata_internally = string_as_bool( kwargs.get( "retry_metadata_internally", "True" ) ) self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) ) + self.normalize_remote_user_email = string_as_bool( kwargs.get( "normalize_remote_user_email", "False" ) ) self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None ) self.remote_user_header = kwargs.get( "remote_user_header", 'HTTP_REMOTE_USER' ) self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None ) @@ -151,6 +152,7 @@ h, m, s = [ int( v ) for v in self.job_walltime.split( ':' ) ] self.job_walltime_delta = timedelta( 0, s, 0, 0, m, h ) self.admin_users = kwargs.get( "admin_users", "" ) + self.reset_password_length = int( kwargs.get('reset_password_length', '15') ) self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-announce-join@bx.psu.edu") self.error_email_to = kwargs.get( 'error_email_to', None ) self.activation_email = kwargs.get( 'activation_email', None ) diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/datatypes/converters/bam_to_bai.xml --- a/lib/galaxy/datatypes/converters/bam_to_bai.xml +++ b/lib/galaxy/datatypes/converters/bam_to_bai.xml @@ -1,4 +1,7 @@ <tool id="CONVERTER_Bam_Bai_0" name="Bam to Bai" hidden="true"> + <requirements> + <requirement type="package">samtools</requirement> + </requirements><command>samtools index $input1 $output1</command><inputs><page> @@ -10,4 +13,4 @@ </outputs><help></help> -</tool> \ No newline at end of file +</tool> diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/datatypes/converters/sam_to_bam.xml --- a/lib/galaxy/datatypes/converters/sam_to_bam.xml +++ b/lib/galaxy/datatypes/converters/sam_to_bam.xml @@ -5,6 +5,9 @@ To fix this: (a) merge sam_to_bam tool in tools with this conversion (like fasta_to_len conversion); and (b) define a datatype-specific way to set converter parameters. --> + <requirements> + <requirement type="package">samtools</requirement> + </requirements><command interpreter="python">sam_to_bam.py $input1 $output</command><inputs><param name="input1" type="data" format="sam" label="SAM file"/> diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/datatypes/tabular.py --- a/lib/galaxy/datatypes/tabular.py +++ b/lib/galaxy/datatypes/tabular.py @@ -275,7 +275,7 @@ cursor = f.read(1) return to_json_string( { 'ck_data': util.unicodify( ck_data ), 'ck_index': ck_index + 1 } ) - def display_data(self, trans, dataset, preview=False, filename=None, to_ext=None, chunk=None): + def display_data(self, trans, dataset, preview=False, filename=None, to_ext=None, chunk=None, **kwd): preview = util.string_as_bool( preview ) if chunk: return self.get_chunk(trans, dataset, chunk) diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/jobs/__init__.py --- a/lib/galaxy/jobs/__init__.py +++ b/lib/galaxy/jobs/__init__.py @@ -789,8 +789,8 @@ # If the job was deleted, call tool specific fail actions (used for e.g. 
external metadata) and clean up if self.tool: self.tool.job_failed( self, message, exception ) - if self.app.config.cleanup_job == 'always' or (self.app.config.cleanup_job == 'onsuccess' and job.state == job.states.DELETED): - self.cleanup() + delete_files = self.app.config.cleanup_job == 'always' or (self.app.config.cleanup_job == 'onsuccess' and job.state == job.states.DELETED) + self.cleanup( delete_files=delete_files ) def pause( self, job=None, message=None ): if job is None: @@ -883,11 +883,10 @@ # We set final_job_state to use for dataset management, but *don't* set # job.state until after dataset collection to prevent history issues - if job.states.ERROR != job.state: - if ( self.check_tool_output( stdout, stderr, tool_exit_code, job )): - final_job_state = job.states.OK - else: - final_job_state = job.states.ERROR + if ( self.check_tool_output( stdout, stderr, tool_exit_code, job ) ): + final_job_state = job.states.OK + else: + final_job_state = job.states.ERROR if self.version_string_cmd: version_filename = self.get_version_string_path() @@ -1067,22 +1066,26 @@ self.sa_session.flush() log.debug( 'job %d ended' % self.job_id ) - if self.app.config.cleanup_job == 'always' or ( not stderr and self.app.config.cleanup_job == 'onsuccess' ): - self.cleanup() + delete_files = self.app.config.cleanup_job == 'always' or ( job.state == job.states.OK and self.app.config.cleanup_job == 'onsuccess' ) + self.cleanup( delete_files=delete_files ) def check_tool_output( self, stdout, stderr, tool_exit_code, job ): return check_output( self.tool, stdout, stderr, tool_exit_code, job ) - def cleanup( self ): - # remove temporary files + def cleanup( self, delete_files=True ): + # At least one of these tool cleanup actions (job import), is needed + # for thetool to work properly, that is why one might want to run + # cleanup but not delete files. 
try: - for fname in self.extra_filenames: - os.remove( fname ) - self.external_output_metadata.cleanup_external_metadata( self.sa_session ) + if delete_files: + for fname in self.extra_filenames: + os.remove( fname ) + self.external_output_metadata.cleanup_external_metadata( self.sa_session ) galaxy.tools.imp_exp.JobExportHistoryArchiveWrapper( self.job_id ).cleanup_after_job( self.sa_session ) galaxy.tools.imp_exp.JobImportHistoryArchiveWrapper( self.app, self.job_id ).cleanup_after_job() galaxy.tools.genome_index.GenomeIndexToolWrapper( self.job_id ).postprocessing( self.sa_session, self.app ) - self.app.object_store.delete(self.get_job(), base_dir='job_work', entire_dir=True, dir_only=True, extra_dir=str(self.job_id)) + if delete_files: + self.app.object_store.delete(self.get_job(), base_dir='job_work', entire_dir=True, dir_only=True, extra_dir=str(self.job_id)) except: log.exception( "Unable to cleanup job %d" % self.job_id ) @@ -1520,8 +1523,8 @@ # if the job was deleted, don't finish it if task.state == task.states.DELETED: # Job was deleted by an administrator - if self.app.config.cleanup_job in ( 'always', 'onsuccess' ): - self.cleanup() + delete_files = self.app.config.cleanup_job in ( 'always', 'onsuccess' ) + self.cleanup( delete_files=delete_files ) return elif task.state == task.states.ERROR: self.fail( task.info ) diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -35,7 +35,9 @@ PasswordField, SelectField, TextArea, TextField, WorkflowField, WorkflowMappingField) from sqlalchemy.orm import object_session +from sqlalchemy.orm import joinedload from sqlalchemy.sql.expression import func +from sqlalchemy import not_ log = logging.getLogger( __name__ ) @@ -898,6 +900,22 @@ rval = galaxy.datatypes.data.nice_size( rval ) return rval + @property + def active_datasets_children_and_roles( self ): + if not hasattr(self, '_active_datasets_children_and_roles'): + db_session = object_session( self ) + query = db_session.query( HistoryDatasetAssociation ).filter( HistoryDatasetAssociation.table.c.history_id == self.id ). \ + filter( not_( HistoryDatasetAssociation.deleted ) ). \ + order_by( HistoryDatasetAssociation.table.c.hid.asc() ). \ + options( + joinedload("children"), + joinedload("dataset"), + joinedload("dataset.actions"), + joinedload("dataset.actions.role"), + ) + self._active_datasets_children_and_roles = query.all() + return self._active_datasets_children_and_roles + def contents_iter( self, **kwds ): """ Fetch filtered list of contents of history. 
@@ -909,10 +927,10 @@ query = query.order_by( HistoryDatasetAssociation.table.c.hid.asc() ) deleted = galaxy.util.string_as_bool_or_none( kwds.get( 'deleted', None ) ) if deleted is not None: - query = query.filter( HistoryDatasetAssociation.deleted == bool( kwds['deleted'] ) ) + query = query.filter( HistoryDatasetAssociation.deleted == deleted ) visible = galaxy.util.string_as_bool_or_none( kwds.get( 'visible', None ) ) if visible is not None: - query = query.filter( HistoryDatasetAssociation.visible == bool( kwds['visible'] ) ) + query = query.filter( HistoryDatasetAssociation.visible == visible ) if 'ids' in kwds: ids = kwds['ids'] max_in_filter_length = kwds.get('max_in_filter_length', MAX_IN_FILTER_LENGTH) diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/tools/actions/__init__.py --- a/lib/galaxy/tools/actions/__init__.py +++ b/lib/galaxy/tools/actions/__init__.py @@ -213,7 +213,7 @@ db_datasets[ "chromInfo" ] = db_dataset incoming[ "chromInfo" ] = db_dataset.file_name else: - # -- Get chrom_info from either a custom or built-in build. -- + # -- Get chrom_info (len file) from either a custom or built-in build. -- chrom_info = None if trans.user and ( 'dbkeys' in trans.user.preferences ) and ( input_dbkey in from_json_string( trans.user.preferences[ 'dbkeys' ] ) ): @@ -226,9 +226,13 @@ # condition below to avoid getting chrom_info when running the # fasta-to-len converter. if 'fasta' in custom_build_dict and tool.id != 'CONVERTER_fasta_to_len': + # Build is defined by fasta; get len file, which is obtained from converting fasta. build_fasta_dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( custom_build_dict[ 'fasta' ] ) chrom_info = build_fasta_dataset.get_converted_dataset( trans, 'len' ).file_name - + elif 'len' in custom_build_dict: + # Build is defined by len file, so use it. + chrom_info = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( custom_build_dict[ 'len' ] ).file_name + if not chrom_info: # Default to built-in build. chrom_info = os.path.join( trans.app.config.len_file_path, "%s.len" % input_dbkey ) diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/tools/actions/upload_common.py --- a/lib/galaxy/tools/actions/upload_common.py +++ b/lib/galaxy/tools/actions/upload_common.py @@ -332,7 +332,7 @@ is_binary=is_binary, link_data_only=link_data_only, uuid=uuid_str, - to_posix_lines=uploaded_dataset.to_posix_lines, + to_posix_lines=getattr(uploaded_dataset, "to_posix_lines", True), space_to_tab=uploaded_dataset.space_to_tab, in_place=trans.app.config.external_chown_script is None, path=uploaded_dataset.path ) diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/tools/imp_exp/__init__.py --- a/lib/galaxy/tools/imp_exp/__init__.py +++ b/lib/galaxy/tools/imp_exp/__init__.py @@ -1,4 +1,8 @@ -import os, shutil, logging, tempfile, json +import os +import shutil +import logging +import tempfile +import json from galaxy import model from galaxy.tools.parameters.basic import UnvalidatedValue from galaxy.web.framework.helpers import to_unicode @@ -8,6 +12,7 @@ log = logging.getLogger(__name__) + def load_history_imp_exp_tools( toolbox ): """ Adds tools for importing/exporting histories to archives. 
""" # Use same process as that used in load_external_metadata_tool; see that @@ -42,6 +47,7 @@ toolbox.tools_by_id[ history_imp_tool.id ] = history_imp_tool log.debug( "Loaded history import tool: %s", history_imp_tool.id ) + class JobImportHistoryArchiveWrapper( object, UsesHistoryMixin, UsesAnnotations ): """ Class provides support for performing jobs that import a history from @@ -144,23 +150,23 @@ metadata = dataset_attrs['metadata'] # Create dataset and HDA. - hda = model.HistoryDatasetAssociation( name = dataset_attrs['name'].encode( 'utf-8' ), - extension = dataset_attrs['extension'], - info = dataset_attrs['info'].encode( 'utf-8' ), - blurb = dataset_attrs['blurb'], - peek = dataset_attrs['peek'], - designation = dataset_attrs['designation'], - visible = dataset_attrs['visible'], - dbkey = metadata['dbkey'], - metadata = metadata, - history = new_history, - create_dataset = True, - sa_session = self.sa_session ) + hda = model.HistoryDatasetAssociation( name=dataset_attrs['name'].encode( 'utf-8' ), + extension=dataset_attrs['extension'], + info=dataset_attrs['info'].encode( 'utf-8' ), + blurb=dataset_attrs['blurb'], + peek=dataset_attrs['peek'], + designation=dataset_attrs['designation'], + visible=dataset_attrs['visible'], + dbkey=metadata['dbkey'], + metadata=metadata, + history=new_history, + create_dataset=True, + sa_session=self.sa_session ) hda.state = hda.states.OK self.sa_session.add( hda ) self.sa_session.flush() - new_history.add_dataset( hda, genome_build = None ) - hda.hid = dataset_attrs['hid'] # Overwrite default hid set when HDA added to history. + new_history.add_dataset( hda, genome_build=None ) + hda.hid = dataset_attrs['hid'] # Overwrite default hid set when HDA added to history. # TODO: Is there a way to recover permissions? Is this needed? #permissions = trans.app.security_agent.history_get_default_permissions( new_history ) #trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions ) @@ -273,6 +279,7 @@ jiha.job.stderr += "Error cleaning up history import job: %s" % e self.sa_session.flush() + class JobExportHistoryArchiveWrapper( object, UsesHistoryMixin, UsesAnnotations ): """ Class provides support for performing jobs that export a history to an @@ -317,23 +324,23 @@ """ Encode an HDA, default encoding for everything else. 
""" if isinstance( obj, trans.app.model.HistoryDatasetAssociation ): return { - "__HistoryDatasetAssociation__" : True, - "create_time" : obj.create_time.__str__(), - "update_time" : obj.update_time.__str__(), - "hid" : obj.hid, - "name" : to_unicode( obj.name ), - "info" : to_unicode( obj.info ), - "blurb" : obj.blurb, - "peek" : obj.peek, - "extension" : obj.extension, - "metadata" : prepare_metadata( dict( obj.metadata.items() ) ), - "parent_id" : obj.parent_id, - "designation" : obj.designation, - "deleted" : obj.deleted, - "visible" : obj.visible, - "file_name" : obj.file_name, - "annotation" : to_unicode( getattr( obj, 'annotation', '' ) ), - "tags" : get_item_tag_dict( obj ), + "__HistoryDatasetAssociation__": True, + "create_time": obj.create_time.__str__(), + "update_time": obj.update_time.__str__(), + "hid": obj.hid, + "name": to_unicode( obj.name ), + "info": to_unicode( obj.info ), + "blurb": obj.blurb, + "peek": obj.peek, + "extension": obj.extension, + "metadata": prepare_metadata( dict( obj.metadata.items() ) ), + "parent_id": obj.parent_id, + "designation": obj.designation, + "deleted": obj.deleted, + "visible": obj.visible, + "file_name": obj.file_name, + "annotation": to_unicode( getattr( obj, 'annotation', '' ) ), + "tags": get_item_tag_dict( obj ), } if isinstance( obj, UnvalidatedValue ): return obj.__str__() @@ -347,15 +354,15 @@ # Write history attributes to file. history = jeha.history history_attrs = { - "create_time" : history.create_time.__str__(), - "update_time" : history.update_time.__str__(), - "name" : to_unicode( history.name ), - "hid_counter" : history.hid_counter, - "genome_build" : history.genome_build, - "annotation" : to_unicode( self.get_item_annotation_str( trans.sa_session, history.user, history ) ), - "tags" : get_item_tag_dict( history ), - "includes_hidden_datasets" : include_hidden, - "includes_deleted_datasets" : include_deleted + "create_time": history.create_time.__str__(), + "update_time": history.update_time.__str__(), + "name": to_unicode( history.name ), + "hid_counter": history.hid_counter, + "genome_build": history.genome_build, + "annotation": to_unicode( self.get_item_annotation_str( trans.sa_session, history.user, history ) ), + "tags": get_item_tag_dict( history ), + "includes_hidden_datasets": include_hidden, + "includes_deleted_datasets": include_deleted } history_attrs_filename = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name history_attrs_out = open( history_attrs_filename, 'w' ) @@ -391,7 +398,7 @@ # Get the associated job, if any. If this hda was copied from another, # we need to find the job that created the origial hda job_hda = hda - while job_hda.copied_from_history_dataset_association: #should this check library datasets as well? + while job_hda.copied_from_history_dataset_association: # should this check library datasets as well? job_hda = job_hda.copied_from_history_dataset_association if not job_hda.creating_job_associations: # No viable HDA found. @@ -472,4 +479,3 @@ shutil.rmtree( temp_dir ) except Exception, e: log.debug( 'Error deleting directory containing attribute files (%s): %s' % ( temp_dir, e ) ) - diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py --- a/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py +++ b/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py @@ -6,19 +6,25 @@ --[url|file] source type, either a URL or a file. 
""" -import sys, optparse, tarfile, tempfile, urllib2, math +import sys +import optparse +import tarfile +import tempfile +import urllib2 +import math # Set max size of archive/file that will be handled to be 100 GB. This is # arbitrary and should be adjusted as needed. MAX_SIZE = 100 * math.pow( 2, 30 ) + def url_to_file( url, dest_file ): """ Transfer a file from a remote URL to a temporary file. """ try: url_reader = urllib2.urlopen( url ) - CHUNK = 10 * 1024 # 10k + CHUNK = 10 * 1024 # 10k total = 0 fp = open( dest_file, 'wb') while True: @@ -35,6 +41,7 @@ print "Exception getting file from URL: %s" % e, sys.stderr return None + def unpack_archive( archive_file, dest_dir ): """ Unpack a tar and/or gzipped archive into a destination directory. @@ -63,4 +70,4 @@ # Unpack archive. unpack_archive( archive_file, dest_dir ) except Exception, e: - print "Error unpacking tar/gz archive: %s" % e, sys.stderr \ No newline at end of file + print "Error unpacking tar/gz archive: %s" % e, sys.stderr diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/tools/parameters/basic.py --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -1653,7 +1653,7 @@ field.add_option( "%s: (as %s) %s" % ( hid, target_ext, hda_name ), hda.id, selected ) # Also collect children via association object dataset_collector( hda.children, hid ) - dataset_collector( history.active_datasets, None ) + dataset_collector( history.active_datasets_children_and_roles, None ) some_data = bool( field.options ) if some_data: if value is None or len( field.options ) == 1: @@ -1693,11 +1693,6 @@ pass # no valid options def dataset_collector( datasets ): - def is_convertable( dataset ): - target_ext, converted_dataset = dataset.find_conversion_destination( self.formats ) - if target_ext is not None: - return True - return False for i, data in enumerate( datasets ): if data.visible and not data.deleted and data.state not in [data.states.ERROR, data.states.DISCARDED]: is_valid = False @@ -1714,7 +1709,7 @@ most_recent_dataset.append(data) # Also collect children via association object dataset_collector( data.children ) - dataset_collector( history.datasets ) + dataset_collector( history.active_datasets_children_and_roles ) most_recent_dataset.reverse() if already_used is not None: for val in most_recent_dataset: diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/tools/parameters/grouping.py --- a/lib/galaxy/tools/parameters/grouping.py +++ b/lib/galaxy/tools/parameters/grouping.py @@ -361,6 +361,7 @@ 'filename' : os.path.basename( ftp_file ) } file_bunch = get_data_file_filename( ftp_data_file, override_name = name, override_info = info ) if file_bunch.path: + file_bunch.to_posix_lines = to_posix_lines file_bunch.space_to_tab = space_to_tab rval.append( file_bunch ) return rval diff -r 8d408c81c6c4abafce7461cab6c5e4b677e238d5 -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 lib/galaxy/tools/util/hyphy_util.py --- a/lib/galaxy/tools/util/hyphy_util.py +++ /dev/null @@ -1,1163 +0,0 @@ -#Dan Blankenberg -#Contains file contents and helper methods for HYPHY configurations -import tempfile, os - -def get_filled_temp_filename(contents): - fh = tempfile.NamedTemporaryFile('w') - filename = fh.name - fh.close() - fh = open(filename, 'w') - fh.write(contents) - fh.close() - return filename - -NJ_tree_shared_ibf = """ -COUNT_GAPS_IN_FREQUENCIES = 0; -methodIndex = 1; - 
-/*-----------------------------------------------------------------------------------------------------------------------------------------*/ - -function InferTreeTopology(verbFlag) -{ - distanceMatrix = {ds.species,ds.species}; - - MESSAGE_LOGGING = 0; - ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"chooseDistanceFormula.def"); - InitializeDistances (0); - - for (i = 0; i<ds.species; i=i+1) - { - for (j = i+1; j<ds.species; j = j+1) - { - distanceMatrix[i][j] = ComputeDistanceFormula (i,j); - } - } - - MESSAGE_LOGGING = 1; - cladesMade = 1; - - - if (ds.species == 2) - { - d1 = distanceMatrix[0][1]/2; - treeNodes = {{0,1,d1__}, - {1,1,d1__}, - {2,0,0}}; - - cladesInfo = {{2,0}}; - } - else - { - if (ds.species == 3) - { - /* generate least squares estimates here */ - - d1 = (distanceMatrix[0][1]+distanceMatrix[0][2]-distanceMatrix[1][2])/2; - d2 = (distanceMatrix[0][1]-distanceMatrix[0][2]+distanceMatrix[1][2])/2; - d3 = (distanceMatrix[1][2]+distanceMatrix[0][2]-distanceMatrix[0][1])/2; - - treeNodes = {{0,1,d1__}, - {1,1,d2__}, - {2,1,d3__} - {3,0,0}}; - - cladesInfo = {{3,0}}; - } - else - { - njm = (distanceMatrix > methodIndex)>=ds.species; - - treeNodes = {2*(ds.species+1),3}; - cladesInfo = {ds.species-1,2}; - - for (i=Rows(treeNodes)-1; i>=0; i=i-1) - { - treeNodes[i][0] = njm[i][0]; - treeNodes[i][1] = njm[i][1]; - treeNodes[i][2] = njm[i][2]; - } - - for (i=Rows(cladesInfo)-1; i>=0; i=i-1) - { - cladesInfo[i][0] = njm[i][3]; - cladesInfo[i][1] = njm[i][4]; - } - - njm = 0; - } - } - return 1.0; -} - -/*-----------------------------------------------------------------------------------------------------------------------------------------*/ - -function TreeMatrix2TreeString (doLengths) -{ - treeString = ""; - p = 0; - k = 0; - m = treeNodes[0][1]; - n = treeNodes[0][0]; - treeString*(Rows(treeNodes)*25); - - while (m) - { - if (m>p) - { - if (p) - { - treeString*","; - } - for (j=p;j<m;j=j+1) - { - treeString*"("; - } - } - else - { - if (m<p) - { - for (j=m;j<p;j=j+1) - { - treeString*")"; - } - } - else - { - treeString*","; - } - } - if (n<ds.species) - { - GetString (nodeName, ds, n); - if (doLengths != 1) - { - treeString*nodeName; - } - else - { - treeString*taxonNameMap[nodeName]; - } - } - if (doLengths>.5) - { - nodeName = ":"+treeNodes[k][2]; - treeString*nodeName; - } - k=k+1; - p=m; - n=treeNodes[k][0]; - m=treeNodes[k][1]; - } - - for (j=m;j<p;j=j+1) - { - treeString*")"; - } - - treeString*0; - return treeString; -} -""" - -def get_NJ_tree (filename): - return """ -DISTANCE_PROMPTS = 1; -ExecuteAFile ("%s"); - -DataSet ds = ReadDataFile (PROMPT_FOR_FILE); -DataSetFilter filteredData = CreateFilter (ds,1); - -/* do sequence to branch map */ - -taxonNameMap = {}; - -for (k=0; k<ds.species; k=k+1) -{ - GetString (thisName, ds,k); - shortName = (thisName^{{"\\\\..+",""}})&&1; - taxonNameMap[shortName] = thisName; - SetParameter (ds,k,shortName); -} - -DataSetFilter filteredData = CreateFilter (ds,1); -InferTreeTopology (0); -treeString = TreeMatrix2TreeString (1); - -fprintf (PROMPT_FOR_FILE, CLEAR_FILE, treeString); -fscanf (stdin, "String", ps_file); - -if (Abs(ps_file)) -{ - treeString = TreeMatrix2TreeString (2); - UseModel (USE_NO_MODEL); - Tree givenTree = treeString; - baseHeight = TipCount (givenTree)*28; - TREE_OUTPUT_OPTIONS = {}; - TREE_OUTPUT_OPTIONS["__FONT_SIZE__"] = 14; - baseWidth = 0; - treeAVL = givenTree^0; - drawLetter = "/drawletter {"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$4+" -"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$2+ 
" show} def\\n"; - for (k3 = 1; k3 < Abs(treeAVL); k3=k3+1) - { - nodeName = (treeAVL[k3])["Name"]; - if(Abs((treeAVL[k3])["Children"]) == 0) - { - mySpecs = {}; - mySpecs ["TREE_OUTPUT_BRANCH_LABEL"] = "(" + taxonNameMap[nodeName] + ") drawLetter"; - baseWidth = Max (baseWidth, (treeAVL[k3])["Depth"]); - } - } - baseWidth = 40*baseWidth; - - fprintf (ps_file, CLEAR_FILE, drawLetter, PSTreeString (givenTree, "STRING_SUPPLIED_LENGTHS",{{baseWidth,baseHeight}})); -} -""" % (filename) - -def get_NJ_treeMF (filename): - return """ -ExecuteAFile ("%s"); - -VERBOSITY_LEVEL = -1; -fscanf (PROMPT_FOR_FILE, "Lines", inLines); - -_linesIn = Columns (inLines); -isomorphicTreesBySequenceCount = {}; - -/*---------------------------------------------------------*/ - -_currentGene = 1; -_currentState = 0; -geneSeqs = ""; -geneSeqs * 128; - -fprintf (PROMPT_FOR_FILE, CLEAR_FILE, KEEP_OPEN); -treeOutFile = LAST_FILE_PATH; - -fscanf (stdin,"String", ps_file); -if (Abs(ps_file)) -{ - fprintf (ps_file, CLEAR_FILE, KEEP_OPEN); -} - -for (l=0; l<_linesIn; l=l+1) -{ - if (Abs(inLines[l]) == 0) - { - if (_currentState == 1) - { - geneSeqs * 0; - DataSet ds = ReadFromString (geneSeqs); - _processAGene (_currentGene,treeOutFile,ps_file); - geneSeqs * 128; - _currentGene = _currentGene + 1; - } - } - else - { - if (_currentState == 0) - { - _currentState = 1; - } - geneSeqs * inLines[l]; - geneSeqs * "\\n"; - } -} - - -if (_currentState == 1) -{ - geneSeqs * 0; - if (Abs(geneSeqs)) - { - DataSet ds = ReadFromString (geneSeqs); - _processAGene (_currentGene,treeOutFile,ps_file); - } -} - -fprintf (treeOutFile,CLOSE_FILE); -if (Abs(ps_file)) -{ - fprintf (ps_file,CLOSE_FILE); -} -/*---------------------------------------------------------*/ - -function _processAGene (_geneID, nwk_file, ps_file) -{ - if (ds.species == 1) - { - fprintf (nwk_file, _geneID-1, "\\tNone \\tNone\\n"); - return 0; - - } - - DataSetFilter filteredData = CreateFilter (ds,1); - - /* do sequence to branch map */ - - taxonNameMap = {}; - - for (k=0; k<ds.species; k=k+1) - { - GetString (thisName, ds,k); - shortName = (thisName^{{"\\\\..+",""}}); - taxonNameMap[shortName] = thisName; - SetParameter (ds,k,shortName); - } - - DataSetFilter filteredData = CreateFilter (ds,1); - DISTANCE_PROMPTS = (_geneID==1); - - InferTreeTopology (0); - baseTree = TreeMatrix2TreeString (0); - UseModel (USE_NO_MODEL); - - Tree baseTop = baseTree; - - /* standardize this top */ - - for (k=0; k<Abs(isomorphicTreesBySequenceCount[filteredData.species]); k=k+1) - { - testString = (isomorphicTreesBySequenceCount[filteredData.species])[k]; - Tree testTree = testString; - if (testTree == baseTop) - { - baseTree = testString; - break; - } - } - if (k==Abs(isomorphicTreesBySequenceCount[filteredData.species])) - { - if (k==0) - { - isomorphicTreesBySequenceCount[filteredData.species] = {}; - } - (isomorphicTreesBySequenceCount[filteredData.species])[k] = baseTree; - } - - fprintf (nwk_file, _geneID-1, "\\t", baseTree, "\\t", TreeMatrix2TreeString (1), "\\n"); - if (Abs(ps_file)) - { - treeString = TreeMatrix2TreeString (2); - UseModel (USE_NO_MODEL); - Tree givenTree = treeString; - baseHeight = TipCount (givenTree)*28; - TREE_OUTPUT_OPTIONS = {}; - TREE_OUTPUT_OPTIONS["__FONT_SIZE__"] = 14; - baseWidth = 0; - treeAVL = givenTree^0; - drawLetter = "/drawletter {"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$4+" -"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$2+ " show} def\\n"; - for (k3 = 1; k3 < Abs(treeAVL); k3=k3+1) - { - nodeName = (treeAVL[k3])["Name"]; - 
if(Abs((treeAVL[k3])["Children"]) == 0) - { - mySpecs = {}; - mySpecs ["TREE_OUTPUT_BRANCH_LABEL"] = "(" + taxonNameMap[nodeName] + ") drawLetter"; - baseWidth = Max (baseWidth, (treeAVL[k3])["Depth"]); - } - } - baseWidth = 40*baseWidth; - - fprintf (stdout, _geneID, ":", givenTree,"\\n"); - fprintf (ps_file, PSTreeString (givenTree, "STRING_SUPPLIED_LENGTHS",{{baseWidth,baseHeight}})); - } - return 0; -} -""" % (filename) - -BranchLengthsMF = """ -VERBOSITY_LEVEL = -1; - -fscanf (PROMPT_FOR_FILE, "Lines", inLines); - - - -_linesIn = Columns (inLines); - - - -/*---------------------------------------------------------*/ - - - -_currentGene = 1; - -_currentState = 0; - -geneSeqs = ""; - -geneSeqs * 128; - - - -for (l=0; l<_linesIn; l=l+1) - -{ - - if (Abs(inLines[l]) == 0) - - { - - if (_currentState == 1) - - { - - geneSeqs * 0; - - DataSet ds = ReadFromString (geneSeqs); - - _processAGene (_currentGene); - - geneSeqs * 128; - - _currentGene = _currentGene + 1; - - } - - } - - else - - { - - if (_currentState == 0) - - { - - _currentState = 1; - - } - - geneSeqs * inLines[l]; - - geneSeqs * "\\n"; - - } - -} - - - -if (_currentState == 1) - -{ - - geneSeqs * 0; - - if (Abs(geneSeqs)) - - { - - DataSet ds = ReadFromString (geneSeqs); - - _processAGene (_currentGene); - - } - -} - - - -fprintf (resultFile,CLOSE_FILE); - - - -/*---------------------------------------------------------*/ - - - -function _processAGene (_geneID) - -{ - - DataSetFilter filteredData = CreateFilter (ds,1); - - if (_currentGene == 1) - - { - - SelectTemplateModel (filteredData); - - - - SetDialogPrompt ("Tree file"); - - fscanf (PROMPT_FOR_FILE, "Tree", givenTree); - - fscanf (stdin, "String", resultFile); - - - - /* do sequence to branch map */ - - - - validNames = {}; - - taxonNameMap = {}; - - - - for (k=0; k<TipCount(givenTree); k=k+1) - - { - - validNames[TipName(givenTree,k)&&1] = 1; - - } - - - - for (k=0; k<BranchCount(givenTree); k=k+1) - - { - - thisName = BranchName(givenTree,k); - - taxonNameMap[thisName&&1] = thisName; - - } - - - - storeValidNames = validNames; - - fprintf (resultFile,CLEAR_FILE,KEEP_OPEN,"Block\\tBranch\\tLength\\tLowerBound\\tUpperBound\\n"); - - } - - else - - { - - HarvestFrequencies (vectorOfFrequencies, filteredData, 1,1,1); - - validNames = storeValidNames; - - } - - - - for (k=0; k<ds.species; k=k+1) - - { - - GetString (thisName, ds,k); - - shortName = (thisName^{{"\\\\..+",""}})&&1; - - if (validNames[shortName]) - - { - - taxonNameMap[shortName] = thisName; - - validNames - (shortName); - - SetParameter (ds,k,shortName); - - } - - else - - { - - fprintf (resultFile,"ERROR:", thisName, " could not be matched to any of the leaves in tree ", givenTree,"\\n"); - - return 0; - - } - - } - - - - /* */ - - - - LikelihoodFunction lf = (filteredData,givenTree); - - Optimize (res,lf); - - - - timer = Time(0)-timer; - - - - branchNames = BranchName (givenTree,-1); - - branchLengths = BranchLength (givenTree,-1); - - - - - - for (k=0; k<Columns(branchNames)-1; k=k+1) - - { - - COVARIANCE_PARAMETER = "givenTree."+branchNames[k]+".t"; - - COVARIANCE_PRECISION = 0.95; - - CovarianceMatrix (cmx,lf); - - if (k==0) - - { - - /* compute a scaling factor */ - - ExecuteCommands ("givenTree."+branchNames[0]+".t=1"); - - scaleFactor = BranchLength (givenTree,0); - - ExecuteCommands ("givenTree."+branchNames[0]+".t="+cmx[0][1]); - - } - - fprintf (resultFile,_geneID,"\\t",taxonNameMap[branchNames[k]&&1],"\\t",branchLengths[k],"\\t",scaleFactor*cmx[0][0],"\\t",scaleFactor*cmx[0][2],"\\n"); - - } - 
- - - ttl = (branchLengths*(Transpose(branchLengths["1"])))[0]; - - global treeScaler = 1; - - ReplicateConstraint ("this1.?.t:=treeScaler*this2.?.t__",givenTree,givenTree); - - COVARIANCE_PARAMETER = "treeScaler"; - - COVARIANCE_PRECISION = 0.95; - - CovarianceMatrix (cmx,lf); - - fprintf (resultFile,_geneID,"\\tTotal Tree\\t",ttl,"\\t",ttl*cmx[0][0],"\\t",ttl*cmx[0][2],"\\n"); - - ClearConstraints (givenTree); - - return 0; - -} -""" - -BranchLengths = """ -DataSet ds = ReadDataFile (PROMPT_FOR_FILE); -DataSetFilter filteredData = CreateFilter (ds,1); - -SelectTemplateModel (filteredData); - -SetDialogPrompt ("Tree file"); -fscanf (PROMPT_FOR_FILE, "Tree", givenTree); -fscanf (stdin, "String", resultFile); - -/* do sequence to branch map */ - -validNames = {}; -taxonNameMap = {}; - -for (k=0; k<TipCount(givenTree); k=k+1) -{ - validNames[TipName(givenTree,k)&&1] = 1; -} - -for (k=0; k<BranchCount(givenTree); k=k+1) -{ - thisName = BranchName(givenTree,k); - taxonNameMap[thisName&&1] = thisName; -} - -for (k=0; k<ds.species; k=k+1) -{ - GetString (thisName, ds,k); - shortName = (thisName^{{"\\\\..+",""}})&&1; - if (validNames[shortName]) - { - taxonNameMap[shortName] = thisName; - validNames - (shortName); - SetParameter (ds,k,shortName); - } - else - { - fprintf (resultFile,CLEAR_FILE,"ERROR:", thisName, " could not be matched to any of the leaves in tree ", givenTree); - return 0; - } -} - -/* */ - -LikelihoodFunction lf = (filteredData,givenTree); - -Optimize (res,lf); - -timer = Time(0)-timer; - -branchNames = BranchName (givenTree,-1); -branchLengths = BranchLength (givenTree,-1); - -fprintf (resultFile,CLEAR_FILE,KEEP_OPEN,"Branch\\tLength\\tLowerBound\\tUpperBound\\n"); - -for (k=0; k<Columns(branchNames)-1; k=k+1) -{ - COVARIANCE_PARAMETER = "givenTree."+branchNames[k]+".t"; - COVARIANCE_PRECISION = 0.95; - CovarianceMatrix (cmx,lf); - if (k==0) - { - /* compute a scaling factor */ - ExecuteCommands ("givenTree."+branchNames[0]+".t=1"); - scaleFactor = BranchLength (givenTree,0); - ExecuteCommands ("givenTree."+branchNames[0]+".t="+cmx[0][1]); - } - fprintf (resultFile,taxonNameMap[branchNames[k]&&1],"\\t",branchLengths[k],"\\t",scaleFactor*cmx[0][0],"\\t",scaleFactor*cmx[0][2],"\\n"); -} - -ttl = (branchLengths*(Transpose(branchLengths["1"])))[0]; -global treeScaler = 1; -ReplicateConstraint ("this1.?.t:=treeScaler*this2.?.t__",givenTree,givenTree); -COVARIANCE_PARAMETER = "treeScaler"; -COVARIANCE_PRECISION = 0.95; -CovarianceMatrix (cmx,lf); -ClearConstraints (givenTree); -fprintf (resultFile,"Total Tree\\t",ttl,"\\t",ttl*cmx[0][0],"\\t",ttl*cmx[0][2],"\\n"); -fprintf (resultFile,CLOSE_FILE); -""" - -SimpleLocalFitter = """ -VERBOSITY_LEVEL = -1; -COUNT_GAPS_IN_FREQUENCIES = 0; - -/*---------------------------------------------------------*/ - -function returnResultHeaders (dummy) -{ - _analysisHeaders = {}; - _analysisHeaders[0] = "BLOCK"; - _analysisHeaders[1] = "BP"; - _analysisHeaders[2] = "S_sites"; - _analysisHeaders[3] = "NS_sites"; - _analysisHeaders[4] = "Stop_codons"; - _analysisHeaders[5] = "LogL"; - _analysisHeaders[6] = "AC"; - _analysisHeaders[7] = "AT"; - _analysisHeaders[8] = "CG"; - _analysisHeaders[9] = "CT"; - _analysisHeaders[10] = "GT"; - _analysisHeaders[11] = "Tree"; - - for (_biterator = 0; _biterator < treeBranchCount; _biterator = _biterator + 1) - { - branchName = treeBranchNames[_biterator]; - - _analysisHeaders [Abs(_analysisHeaders)] = "length("+branchName+")"; - _analysisHeaders [Abs(_analysisHeaders)] = "dS("+branchName+")"; - _analysisHeaders 
[Abs(_analysisHeaders)] = "dN("+branchName+")"; - _analysisHeaders [Abs(_analysisHeaders)] = "omega("+branchName+")"; - } - - return _analysisHeaders; -} - -/*---------------------------------------------------------*/ - -function runAGeneFit (myID) -{ - DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions); - - if (_currentGene==1) - { - _MG94stdinOverload = {}; - _MG94stdinOverload ["0"] = "Local"; - _MG94stdinOverload ["1"] = modelSpecString; - - ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"MG94custom.mdl", - _MG94stdinOverload); - - Tree codonTree = treeString; - } - else - { - HarvestFrequencies (observedFreq,filteredData,3,1,1); - MULTIPLY_BY_FREQS = PopulateModelMatrix ("MG94custom", observedFreq); - vectorOfFrequencies = BuildCodonFrequencies (observedFreq); - Model MG94customModel = (MG94custom,vectorOfFrequencies,0); - - Tree codonTree = treeString; - } - - LikelihoodFunction lf = (filteredData,codonTree); - - Optimize (res,lf); - - _snsAVL = _computeSNSSites ("filteredData", _Genetic_Code, vectorOfFrequencies, 0); - _cL = ReturnVectorsOfCodonLengths (ComputeScalingStencils (0), "codonTree"); - - - _returnMe = {}; - _returnMe ["BLOCK"] = myID; - _returnMe ["LogL"] = res[1][0]; - _returnMe ["BP"] = _snsAVL ["Sites"]; - _returnMe ["S_sites"] = _snsAVL ["SSites"]; - _returnMe ["NS_sites"] = _snsAVL ["NSSites"]; - _returnMe ["AC"] = AC; - _returnMe ["AT"] = AT; - _returnMe ["CG"] = CG; - _returnMe ["CT"] = CT; - _returnMe ["GT"] = GT; - _returnMe ["Tree"] = Format(codonTree,0,1); - - for (_biterator = 0; _biterator < treeBranchCount; _biterator = _biterator + 1) - { - branchName = treeBranchNames[_biterator]; - - _returnMe ["length("+branchName+")"] = (_cL["Total"])[_biterator]; - _returnMe ["dS("+branchName+")"] = (_cL["Syn"])[_biterator]*(_returnMe ["BP"]/_returnMe ["S_sites"]); - _returnMe ["dN("+branchName+")"] = (_cL["NonSyn"])[_biterator]*(_returnMe ["BP"]/_returnMe ["NS_sites"]); - - ExecuteCommands ("_lom = _standardizeRatio(codonTree."+treeBranchNames[_biterator]+".nonSynRate,codonTree."+treeBranchNames[_biterator]+".synRate);"); - _returnMe ["omega("+branchName+")"] = _lom; - } - - return _returnMe; -} - -""" - -SimpleGlobalFitter = """ -VERBOSITY_LEVEL = -1; -COUNT_GAPS_IN_FREQUENCIES = 0; - -/*---------------------------------------------------------*/ - -function returnResultHeaders (dummy) -{ - _analysisHeaders = {}; - _analysisHeaders[0] = "BLOCK"; - _analysisHeaders[1] = "BP"; - _analysisHeaders[2] = "S_sites"; - _analysisHeaders[3] = "NS_sites"; - _analysisHeaders[4] = "Stop_codons"; - _analysisHeaders[5] = "LogL"; - _analysisHeaders[6] = "omega"; - _analysisHeaders[7] = "omega_range"; - _analysisHeaders[8] = "AC"; - _analysisHeaders[9] = "AT"; - _analysisHeaders[10] = "CG"; - _analysisHeaders[11] = "CT"; - _analysisHeaders[12] = "GT"; - _analysisHeaders[13] = "Tree"; - - return _analysisHeaders; -} - -/*---------------------------------------------------------*/ - -function runAGeneFit (myID) -{ - fprintf (stdout, "[SimpleGlobalFitter.bf on GENE ", myID, "]\\n"); - taxonNameMap = {}; - - for (k=0; k<ds.species; k=k+1) - { - GetString (thisName, ds,k); - shortName = (thisName^{{"\\\\..+",""}})&&1; - taxonNameMap[shortName] = thisName; - SetParameter (ds,k,shortName); - } - - DataSetFilter filteredData = CreateFilter (ds,1); - _nucSites = filteredData.sites; - - if (Abs(treeString)) - { - givenTreeString = treeString; - } - else - { - if (_currentGene==1) - { - ExecuteAFile 
(HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"NJ.bf"); - } - givenTreeString = InferTreeTopology (0); - treeString = ""; - } - - DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions); - - if (_currentGene==1) - { - _MG94stdinOverload = {}; - _MG94stdinOverload ["0"] = "Global"; - _MG94stdinOverload ["1"] = modelSpecString; - - ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"MG94custom.mdl", - _MG94stdinOverload); - - Tree codonTree = givenTreeString; - } - else - { - HarvestFrequencies (observedFreq,filteredData,3,1,1); - MULTIPLY_BY_FREQS = PopulateModelMatrix ("MG94custom", observedFreq); - vectorOfFrequencies = BuildCodonFrequencies (observedFreq); - Model MG94customModel = (MG94custom,vectorOfFrequencies,0); - - Tree codonTree = givenTreeString; - } - - LikelihoodFunction lf = (filteredData,codonTree); - - Optimize (res,lf); - - _snsAVL = _computeSNSSites ("filteredData", _Genetic_Code, vectorOfFrequencies, 0); - _cL = ReturnVectorsOfCodonLengths (ComputeScalingStencils (0), "codonTree"); - - - _returnMe = {}; - _returnMe ["BLOCK"] = myID; - _returnMe ["LogL"] = res[1][0]; - _returnMe ["BP"] = _snsAVL ["Sites"]; - _returnMe ["S_sites"] = _snsAVL ["SSites"]; - _returnMe ["NS_sites"] = _snsAVL ["NSSites"]; - _returnMe ["Stop_codons"] = (_nucSites-filteredData.sites*3)$3; - _returnMe ["AC"] = AC; - _returnMe ["AT"] = AT; - _returnMe ["CG"] = CG; - _returnMe ["CT"] = CT; - _returnMe ["GT"] = GT; - _returnMe ["omega"] = R; - COVARIANCE_PARAMETER = "R"; - COVARIANCE_PRECISION = 0.95; - CovarianceMatrix (cmx,lf); - _returnMe ["omega_range"] = ""+cmx[0]+"-"+cmx[2]; - _returnMe ["Tree"] = Format(codonTree,0,1); - - - return _returnMe; -} -""" - -FastaReader = """ -fscanf (stdin, "String", _coreAnalysis); -fscanf (stdin, "String", _outputDriver); - -ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"chooseGeneticCode.def"); -ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"dSdNTreeTools.ibf"); -ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"CodonTools.bf"); -ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"GrabBag.bf"); - -SetDialogPrompt ("Tree file"); -fscanf (PROMPT_FOR_FILE, "Tree", givenTree); - -treeBranchNames = BranchName (givenTree,-1); -treeBranchCount = Columns (treeBranchNames)-1; -treeString = Format (givenTree,1,1); - -SetDialogPrompt ("Multiple gene FASTA file"); -fscanf (PROMPT_FOR_FILE, "Lines", inLines); -fscanf (stdin, "String", modelSpecString); -fscanf (stdin, "String", _outPath); - -ExecuteAFile (_outputDriver); -ExecuteAFile (_coreAnalysis); - -/*---------------------------------------------------------*/ - -_linesIn = Columns (inLines); -_currentGene = 1; - _currentState = 0; -/* 0 - waiting for a non-empty line */ -/* 1 - reading files */ - -geneSeqs = ""; -geneSeqs * 0; - -_prepareFileOutput (_outPath); - -for (l=0; l<_linesIn; l=l+1) -{ - if (Abs(inLines[l]) == 0) - { - if (_currentState == 1) - { - geneSeqs * 0; - DataSet ds = ReadFromString (geneSeqs); - _processAGene (ds.species == treeBranchCount,_currentGene); - geneSeqs * 128; - _currentGene = _currentGene + 1; - } - } - else - { - if (_currentState == 0) - { - _currentState = 1; - } - geneSeqs * inLines[l]; - geneSeqs * "\\n"; - } -} - -if (_currentState == 1) -{ - 
geneSeqs * 0; - DataSet ds = ReadFromString (geneSeqs); - _processAGene (ds.species == treeBranchCount,_currentGene); -} - -_finishFileOutput (0); -""" - -TabWriter = """ -/*---------------------------------------------------------*/ -function _prepareFileOutput (_outPath) -{ - _outputFilePath = _outPath; - - _returnHeaders = returnResultHeaders(0); - - fprintf (_outputFilePath, CLEAR_FILE, KEEP_OPEN, _returnHeaders[0]); - for (_biterator = 1; _biterator < Abs(_returnHeaders); _biterator = _biterator + 1) - { - fprintf (_outputFilePath,"\\t",_returnHeaders[_biterator]); - } - - - - fprintf (_outputFilePath,"\\n"); - return 0; -} - -/*---------------------------------------------------------*/ - -function _processAGene (valid, _geneID) -{ - if (valid) - { - returnValue = runAGeneFit (_geneID); - fprintf (_outputFilePath, returnValue[_returnHeaders[0]]); - for (_biterator = 1; _biterator < Abs(_returnHeaders); _biterator = _biterator + 1) - { - fprintf (_outputFilePath,"\\t",returnValue[_returnHeaders[_biterator]]); - } - fprintf (_outputFilePath, "\\n"); - } - /* - else - { - fprintf (_outputFilePath, - _geneID, ", Incorrect number of sequences\\n"); - } - */ - _currentState = 0; - return 0; -} - -/*---------------------------------------------------------*/ -function _finishFileOutput (dummy) -{ - return 0; -} -""" - -def get_dnds_config_filename(Fitter_filename, TabWriter_filename, genetic_code, tree_filename, input_filename, nuc_model, output_filename, FastaReader_filename ): - contents = """ -_genomeScreenOptions = {}; - -/* all paths are either absolute or relative -to the DATA READER */ - -_genomeScreenOptions ["0"] = "%s"; - /* which analysis to run on each gene; */ -_genomeScreenOptions ["1"] = "%s"; - /* what output to produce; */ -_genomeScreenOptions ["2"] = "%s"; - /* genetic code */ -_genomeScreenOptions ["3"] = "%s"; - /* tree file */ -_genomeScreenOptions ["4"] = "%s"; - /* alignment file */ -_genomeScreenOptions ["5"] = "%s"; - /* nucleotide bias string; can define any of the 203 models */ -_genomeScreenOptions ["6"] = "%s"; - /* output csv file */ - -ExecuteAFile ("%s", _genomeScreenOptions); -""" % (Fitter_filename, TabWriter_filename, genetic_code, tree_filename, input_filename, nuc_model, output_filename, FastaReader_filename ) - return get_filled_temp_filename(contents) - - -def get_branch_lengths_config_filename(input_filename, nuc_model, model_options, base_freq, tree_filename, output_filename, BranchLengths_filename): - contents = """ -_genomeScreenOptions = {}; - -/* all paths are either absolute or relative -to the NucDataBranchLengths.bf */ - -_genomeScreenOptions ["0"] = "%s"; - /* the file to analyze; */ -_genomeScreenOptions ["1"] = "CUSTOM"; - /* use an arbitrary nucleotide model */ -_genomeScreenOptions ["2"] = "%s"; - /* which model to use */ -_genomeScreenOptions ["3"] = "%s"; - /* model options */ -_genomeScreenOptions ["4"] = "Estimated"; - /* rate parameters */ -_genomeScreenOptions ["5"] = "%s"; - /* base frequencies */ -_genomeScreenOptions ["6"] = "%s"; - /* the tree to use; */ -_genomeScreenOptions ["7"] = "%s"; - /* write .csv output to; */ - -ExecuteAFile ("%s", _genomeScreenOptions); -""" % (input_filename, nuc_model, model_options, base_freq, tree_filename, output_filename, BranchLengths_filename) - return get_filled_temp_filename(contents) - - -def get_nj_tree_config_filename(input_filename, distance_metric, output_filename1, output_filename2, NJ_tree_filename): - contents = """ -_genomeScreenOptions = {}; - -/* all paths are either absolute or 
relative -to the BuildNJTree.bf */ - -_genomeScreenOptions ["0"] = "%s"; - /* the file to analyze; */ -_genomeScreenOptions ["1"] = "%s"; - /* pick which distance metric to use; TN93 is a good default */ -_genomeScreenOptions ["2"] = "%s"; - /* write Newick tree output to; */ -_genomeScreenOptions ["3"] = "%s"; - /* write a postscript tree file to this file; leave blank to not write a tree */ - -ExecuteAFile ("%s", _genomeScreenOptions); -""" % (input_filename, distance_metric, output_filename1, output_filename2, NJ_tree_filename) - return get_filled_temp_filename(contents) - - -def get_nj_treeMF_config_filename(input_filename, output_filename1, output_filename2, distance_metric, NJ_tree_filename): - contents = """ -_genomeScreenOptions = {}; - -/* all paths are either absolute or relative -to the BuildNJTreeMF.bf */ - -_genomeScreenOptions ["0"] = "%s"; - /* the multiple alignment file to analyze; */ -_genomeScreenOptions ["1"] = "%s"; - /* write Newick tree output to; */ -_genomeScreenOptions ["2"] = "%s"; - /* write a postscript tree file to this file; leave blank to not write a tree */ -_genomeScreenOptions ["3"] = "%s"; - /* pick which distance metric to use; TN93 is a good default */ - -ExecuteAFile ("%s", _genomeScreenOptions); -""" % (input_filename, output_filename1, output_filename2, distance_metric, NJ_tree_filename) - return get_filled_temp_filename(contents) This diff is so big that we needed to truncate the remainder. https://bitbucket.org/galaxy/galaxy-central/commits/e0e1dab49f16/ Changeset: e0e1dab49f16 Branch: job-search User: Kyle Ellrott Date: 2014-01-29 21:52:46 Summary: Fixing parameter finding for non-string parameters (ints/floats) Affected #: 1 file diff -r 7637cfbea32e2a5e4af28400fa88909cbb6bd058 -r e0e1dab49f16eacf6e43c7193129b8371866c2e8 lib/galaxy/webapps/galaxy/api/jobs.py --- a/lib/galaxy/webapps/galaxy/api/jobs.py +++ b/lib/galaxy/webapps/galaxy/api/jobs.py @@ -131,7 +131,7 @@ raise exceptions.ObjectNotFound( "Dataset %s not found" % ( v[ 'id' ] ) ) input_data[k] = dataset.dataset_id else: - input_param[k] = json.dumps( v ) + input_param[k] = json.dumps( str(v) ) query = trans.sa_session.query( trans.app.model.Job ).filter( trans.app.model.Job.tool_id == tool_id, https://bitbucket.org/galaxy/galaxy-central/commits/be7ece945447/ Changeset: be7ece945447 User: jmchilton Date: 2014-01-31 05:44:58 Summary: Merged in kellrott/galaxy-central/job-search (pull request #316) Fixing non-string parameter selection for job searches Affected #: 1 file diff -r 24753d36e6a60bb446bd961d16cb4b59abf7b13b -r be7ece9454471d655668f3997c29b90c2f6c312d lib/galaxy/webapps/galaxy/api/jobs.py --- a/lib/galaxy/webapps/galaxy/api/jobs.py +++ b/lib/galaxy/webapps/galaxy/api/jobs.py @@ -131,7 +131,7 @@ raise exceptions.ObjectNotFound( "Dataset %s not found" % ( v[ 'id' ] ) ) input_data[k] = dataset.dataset_id else: - input_param[k] = json.dumps( v ) + input_param[k] = json.dumps( str(v) ) query = trans.sa_session.query( trans.app.model.Job ).filter( trans.app.model.Job.tool_id == tool_id, Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
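
A small sketch of the job-search fix in lib/galaxy/webapps/galaxy/api/jobs.py above. It assumes, as the changeset summary implies, that Galaxy persists tool parameters as JSON-encoded strings, so an int/float supplied to the jobs API only matches a stored value after conversion with str(); the variable names are illustrative, not taken from the Galaxy codebase.

    import json

    # Assumption for illustration: a tool parameter entered as 5 on a tool
    # form is persisted as the JSON-encoded string '"5"'.
    stored_param = json.dumps("5")                         # '"5"'

    search_value = 5                                       # int supplied to the jobs API

    assert json.dumps(search_value) != stored_param        # '5'   -- old comparison misses
    assert json.dumps(str(search_value)) == stored_param   # '"5"' -- patched comparison matches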