1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/changeset/46169ca11f43/
changeset:   46169ca11f43
user:        natefoo
date:        2012-01-09 19:19:56
summary:     Make paths to metadata temp files absolute.
affected #:  4 files

diff -r 2acedad2ef87c907975dd5afab5eef9ccde31987 -r 46169ca11f432651d3ff08281dfaf896ff4f5b8c lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -1,6 +1,7 @@
 import sys, logging, copy, shutil, weakref, cPickle, tempfile, os
+from os.path import abspath

-from galaxy.util import string_as_bool, relpath, stringify_dictionary_keys, listify
+from galaxy.util import string_as_bool, stringify_dictionary_keys, listify
 from galaxy.util.odict import odict
 from galaxy.web import form_builder
 import galaxy.model
@@ -476,7 +477,7 @@
     def file_name( self ):
         if self._filename is None:
             #we need to create a tmp file, accessable across all nodes/heads, save the name, and return it
-            self._filename = relpath( tempfile.NamedTemporaryFile( dir = self.tmp_dir, prefix = "metadata_temp_file_" ).name )
+            self._filename = abspath( tempfile.NamedTemporaryFile( dir = self.tmp_dir, prefix = "metadata_temp_file_" ).name )
             open( self._filename, 'wb+' ) #create an empty file, so it can't be reused using tempfile
         return self._filename
     def to_JSON( self ):
@@ -563,7 +564,7 @@
             #is located differently, i.e. on a cluster node with a different filesystem structure

             #file to store existing dataset
-            metadata_files.filename_in = relpath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_in_%s_" % key ).name )
+            metadata_files.filename_in = abspath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_in_%s_" % key ).name )

             #FIXME: HACK
             #sqlalchemy introduced 'expire_on_commit' flag for sessionmaker at version 0.5x
@@ -574,17 +575,17 @@
             cPickle.dump( dataset, open( metadata_files.filename_in, 'wb+' ) )

             #file to store metadata results of set_meta()
-            metadata_files.filename_out = relpath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_out_%s_" % key ).name )
+            metadata_files.filename_out = abspath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_out_%s_" % key ).name )
             open( metadata_files.filename_out, 'wb+' ) # create the file on disk, so it cannot be reused by tempfile (unlikely, but possible)
             #file to store a 'return code' indicating the results of the set_meta() call
             #results code is like (True/False - if setting metadata was successful/failed , exception or string of reason of success/failure )
-            metadata_files.filename_results_code = relpath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_results_%s_" % key ).name )
+            metadata_files.filename_results_code = abspath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_results_%s_" % key ).name )
             simplejson.dump( ( False, 'External set_meta() not called' ), open( metadata_files.filename_results_code, 'wb+' ) ) # create the file on disk, so it cannot be reused by tempfile (unlikely, but possible)
             #file to store kwds passed to set_meta()
-            metadata_files.filename_kwds = relpath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_kwds_%s_" % key ).name )
+            metadata_files.filename_kwds = abspath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_kwds_%s_" % key ).name )
             simplejson.dump( kwds, open( metadata_files.filename_kwds, 'wb+' ), ensure_ascii=True )
             #existing metadata file parameters need to be overridden with cluster-writable file locations
-            metadata_files.filename_override_metadata = relpath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_override_%s_" % key ).name )
+            metadata_files.filename_override_metadata = abspath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_override_%s_" % key ).name )
             open( metadata_files.filename_override_metadata, 'wb+' ) # create the file on disk, so it cannot be reused by tempfile (unlikely, but possible)
             override_metadata = []
             for meta_key, spec_value in dataset.metadata.spec.iteritems():
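The relpath -> abspath switch above matters because the recorded temp-file names are later consumed by an external set_meta process that need not run from the directory where the names were recorded. A minimal sketch of the failure mode, using os.path.relpath as a stand-in for galaxy.util.relpath and made-up directory names (illustrative only, not Galaxy code):

    import os, tempfile

    # Made-up layout: a Galaxy root the server runs from, and a separate
    # directory a cluster node uses as its cwd for the external job.
    base = tempfile.mkdtemp()
    galaxy_root = os.path.join( base, 'galaxy-dist' )
    node_workdir = os.path.join( base, 'node', 'work' )
    os.makedirs( galaxy_root )
    os.makedirs( node_workdir )

    os.chdir( galaxy_root )
    name = tempfile.NamedTemporaryFile( dir = base, delete = False ).name
    rel_name = os.path.relpath( name )  # what the old code effectively stored
    abs_name = os.path.abspath( name )  # what the new code stores

    os.chdir( node_workdir )  # the external set_meta job runs from elsewhere
    print os.path.exists( rel_name )  # False: relative name no longer resolves
    print os.path.exists( abs_name )  # True: absolute name is cwd-independent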
diff -r 2acedad2ef87c907975dd5afab5eef9ccde31987 -r 46169ca11f432651d3ff08281dfaf896ff4f5b8c lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -709,11 +709,11 @@
         try:
             for fname in self.extra_filenames:
                 os.remove( fname )
-            self.app.object_store.delete(self.get_job(), base_dir='job_work', dir_only=True, extra_dir=str(self.job_id))
             if self.app.config.set_metadata_externally:
                 self.external_output_metadata.cleanup_external_metadata( self.sa_session )
             galaxy.tools.imp_exp.JobExportHistoryArchiveWrapper( self.job_id ).cleanup_after_job( self.sa_session )
             galaxy.tools.imp_exp.JobImportHistoryArchiveWrapper( self.job_id ).cleanup_after_job( self.sa_session )
+            self.app.object_store.delete(self.get_job(), base_dir='job_work', entire_dir=True, dir_only=True, extra_dir=str(self.job_id))
         except:
             log.exception( "Unable to cleanup job %d" % self.job_id )

diff -r 2acedad2ef87c907975dd5afab5eef9ccde31987 -r 46169ca11f432651d3ff08281dfaf896ff4f5b8c lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -31,7 +31,7 @@
         commands += "; cd %s; " % os.path.abspath( os.getcwd() )
         commands += job_wrapper.setup_external_metadata( exec_dir = os.path.abspath( os.getcwd() ),
-                                                         tmp_dir = self.app.config.new_file_path,
+                                                         tmp_dir = job_wrapper.working_directory,
                                                          dataset_files_path = self.app.model.Dataset.file_path,
                                                          output_fnames = job_wrapper.get_output_fnames(),
                                                          set_extension = False,

diff -r 2acedad2ef87c907975dd5afab5eef9ccde31987 -r 46169ca11f432651d3ff08281dfaf896ff4f5b8c lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -110,6 +110,7 @@
         if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and self.app.config.set_metadata_externally and job_wrapper.output_paths:
             external_metadata_script = job_wrapper.setup_external_metadata( output_fnames = job_wrapper.get_output_fnames(),
                                                                             set_extension = True,
+                                                                            tmp_dir = job_wrapper.working_directory,
                                                                             kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior
             log.debug( 'executing external set_meta script for job %d: %s' % ( job_wrapper.job_id, external_metadata_script ) )
             external_metadata_proc = subprocess.Popen( args = external_metadata_script,

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this
because you have the service enabled, addressing the recipient of this
email.
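With tmp_dir now pointing at the job's working directory in both runners, the metadata temp files live inside the directory that object_store.delete( ..., entire_dir=True, dir_only=True ) removes, so the reordering in cleanup() above is load-bearing: the external metadata cleanup has to run before the working directory is deleted. A rough sketch of that ordering constraint, with shutil.rmtree standing in for the object store call and made-up file names (illustrative only, not Galaxy's object store API):

    import os, shutil, tempfile

    working_directory = tempfile.mkdtemp( prefix = 'job_work_' )
    filename_out = os.path.join( working_directory, 'metadata_out_1_abc' )
    open( filename_out, 'wb+' ).close()

    def cleanup_external_metadata():
        # stand-in for external_output_metadata.cleanup_external_metadata();
        # would raise OSError if the working directory were already gone
        os.remove( filename_out )

    cleanup_external_metadata()         # must run first
    shutil.rmtree( working_directory )  # then remove the directory itself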