1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/e1404e285ccb/
Changeset:   e1404e285ccb
Branch:      stable
User:        jmchilton
Date:        2014-10-15 16:57:10+00:00
Summary:     Fix for datatypes consuming output extra files path for the change in d781366.

That change forced $output.extra_files_path to be the same thing as $output.files_path; see the rationale in that changeset. Some datatypes, however, consume this path when generating metadata - post d781366 these datatypes would be accessing the wrong directory. This fixes them.

The best practice I would put forward is to use $input.extra_files_path and $output.files_path. $output.extra_files_path redirects to $output.files_path in newer Galaxy versions (August 2014+), so that Galaxy configurations requiring this directory to be in the job's working directory keep working. Datatypes consuming dataset.extra_files_path for these outputs will be broken when used with $output.files_path on the August release of Galaxy - but if this changeset is back-ported to the October 2014 release, then these datatypes and tools will work going forward (without modification).

It can be verified that the older releases are broken with the following existing test (fixed with this changeset):

./run_functional_tests.sh -framework -id composite_output
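For illustration, here is a minimal, self-contained Python sketch of the fallback pattern the diff below introduces: extra_files_path resolves through the object store unless an external override has been set, which is what set_metadata.py now does for outputs kept in the job working directory. The _ObjectStoreStub and DatasetSketch names and the example paths are hypothetical stand-ins for this sketch only - they are not the actual Galaxy classes.

import os


class _ObjectStoreStub(object):
    """Hypothetical stand-in for Galaxy's object store, used only by this sketch."""

    def __init__(self, root):
        self.root = root

    def get_filename(self, dataset, dir_only=False, extra_dir=None):
        # The real object store resolves backend-specific locations; here we
        # simply join a root directory with the requested extra_dir.
        return os.path.join(self.root, extra_dir or "dataset_%d.dat" % dataset.id)


class DatasetSketch(object):
    """Simplified model object demonstrating the extra_files_path fallback."""

    def __init__(self, id, object_store, extra_files_path=None):
        self.id = id
        self.object_store = object_store
        self._extra_files_path = extra_files_path
        # Not backed by a database column, so it is always read defensively
        # with getattr() below.
        self.external_extra_files_path = None

    def get_extra_files_path(self):
        if not getattr(self, "external_extra_files_path", None):
            return self.object_store.get_filename(
                self, dir_only=True,
                extra_dir=self._extra_files_path or "dataset_%d_files" % self.id)
        return os.path.abspath(self.external_extra_files_path)

    def set_extra_files_path(self, extra_files_path):
        self.external_extra_files_path = extra_files_path or None

    extra_files_path = property(get_extra_files_path, set_extra_files_path)


if __name__ == "__main__":
    dataset = DatasetSketch(42, _ObjectStoreStub("/galaxy/database/files"))
    # Default: resolved through the object store.
    print(dataset.extra_files_path)
    # What set_metadata.py now does for outputs kept in the job working directory:
    dataset.extra_files_path = os.path.join("job_working_dir", "dataset_42_files")
    print(dataset.extra_files_path)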
Affected #:  2 files

diff -r cdd3a8a9cd7d5d71705843d2fa448462c7ed0675 -r e1404e285ccb9e52bd09c5827934cd0da1974acc lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -1332,6 +1332,7 @@
         self.purged = False
         self.purgable = purgable
         self.external_filename = external_filename
+        self.external_extra_files_path = None
         self._extra_files_path = extra_files_path
         self.file_size = file_size
         if uuid is None:
@@ -1355,9 +1356,20 @@
         else:
             self.external_filename = filename
     file_name = property( get_file_name, set_file_name )
-    @property
-    def extra_files_path( self ):
-        return self.object_store.get_filename( self, dir_only=True, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id )
+    def get_extra_files_path( self ):
+        # Unlike get_file_name - extrnal_extra_files_path is not backed by an
+        # actual database column so if SA instantiates this object - the
+        # attribute won't exist yet.
+        if not getattr( self, "external_extra_files_path", None ):
+            return self.object_store.get_filename( self, dir_only=True, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id )
+        else:
+            return os.path.abspath( self.external_extra_files_path )
+    def set_extra_files_path( self, extra_files_path ):
+        if not extra_files_path:
+            self.external_extra_files_path = None
+        else:
+            self.external_extra_files_path = extra_files_path
+    extra_files_path = property( get_extra_files_path, set_extra_files_path)
     def _calculate_size( self ):
         if self.external_filename:
             try:

diff -r cdd3a8a9cd7d5d71705843d2fa448462c7ed0675 -r e1404e285ccb9e52bd09c5827934cd0da1974acc scripts/set_metadata.py
--- a/scripts/set_metadata.py
+++ b/scripts/set_metadata.py
@@ -120,6 +120,8 @@
         dataset = cPickle.load( open( filename_in ) )  # load DatasetInstance
         if dataset_filename_override:
             dataset.dataset.external_filename = dataset_filename_override
+        files_path = os.path.abspath(os.path.join( tool_job_working_directory, "dataset_%s_files" % (dataset.dataset.id) ))
+        dataset.dataset.external_extra_files_path = files_path
         if dataset.dataset.id in existing_job_metadata_dict:
             dataset.extension = existing_job_metadata_dict[ dataset.dataset.id ].get( 'ext', dataset.extension )
         # Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.