commit/galaxy-central: 2 new changesets
2 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/changeset/72cbac1ff125/
changeset:   72cbac1ff125
user:        natefoo
date:        2011-11-16 19:38:24
summary:     Make linking data in a library upload a job parameter so it can be
             checked during job finish to ensure the linked files are not
             overwritten when using outputs_to_working_directory. This
             special-case fix is a temporary solution that should be removed as
             soon as we have a smarter way to handle "immutable outputs."
affected #:  4 files

diff -r 537a42e5d26578413f20b369643110404502b7c3 -r 72cbac1ff125c8133f4ba4e6c2607d816dcd8e56 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -541,7 +541,7 @@
                 self.version_string = open(version_filename).read()
                 os.unlink(version_filename)
 
-        if self.app.config.outputs_to_working_directory:
+        if self.app.config.outputs_to_working_directory and not self.__link_file_check():
             for dataset_path in self.get_output_fnames():
                 try:
                     shutil.move( dataset_path.false_path, dataset_path.real_path )
@@ -879,6 +879,15 @@
         else:
             return 'anonymous@unknown'
 
+    def __link_file_check( self ):
+        """ outputs_to_working_directory breaks library uploads where data is
+        linked. This method is a hack that solves that problem, but is
+        specific to the upload tool and relies on an injected job param. This
+        method should be removed ASAP and replaced with some properly generic
+        and stateful way of determining link-only datasets. -nate
+        """
+        return self.tool.id == 'upload1' and self.param_dict.get( 'link_data_only', None ) == 'link_to_files'
+
 class TaskWrapper(JobWrapper):
     """
     Extension of JobWrapper intended for running tasks.

diff -r 537a42e5d26578413f20b369643110404502b7c3 -r 72cbac1ff125c8133f4ba4e6c2607d816dcd8e56 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py
+++ b/lib/galaxy/tools/actions/upload.py
@@ -25,4 +25,4 @@
         json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
         data_list = [ ud.data for ud in uploaded_datasets ]
 
-        return upload_common.create_job( trans, incoming, tool, json_file_path, data_list, return_job=True )
+        return upload_common.create_job( trans, incoming, tool, json_file_path, data_list )

diff -r 537a42e5d26578413f20b369643110404502b7c3 -r 72cbac1ff125c8133f4ba4e6c2607d816dcd8e56 lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -294,7 +294,7 @@
         json_file.write( to_json_string( json ) + '\n' )
     json_file.close()
     return json_file_path
-def create_job( trans, params, tool, json_file_path, data_list, folder=None, return_job=False ):
+def create_job( trans, params, tool, json_file_path, data_list, folder=None ):
     """
     Create the upload job.
     """
@@ -341,10 +341,7 @@
     output = odict()
     for i, v in enumerate( data_list ):
         output[ 'output%i' % i ] = v
-    if return_job:
-        return job, output
-    else:
-        return output
+    return job, output
 def active_folders( trans, folder ):
     # Stolen from galaxy.web.controllers.library_common (importing from which causes a circular issues).
     # Much faster way of retrieving all active sub-folders within a given folder than the

diff -r 537a42e5d26578413f20b369643110404502b7c3 -r 72cbac1ff125c8133f4ba4e6c2607d816dcd8e56 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py
+++ b/lib/galaxy/web/controllers/library_common.py
@@ -1045,7 +1045,12 @@
                                                           status='error' ) )
         json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
         data_list = [ ud.data for ud in uploaded_datasets ]
-        return upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder )
+        job, output = upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder )
+        # HACK: Prevent outputs_to_working_directory from overwriting inputs when "linking"
+        job.add_parameter( 'link_data_only', to_json_string( kwd.get( 'link_data_only', 'copy_files' ) ) )
+        trans.sa_session.add( job )
+        trans.sa_session.flush()
+        return output
     def make_library_uploaded_dataset( self, trans, cntrller, params, name, path, type, library_bunch, in_folder=None ):
         library_bunch.replace_dataset = None # not valid for these types of upload
         uploaded_dataset = util.bunch.Bunch()
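For readers who want the guard in isolation, here is a minimal standalone sketch
of the decision this changeset adds at job finish: outputs are only moved out of
the working directory when the job is not a link-only library upload. This is
not Galaxy code; JobInfo, is_link_only, should_move_outputs and the hard-coded
values are illustrative assumptions, while the tool id 'upload1' and the
'link_data_only' parameter come from the diff above.

    from collections import namedtuple

    # Illustrative stand-in for the bits of job state the real JobWrapper
    # consults; these field names are assumptions for the sketch.
    JobInfo = namedtuple("JobInfo", ["tool_id", "params"])

    def is_link_only(job):
        # Same test as __link_file_check(): only the upload tool, and only when
        # the injected 'link_data_only' parameter says data was linked, not copied.
        return job.tool_id == "upload1" and job.params.get("link_data_only") == "link_to_files"

    def should_move_outputs(job, outputs_to_working_directory):
        # The changed condition in finish(): move working-directory outputs onto
        # their real paths only if the job did not merely link to existing files.
        return outputs_to_working_directory and not is_link_only(job)

    linked = JobInfo("upload1", {"link_data_only": "link_to_files"})
    copied = JobInfo("upload1", {"link_data_only": "copy_files"})
    print(should_move_outputs(linked, True))  # False -> the move is skipped
    print(should_move_outputs(copied, True))  # True  -> outputs moved as before

The flag travels as an ordinary job parameter so nothing new has to be persisted;
the trade-off, acknowledged in the docstring above, is that the check is
hard-wired to the upload tool.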
https://bitbucket.org/galaxy/galaxy-central/changeset/c658f8f1e6ea/
changeset:   c658f8f1e6ea
user:        natefoo
date:        2011-11-16 19:53:05
summary:     Bugfix for the last commit, since JobWrapper.param_dict doesn't
             exist after a server restart.
affected #:  1 file

diff -r 72cbac1ff125c8133f4ba4e6c2607d816dcd8e56 -r c658f8f1e6ea651c7d9f7fc2ca77512cad6beeb5 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -886,7 +886,9 @@
         method should be removed ASAP and replaced with some properly generic
         and stateful way of determining link-only datasets. -nate
         """
-        return self.tool.id == 'upload1' and self.param_dict.get( 'link_data_only', None ) == 'link_to_files'
+        job = self.get_job()
+        param_dict = job.get_param_values( self.app )
+        return self.tool.id == 'upload1' and param_dict.get( 'link_data_only', None ) == 'link_to_files'
 
 class TaskWrapper(JobWrapper):
     """

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
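A closing note on why the second commit works: the first commit stored
link_data_only on the Job record itself (job.add_parameter writes a
JSON-encoded value), so the flag can be re-read from the database after a
restart even though the in-memory JobWrapper.param_dict is gone. Below is a
rough self-contained sketch of that pattern, assuming a minimal PersistedJob
stand-in rather than Galaxy's real model classes.

    import json

    class PersistedJob:
        """Illustrative stand-in for a persisted Job row; only the JSON
        round-trip of parameters matters here, not Galaxy's actual model API."""
        def __init__(self):
            self.parameters = {}  # name -> JSON-encoded value, as add_parameter stores it

        def add_parameter(self, name, value_json):
            self.parameters[name] = value_json

        def get_param_values(self):
            # Decode the stored parameters, roughly what job.get_param_values()
            # hands back to __link_file_check() after a restart.
            return {name: json.loads(raw) for name, raw in self.parameters.items()}

    # At job creation (first commit): persist the flag with the job.
    job = PersistedJob()
    job.add_parameter("link_data_only", json.dumps("link_to_files"))

    # At job finish, possibly after a server restart (second commit): the
    # in-memory param_dict no longer exists, so the value is recovered from
    # the job record instead.
    params = job.get_param_values()
    print(params.get("link_data_only") == "link_to_files")  # True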