commit/galaxy-central: jmchilton: Fix API uploads (must have broken with API/Web transaction unification).
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/69f70d84619d/ Changeset: 69f70d84619d Branch: next-stable User: jmchilton Date: 2013-05-26 22:10:43 Summary: Fix API uploads (must have broken with API/Web transaction unification). Affected #: 3 files diff -r 4df405e7a2aff50c4ef83806c55d01448227cc5d -r 69f70d84619de8024e8de7ef5669bda6dfaffce8 lib/galaxy/tools/actions/upload.py --- a/lib/galaxy/tools/actions/upload.py +++ b/lib/galaxy/tools/actions/upload.py @@ -16,7 +16,7 @@ incoming = upload_common.persist_uploads( incoming ) # We can pass an empty string as the cntrller here since it is used to check whether we # are in an admin view, and this tool is currently not used there. - uploaded_datasets = upload_common.get_uploaded_datasets( trans, '', incoming, precreated_datasets, dataset_upload_inputs ) + uploaded_datasets = upload_common.get_uploaded_datasets( trans, '', incoming, precreated_datasets, dataset_upload_inputs, history=history ) upload_common.cleanup_unused_precreated_datasets( precreated_datasets ) if not uploaded_datasets: @@ -24,4 +24,4 @@ json_file_path = upload_common.create_paramfile( trans, uploaded_datasets ) data_list = [ ud.data for ud in uploaded_datasets ] - return upload_common.create_job( trans, incoming, tool, json_file_path, data_list ) + return upload_common.create_job( trans, incoming, tool, json_file_path, data_list, history=history ) diff -r 4df405e7a2aff50c4ef83806c55d01448227cc5d -r 69f70d84619de8024e8de7ef5669bda6dfaffce8 lib/galaxy/tools/actions/upload_common.py --- a/lib/galaxy/tools/actions/upload_common.py +++ b/lib/galaxy/tools/actions/upload_common.py @@ -107,11 +107,13 @@ data.state = data.states.ERROR data.info = 'No file contents were available.' 
-def new_history_upload( trans, uploaded_dataset, state=None ): +def __new_history_upload( trans, uploaded_dataset, history=None, state=None ): + if not history: + history = trans.history hda = trans.app.model.HistoryDatasetAssociation( name = uploaded_dataset.name, extension = uploaded_dataset.file_type, dbkey = uploaded_dataset.dbkey, - history = trans.history, + history = history, create_dataset = True, sa_session = trans.sa_session ) if state: @@ -120,12 +122,13 @@ hda.state = hda.states.QUEUED trans.sa_session.add( hda ) trans.sa_session.flush() - trans.history.add_dataset( hda, genome_build = uploaded_dataset.dbkey ) - permissions = trans.app.security_agent.history_get_default_permissions( trans.history ) + history.add_dataset( hda, genome_build=uploaded_dataset.dbkey ) + permissions = trans.app.security_agent.history_get_default_permissions( history ) trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions ) trans.sa_session.flush() return hda -def new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state=None ): + +def __new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state=None ): current_user_roles = trans.get_current_user_roles() if not ( ( trans.user_is_admin() and cntrller in [ 'library_admin', 'api' ] ) or trans.app.security_agent.can_add_library_item( current_user_roles, library_bunch.folder ) ): # This doesn't have to be pretty - the only time this should happen is if someone's being malicious. 
@@ -206,19 +209,22 @@ trans.sa_session.add( dp ) trans.sa_session.flush() return ldda -def new_upload( trans, cntrller, uploaded_dataset, library_bunch=None, state=None ): +def new_upload( trans, cntrller, uploaded_dataset, library_bunch=None, history=None, state=None ): if library_bunch: - return new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state ) + return __new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state ) + elif history: + return __new_history_upload( trans, uploaded_dataset, history=history, state=state ) else: - return new_history_upload( trans, uploaded_dataset, state ) -def get_uploaded_datasets( trans, cntrller, params, precreated_datasets, dataset_upload_inputs, library_bunch=None ): + raise Exception("new_upload must be called with empty values for library_bunch and history") + +def get_uploaded_datasets( trans, cntrller, params, precreated_datasets, dataset_upload_inputs, library_bunch=None, history=None ): uploaded_datasets = [] for dataset_upload_input in dataset_upload_inputs: uploaded_datasets.extend( dataset_upload_input.get_uploaded_datasets( trans, params ) ) for uploaded_dataset in uploaded_datasets: data = get_precreated_dataset( precreated_datasets, uploaded_dataset.name ) if not data: - data = new_upload( trans, cntrller, uploaded_dataset, library_bunch ) + data = new_upload( trans, cntrller, uploaded_dataset, library_bunch=library_bunch, history=history ) else: data.extension = uploaded_dataset.file_type data.dbkey = uploaded_dataset.dbkey @@ -246,7 +252,9 @@ trans.sa_session.add( info_association ) trans.sa_session.flush() else: - trans.history.genome_build = uploaded_dataset.dbkey + if not history: + history = trans.history + history.genome_build = uploaded_dataset.dbkey uploaded_dataset.data = data return uploaded_datasets def create_paramfile( trans, uploaded_datasets ): @@ -320,7 +328,7 @@ if trans.app.config.external_chown_script: _chown( json_file_path ) return json_file_path -def 
create_job( trans, params, tool, json_file_path, data_list, folder=None ): +def create_job( trans, params, tool, json_file_path, data_list, folder=None, history=None ): """ Create the upload job. """ @@ -333,7 +341,9 @@ if folder: job.library_folder_id = folder.id else: - job.history_id = trans.history.id + if not history: + history = trans.history + job.history_id = history.id job.tool_id = tool.id job.tool_version = tool.version job.state = job.states.UPLOAD diff -r 4df405e7a2aff50c4ef83806c55d01448227cc5d -r 69f70d84619de8024e8de7ef5669bda6dfaffce8 lib/galaxy/webapps/galaxy/api/tools.py --- a/lib/galaxy/webapps/galaxy/api/tools.py +++ b/lib/galaxy/webapps/galaxy/api/tools.py @@ -78,7 +78,6 @@ if history_id: target_history = trans.sa_session.query(trans.app.model.History).get( trans.security.decode_id(history_id)) - trans.galaxy_session.current_history = target_history else: target_history = None Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this email because you have the commit notification service enabled for this repository.
participants (1)
-
commits-noreply@bitbucket.org