commit/galaxy-central: 7 new changesets
7 new commits in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/5f373922d077/ Changeset: 5f373922d077 User: jmchilton Date: 2014-01-29 15:42:39 Summary: Move some history export logic out of controller into model... For reuse by API. Affected #: 3 files diff -r 6b627e15f18b13a489ca4e4343703931d9333021 -r 5f373922d0773eccab37f51145b8e880760e86b4 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -647,6 +647,7 @@ return self.library_dataset_dataset_association return None + class JobExportHistoryArchive( object ): def __init__( self, job=None, history=None, dataset=None, compressed=False, \ history_attrs_filename=None, datasets_attrs_filename=None, @@ -659,6 +660,35 @@ self.datasets_attrs_filename = datasets_attrs_filename self.jobs_attrs_filename = jobs_attrs_filename + @property + def up_to_date( self ): + """ Return False, if a new export should be generated for corresponding + history. + """ + job = self.job + return job.state not in [ Job.states.ERROR, Job.states.DELETED ] \ + and job.update_time > self.history.update_time + + @property + def ready( self ): + return self.job.state == Job.states.OK + + @property + def preparing( self ): + return self.job.state in [ Job.states.RUNNING, Job.states.QUEUED, Job.states.WAITING ] + + @property + def export_name( self ): + # Stream archive. + valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' + hname = self.history.name + hname = ''.join(c in valid_chars and c or '_' for c in hname)[0:150] + hname = "Galaxy-History-%s.tar" % ( hname ) + if self.compressed: + hname += ".gz" + return hname + + class JobImportHistoryArchive( object ): def __init__( self, job=None, history=None, archive_dir=None ): self.job = job @@ -875,6 +905,11 @@ return changed @property + def latest_export( self ): + exports = self.exports + return exports and exports[ 0 ] + + @property def get_disk_size_bytes( self ): return self.get_disk_size( nice_size=False ) diff -r 6b627e15f18b13a489ca4e4343703931d9333021 -r 5f373922d0773eccab37f51145b8e880760e86b4 lib/galaxy/model/mapping.py --- a/lib/galaxy/model/mapping.py +++ b/lib/galaxy/model/mapping.py @@ -1295,6 +1295,7 @@ mapper( model.History, model.History.table, properties=dict( galaxy_sessions=relation( model.GalaxySessionToHistoryAssociation ), datasets=relation( model.HistoryDatasetAssociation, backref="history", order_by=asc(model.HistoryDatasetAssociation.table.c.hid) ), + exports=relation( model.JobExportHistoryArchive, primaryjoin=( model.JobExportHistoryArchive.table.c.history_id == model.History.table.c.id ), order_by=desc( model.JobExportHistoryArchive.table.c.id ) ), active_datasets=relation( model.HistoryDatasetAssociation, primaryjoin=( ( model.HistoryDatasetAssociation.table.c.history_id == model.History.table.c.id ) & not_( model.HistoryDatasetAssociation.table.c.deleted ) ), order_by=asc( model.HistoryDatasetAssociation.table.c.hid ), viewonly=True ), visible_datasets=relation( model.HistoryDatasetAssociation, primaryjoin=( ( model.HistoryDatasetAssociation.table.c.history_id == model.History.table.c.id ) & not_( model.HistoryDatasetAssociation.table.c.deleted ) & model.HistoryDatasetAssociation.table.c.visible ), order_by=asc( model.HistoryDatasetAssociation.table.c.hid ), viewonly=True ), diff -r 6b627e15f18b13a489ca4e4343703931d9333021 -r 5f373922d0773eccab37f51145b8e880760e86b4 lib/galaxy/webapps/galaxy/controllers/history.py --- a/lib/galaxy/webapps/galaxy/controllers/history.py +++ 
b/lib/galaxy/webapps/galaxy/controllers/history.py @@ -704,24 +704,17 @@ # # If history has already been exported and it has not changed since export, stream it. # - jeha = trans.sa_session.query( model.JobExportHistoryArchive ).filter_by( history=history ) \ - .order_by( model.JobExportHistoryArchive.id.desc() ).first() - if jeha and ( jeha.job.state not in [ model.Job.states.ERROR, model.Job.states.DELETED ] ) \ - and jeha.job.update_time > history.update_time: - if jeha.job.state == model.Job.states.OK: - # Stream archive. - valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' - hname = history.name - hname = ''.join(c in valid_chars and c or '_' for c in hname)[0:150] - hname = "Galaxy-History-%s.tar" % ( hname ) + jeha = history.latest_export + if jeha and jeha.up_to_date: + if jeha.ready: if jeha.compressed: - hname += ".gz" trans.response.set_content_type( 'application/x-gzip' ) else: trans.response.set_content_type( 'application/x-tar' ) - trans.response.headers["Content-Disposition"] = 'attachment; filename="%s"' % ( hname ) + disposition = 'attachment; filename="%s"' % jeha.export_name + trans.response.headers["Content-Disposition"] = disposition return open( trans.app.object_store.get_filename( jeha.dataset ) ) - elif jeha.job.state in [ model.Job.states.RUNNING, model.Job.states.QUEUED, model.Job.states.WAITING ]: + elif jeha.preparing: return trans.show_message( "Still exporting history %(n)s; please check back soon. Link: <a href='%(s)s'>%(s)s</a>" \ % ( { 'n' : history.name, 's' : url_for( controller='history', action="export_archive", id=id, qualified=True ) } ) ) https://bitbucket.org/galaxy/galaxy-central/commits/e0a4bdd5b89f/ Changeset: e0a4bdd5b89f User: jmchilton Date: 2014-01-29 15:42:39 Summary: Refactor history export functionality into ExportsHistoryMixin... ... for reuse in API. Affected #: 2 files diff -r 5f373922d0773eccab37f51145b8e880760e86b4 -r e0a4bdd5b89f326b6b1c8ed17aec084b89c5e1ac lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -562,6 +562,40 @@ return changed +class ExportsHistoryMixin: + + def serve_ready_history_export( self, trans, jeha ): + assert jeha.ready + if jeha.compressed: + trans.response.set_content_type( 'application/x-gzip' ) + else: + trans.response.set_content_type( 'application/x-tar' ) + disposition = 'attachment; filename="%s"' % jeha.export_name + trans.response.headers["Content-Disposition"] = disposition + return open( trans.app.object_store.get_filename( jeha.dataset ) ) + + def queue_history_export( self, trans, history, gzip=True, include_hidden=False, include_deleted=False ): + # Convert options to booleans. + # + if isinstance( gzip, basestring ): + gzip = ( gzip in [ 'True', 'true', 'T', 't' ] ) + if isinstance( include_hidden, basestring ): + include_hidden = ( include_hidden in [ 'True', 'true', 'T', 't' ] ) + if isinstance( include_deleted, basestring ): + include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] ) + + # Run job to do export. + history_exp_tool = trans.app.toolbox.get_tool( '__EXPORT_HISTORY__' ) + params = { + 'history_to_export': history, + 'compress': gzip, + 'include_hidden': include_hidden, + 'include_deleted': include_deleted + } + + history_exp_tool.execute( trans, incoming=params, set_output_hid=True ) + + class UsesHistoryDatasetAssociationMixin: """ Mixin for controllers that use HistoryDatasetAssociation objects. 
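For orientation, the sketch below shows roughly how a controller is expected to consume the new mixin; it is an illustration, not code from this changeset. The class name SketchController and the message text are invented, get_history comes from the existing UsesHistoryMixin, and latest_export, up_to_date, ready, serve_ready_history_export and queue_history_export are the helpers introduced in the diffs above.

    from galaxy import web
    from galaxy.web.base.controller import BaseUIController, UsesHistoryMixin, ExportsHistoryMixin

    class SketchController( BaseUIController, UsesHistoryMixin, ExportsHistoryMixin ):

        @web.expose
        def export_archive( self, trans, id=None, gzip=True ):
            history = self.get_history( trans, id, check_ownership=False, check_accessible=True )
            jeha = history.latest_export
            if jeha and jeha.up_to_date and jeha.ready:
                # An up-to-date archive already exists - stream it back.
                return self.serve_ready_history_export( trans, jeha )
            # Otherwise (re)queue the __EXPORT_HISTORY__ job and ask the user to retry.
            self.queue_history_export( trans, history, gzip=gzip )
            return trans.show_message( "Export queued; check back shortly." )

The actual refactoring of HistoryController.export_archive onto these helpers follows in the next diff.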
diff -r 5f373922d0773eccab37f51145b8e880760e86b4 -r e0a4bdd5b89f326b6b1c8ed17aec084b89c5e1ac lib/galaxy/webapps/galaxy/controllers/history.py --- a/lib/galaxy/webapps/galaxy/controllers/history.py +++ b/lib/galaxy/webapps/galaxy/controllers/history.py @@ -13,6 +13,7 @@ from galaxy.util.sanitize_html import sanitize_html from galaxy.web import error, url_for from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesHistoryDatasetAssociationMixin, UsesHistoryMixin +from galaxy.web.base.controller import ExportsHistoryMixin from galaxy.web.base.controller import ERROR, INFO, SUCCESS, WARNING from galaxy.web.framework.helpers import grids, iff, time_ago @@ -187,7 +188,7 @@ return query.filter( self.model_class.published == True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False ) class HistoryController( BaseUIController, SharableMixin, UsesAnnotations, UsesItemRatings, - UsesHistoryMixin, UsesHistoryDatasetAssociationMixin ): + UsesHistoryMixin, UsesHistoryDatasetAssociationMixin, ExportsHistoryMixin ): @web.expose def index( self, trans ): return "" @@ -679,16 +680,6 @@ def export_archive( self, trans, id=None, gzip=True, include_hidden=False, include_deleted=False ): """ Export a history to an archive. """ # - # Convert options to booleans. - # - if isinstance( gzip, basestring ): - gzip = ( gzip in [ 'True', 'true', 'T', 't' ] ) - if isinstance( include_hidden, basestring ): - include_hidden = ( include_hidden in [ 'True', 'true', 'T', 't' ] ) - if isinstance( include_deleted, basestring ): - include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] ) - - # # Get history to export. # if id: @@ -707,25 +698,12 @@ jeha = history.latest_export if jeha and jeha.up_to_date: if jeha.ready: - if jeha.compressed: - trans.response.set_content_type( 'application/x-gzip' ) - else: - trans.response.set_content_type( 'application/x-tar' ) - disposition = 'attachment; filename="%s"' % jeha.export_name - trans.response.headers["Content-Disposition"] = disposition - return open( trans.app.object_store.get_filename( jeha.dataset ) ) + return self.serve_ready_history_export( trans, jeha ) elif jeha.preparing: return trans.show_message( "Still exporting history %(n)s; please check back soon. Link: <a href='%(s)s'>%(s)s</a>" \ % ( { 'n' : history.name, 's' : url_for( controller='history', action="export_archive", id=id, qualified=True ) } ) ) - # Run job to do export. - history_exp_tool = trans.app.toolbox.get_tool( '__EXPORT_HISTORY__' ) - params = { - 'history_to_export' : history, - 'compress' : gzip, - 'include_hidden' : include_hidden, - 'include_deleted' : include_deleted } - history_exp_tool.execute( trans, incoming = params, set_output_hid = True ) + self.queue_history_export( trans, history, gzip=gzip, include_hidden=include_hidden, include_deleted=include_deleted ) url = url_for( controller='history', action="export_archive", id=id, qualified=True ) return trans.show_message( "Exporting History '%(n)s'. Use this link to download \ the archive or import it to another Galaxy server: \ https://bitbucket.org/galaxy/galaxy-central/commits/3a3561fa8df3/ Changeset: 3a3561fa8df3 User: jmchilton Date: 2014-01-29 15:42:41 Summary: Refactor history import functionality into ImportsHistoryMixin... ... for reuse in API. 
Affected #: 2 files diff -r e0a4bdd5b89f326b6b1c8ed17aec084b89c5e1ac -r 3a3561fa8df334b43eff9ce0d648b38932bb27e6 lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -596,6 +596,15 @@ history_exp_tool.execute( trans, incoming=params, set_output_hid=True ) +class ImportsHistoryMixin: + + def queue_history_import( self, trans, archive_type, archive_source ): + # Run job to do import. + history_imp_tool = trans.app.toolbox.get_tool( '__IMPORT_HISTORY__' ) + incoming = { '__ARCHIVE_SOURCE__' : archive_source, '__ARCHIVE_TYPE__' : archive_type } + history_imp_tool.execute( trans, incoming=incoming ) + + class UsesHistoryDatasetAssociationMixin: """ Mixin for controllers that use HistoryDatasetAssociation objects. diff -r e0a4bdd5b89f326b6b1c8ed17aec084b89c5e1ac -r 3a3561fa8df334b43eff9ce0d648b38932bb27e6 lib/galaxy/webapps/galaxy/controllers/history.py --- a/lib/galaxy/webapps/galaxy/controllers/history.py +++ b/lib/galaxy/webapps/galaxy/controllers/history.py @@ -14,6 +14,7 @@ from galaxy.web import error, url_for from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesHistoryDatasetAssociationMixin, UsesHistoryMixin from galaxy.web.base.controller import ExportsHistoryMixin +from galaxy.web.base.controller import ImportsHistoryMixin from galaxy.web.base.controller import ERROR, INFO, SUCCESS, WARNING from galaxy.web.framework.helpers import grids, iff, time_ago @@ -188,7 +189,8 @@ return query.filter( self.model_class.published == True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False ) class HistoryController( BaseUIController, SharableMixin, UsesAnnotations, UsesItemRatings, - UsesHistoryMixin, UsesHistoryDatasetAssociationMixin, ExportsHistoryMixin ): + UsesHistoryMixin, UsesHistoryDatasetAssociationMixin, ExportsHistoryMixin, + ImportsHistoryMixin ): @web.expose def index( self, trans ): return "" @@ -669,10 +671,7 @@ # TODO: add support for importing via a file. #.add_input( "file", "Archived History File", "archive_file", value=None, error=None ) ) - # Run job to do import. - history_imp_tool = trans.app.toolbox.get_tool( '__IMPORT_HISTORY__' ) - incoming = { '__ARCHIVE_SOURCE__' : archive_source, '__ARCHIVE_TYPE__' : archive_type } - history_imp_tool.execute( trans, incoming=incoming ) + self.queue_history_import( trans, archive_type=archive_type, archive_source=archive_source ) return trans.show_message( "Importing history from '%s'. \ This history will be visible when the import is complete" % archive_source ) https://bitbucket.org/galaxy/galaxy-central/commits/17f1d214f8a4/ Changeset: 17f1d214f8a4 User: jmchilton Date: 2014-01-29 15:42:42 Summary: Add new API decorator for raw downloads. Affected #: 2 files diff -r 3a3561fa8df334b43eff9ce0d648b38932bb27e6 -r 17f1d214f8a44995f082e0fd32d3801edef02303 lib/galaxy/web/__init__.py --- a/lib/galaxy/web/__init__.py +++ b/lib/galaxy/web/__init__.py @@ -19,3 +19,4 @@ # TODO: Drop and make these the default. 
from framework import _future_expose_api from framework import _future_expose_api_anonymous +from framework import _future_expose_api_raw diff -r 3a3561fa8df334b43eff9ce0d648b38932bb27e6 -r 17f1d214f8a44995f082e0fd32d3801edef02303 lib/galaxy/web/framework/__init__.py --- a/lib/galaxy/web/framework/__init__.py +++ b/lib/galaxy/web/framework/__init__.py @@ -274,6 +274,10 @@ return _future_expose_api( func, to_json=to_json, user_required=False ) +def _future_expose_api_raw( func ): + return _future_expose_api( func, to_json=False, user_required=True ) + + # TODO: rename as expose_api and make default. def _future_expose_api( func, to_json=True, user_required=True ): """ https://bitbucket.org/galaxy/galaxy-central/commits/ea320e650645/ Changeset: ea320e650645 User: jmchilton Date: 2014-01-29 15:42:42 Summary: Implement history exporting via API. Two new routes: one that can be PUT against to poll for download_url and one to fetch a completed download via a GET. Includes a simple test case. This doesn't cleanly map to REST to my mind, so I have cobbled together this approach; happy for further input. Good reading: https://restful-api-design.readthedocs.org/en/latest/methods.html#asynchrono... http://stackoverflow.com/questions/4099869/is-it-wrong-to-return-202-accepte... Affected #: 5 files diff -r 17f1d214f8a44995f082e0fd32d3801edef02303 -r ea320e650645a2512b1ac9ab7c35a8cec4ef2bda lib/galaxy/tools/actions/history_imp_exp.py --- a/lib/galaxy/tools/actions/history_imp_exp.py +++ b/lib/galaxy/tools/actions/history_imp_exp.py @@ -14,7 +14,8 @@ # Create job. # job = trans.app.model.Job() - job.session_id = trans.get_galaxy_session().id + session = trans.get_galaxy_session() + job.session_id = session and session.id job.history_id = trans.history.id job.tool_id = tool.id job.user_id = trans.user.id @@ -76,8 +77,13 @@ # Create the job and output dataset objects # job = trans.app.model.Job() - job.session_id = trans.get_galaxy_session().id - job.history_id = trans.history.id + session = trans.get_galaxy_session() + job.session_id = session and session.id + if history: + history_id = history.id + else: + history_id = trans.history.id + job.history_id = history_id job.tool_id = tool.id if trans.user: # If this is an actual user, run the job as that individual. Otherwise we're running as guest.
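Before the remaining diffs of this changeset, a client-side sketch of the flow the summary above describes: PUT /api/histories/{id}/exports is polled until it stops answering 202 and returns a download_url, which is then fetched with a GET. This is an illustration under assumptions - the requests library, base_url, api_key and the output filename are placeholders - while the routes and the 202/200 behaviour come from this changeset.

    import time
    import requests

    base_url = "http://localhost:8080"    # assumed Galaxy instance
    api_key = "<your API key>"             # placeholder
    history_id = "<encoded history id>"    # placeholder

    export_url = "%s/api/histories/%s/exports?key=%s" % ( base_url, history_id, api_key )

    # PUT queues the export job if needed; it keeps answering 202 until the archive is ready.
    response = requests.put( export_url )
    while response.status_code == 202:
        time.sleep( 1 )
        response = requests.put( export_url )

    # A 200 response carries the download_url to GET the archive from.
    download_path = response.json()[ "download_url" ]
    archive = requests.get( "%s%s?key=%s" % ( base_url, download_path, api_key ) )
    with open( "history-export.tar.gz", "wb" ) as handle:
        handle.write( archive.content )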
diff -r 17f1d214f8a44995f082e0fd32d3801edef02303 -r ea320e650645a2512b1ac9ab7c35a8cec4ef2bda lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -593,7 +593,7 @@ 'include_deleted': include_deleted } - history_exp_tool.execute( trans, incoming=params, set_output_hid=True ) + history_exp_tool.execute( trans, incoming=params, history=history, set_output_hid=True ) class ImportsHistoryMixin: diff -r 17f1d214f8a44995f082e0fd32d3801edef02303 -r ea320e650645a2512b1ac9ab7c35a8cec4ef2bda lib/galaxy/webapps/galaxy/api/histories.py --- a/lib/galaxy/webapps/galaxy/api/histories.py +++ b/lib/galaxy/webapps/galaxy/api/histories.py @@ -8,21 +8,25 @@ pkg_resources.require( "Paste" ) from paste.httpexceptions import HTTPBadRequest, HTTPForbidden, HTTPInternalServerError, HTTPException +from galaxy import exceptions from galaxy import web from galaxy.web import _future_expose_api as expose_api from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous +from galaxy.web import _future_expose_api_raw as expose_api_raw from galaxy.util import string_as_bool from galaxy.util import restore_text from galaxy.web.base.controller import BaseAPIController from galaxy.web.base.controller import UsesHistoryMixin from galaxy.web.base.controller import UsesTagsMixin +from galaxy.web.base.controller import ExportsHistoryMixin from galaxy.web import url_for import logging log = logging.getLogger( __name__ ) -class HistoriesController( BaseAPIController, UsesHistoryMixin, UsesTagsMixin ): +class HistoriesController( BaseAPIController, UsesHistoryMixin, UsesTagsMixin, + ExportsHistoryMixin ): @expose_api_anonymous def index( self, trans, deleted='False', **kwd ): @@ -329,6 +333,67 @@ return changed + @expose_api + def archive_export( self, trans, id, **kwds ): + """ + export_archive( self, trans, id, payload ) + * PUT /api/histories/{id}/exports: + start job (if needed) to create history export for corresponding + history. + + :type id: str + :param id: the encoded id of the history to undelete + + :rtype: dict + :returns: object containing url to fetch export from. + """ + # PUT instead of POST because multiple requests should just result + # in one object being created. + history_id = id + history = self.get_history( trans, history_id, check_ownership=False, check_accessible=True ) + jeha = history.latest_export + up_to_date = jeha and jeha.up_to_date + if not up_to_date: + # Need to create new JEHA + job. + gzip = kwds.get( "gzip", True ) + include_hidden = kwds.get( "include_hidden", False ) + include_deleted = kwds.get( "include_deleted", False ) + self.queue_history_export( trans, history, gzip=gzip, include_hidden=include_hidden, include_deleted=include_deleted ) + + if up_to_date and jeha.ready: + jeha_id = trans.security.encode_id( jeha.id ) + return dict( download_url=url_for( "history_archive_download", id=history_id, jeha_id=jeha_id ) ) + else: + # Valid request, just resource is not ready yet. + trans.response.status = "202 Accepted" + return '' + + @expose_api_raw + def archive_download( self, trans, id, jeha_id, **kwds ): + """ + export_download( self, trans, id, jeha_id ) + * GET /api/histories/{id}/exports/{jeha_id}: + If ready and available, return raw contents of exported history. + Use/poll "PUT /api/histories/{id}/exports" to initiate the creation + of such an export - when ready that route will return 200 status + code (instead of 202) with a JSON dictionary containing a + `download_url`. 
+ """ + # Seems silly to put jeha_id in here, but want GET to be immuatable? + # and this is being accomplished this way. + history = self.get_history( trans, id, check_ownership=False, check_accessible=True ) + matching_exports = filter( lambda e: trans.security.encode_id( e.id ) == jeha_id, history.exports ) + if not matching_exports: + raise exceptions.ObjectNotFound() + + jeha = matching_exports[ 0 ] + if not jeha.ready: + # User should not have been given this URL, PUT export should have + # return a 202. + raise exceptions.MessageException( "Export not available or not yet ready." ) + + return self.serve_ready_history_export( trans, jeha ) + def _validate_and_parse_update_payload( self, payload ): """ Validate and parse incomming data payload for a history. diff -r 17f1d214f8a44995f082e0fd32d3801edef02303 -r ea320e650645a2512b1ac9ab7c35a8cec4ef2bda lib/galaxy/webapps/galaxy/buildapp.py --- a/lib/galaxy/webapps/galaxy/buildapp.py +++ b/lib/galaxy/webapps/galaxy/buildapp.py @@ -154,6 +154,11 @@ webapp.mapper.connect( "set_as_current", "/api/histories/{id}/set_as_current", controller="histories", action="set_as_current", conditions=dict( method=["POST"] ) ) + webapp.mapper.connect( "history_archive_export", "/api/histories/{id}/exports", + controller="histories", action="archive_export", conditions=dict( method=[ "PUT" ] ) ) + webapp.mapper.connect( "history_archive_download", "/api/histories/{id}/exports/{jeha_id}", + controller="histories", action="archive_download", conditions=dict( method=[ "GET" ] ) ) + webapp.mapper.connect( "create_api_key", "/api/users/:user_id/api_key", controller="users", action="api_key", user_id=None, conditions=dict( method=["POST"] ) ) diff -r 17f1d214f8a44995f082e0fd32d3801edef02303 -r ea320e650645a2512b1ac9ab7c35a8cec4ef2bda test/functional/api/test_histories.py --- a/test/functional/api/test_histories.py +++ b/test/functional/api/test_histories.py @@ -1,9 +1,14 @@ +import time from base import api -# requests.post or something like it if unavailable +# requests.{post,put,get} or something like it if unavailable from base.interactor import post_request +from base.interactor import put_request +from base.interactor import get_request +from .helpers import TestsDatasets -class HistoriesApiTestCase( api.ApiTestCase ): + +class HistoriesApiTestCase( api.ApiTestCase, TestsDatasets ): def test_create_history( self ): # Create a history. 
@@ -24,3 +29,24 @@ histories_url = self._api_url( "histories" ) create_response = post_request( url=histories_url, data=post_data ) self._assert_status_code_is( create_response, 403 ) + + def test_export( self ): + history_id = self._new_history() + self._new_dataset( history_id, content="1 2 3" ) + self._wait_for_history( history_id, assert_ok=True ) + export_url = self._api_url( "histories/%s/exports" % history_id , use_key=True ) + put_response = put_request( export_url ) + self._assert_status_code_is( put_response, 202 ) + while True: + put_response = put_request( export_url ) + if put_response.status_code == 202: + time.sleep( .1 ) + else: + break + self._assert_status_code_is( put_response, 200 ) + response = put_response.json() + self._assert_has_keys( response, "download_url" ) + download_path = response[ "download_url" ] + full_download_url = "%s%s?key=%s" % ( self.url, download_path, self.galaxy_interactor.api_key ) + download_response = get_request( full_download_url ) + self._assert_status_code_is( download_response, 200 ) https://bitbucket.org/galaxy/galaxy-central/commits/f456b2eaf3dc/ Changeset: f456b2eaf3dc User: jmchilton Date: 2014-01-29 15:42:42 Summary: Implement history import via API. Affected #: 3 files diff -r ea320e650645a2512b1ac9ab7c35a8cec4ef2bda -r f456b2eaf3dc61df52d710f710590d37b31386ca lib/galaxy/tools/actions/history_imp_exp.py --- a/lib/galaxy/tools/actions/history_imp_exp.py +++ b/lib/galaxy/tools/actions/history_imp_exp.py @@ -16,7 +16,13 @@ job = trans.app.model.Job() session = trans.get_galaxy_session() job.session_id = session and session.id - job.history_id = trans.history.id + if history: + history_id = history.id + elif trans.history: + history_id = trans.history.id + else: + history_id = None + job.history_id = history_id job.tool_id = tool.id job.user_id = trans.user.id start_job_state = job.state #should be job.states.NEW diff -r ea320e650645a2512b1ac9ab7c35a8cec4ef2bda -r f456b2eaf3dc61df52d710f710590d37b31386ca lib/galaxy/webapps/galaxy/api/histories.py --- a/lib/galaxy/webapps/galaxy/api/histories.py +++ b/lib/galaxy/webapps/galaxy/api/histories.py @@ -19,6 +19,7 @@ from galaxy.web.base.controller import UsesHistoryMixin from galaxy.web.base.controller import UsesTagsMixin from galaxy.web.base.controller import ExportsHistoryMixin +from galaxy.web.base.controller import ImportsHistoryMixin from galaxy.web import url_for import logging @@ -26,7 +27,7 @@ class HistoriesController( BaseAPIController, UsesHistoryMixin, UsesTagsMixin, - ExportsHistoryMixin ): + ExportsHistoryMixin, ImportsHistoryMixin ): @expose_api_anonymous def index( self, trans, deleted='False', **kwd ): @@ -176,6 +177,9 @@ :rtype: dict :returns: element view of new history """ + if self.__create_via_import( payload ): + return self.__import_archive( trans, payload ) + hist_name = None if payload.get( 'name', None ): hist_name = restore_text( payload['name'] ) @@ -394,6 +398,14 @@ return self.serve_ready_history_export( trans, jeha ) + def __create_via_import( self, payload ): + return "archive_source" in payload + + def __import_archive( self, trans, payload ): + archive_type = payload.get( "archive_type", "url" ) + archive_source = payload[ "archive_source" ] + self.queue_history_import( trans, archive_type=archive_type, archive_source=archive_source ) + def _validate_and_parse_update_payload( self, payload ): """ Validate and parse incomming data payload for a history. 
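A matching client-side sketch for the import path added here: POSTing archive_source (plus an optional archive_type, which defaults to "url") to /api/histories now queues the __IMPORT_HISTORY__ job instead of creating an empty history. The payload keys come from __import_archive above; requests, base_url, api_key and the archive URL are assumptions. The functional test in the next diff exercises the same round trip.

    import requests

    base_url = "http://localhost:8080"    # assumed Galaxy instance
    api_key = "<your API key>"             # placeholder
    archive_url = "http://example.org/exported-history.tar.gz"    # placeholder

    payload = { "archive_source": archive_url, "archive_type": "url" }
    response = requests.post( "%s/api/histories?key=%s" % ( base_url, api_key ), data=payload )
    response.raise_for_status()
    # The import runs as a background job; the new history appears once it finishes.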
diff -r ea320e650645a2512b1ac9ab7c35a8cec4ef2bda -r f456b2eaf3dc61df52d710f710590d37b31386ca test/functional/api/test_histories.py --- a/test/functional/api/test_histories.py +++ b/test/functional/api/test_histories.py @@ -7,7 +7,6 @@ from .helpers import TestsDatasets - class HistoriesApiTestCase( api.ApiTestCase, TestsDatasets ): def test_create_history( self ): @@ -31,12 +30,13 @@ self._assert_status_code_is( create_response, 403 ) def test_export( self ): - history_id = self._new_history() + history_id = self._new_history( name="for_export" ) self._new_dataset( history_id, content="1 2 3" ) self._wait_for_history( history_id, assert_ok=True ) export_url = self._api_url( "histories/%s/exports" % history_id , use_key=True ) put_response = put_request( export_url ) self._assert_status_code_is( put_response, 202 ) + # TODO: Break after some period of time. while True: put_response = put_request( export_url ) if put_response.status_code == 202: @@ -50,3 +50,21 @@ full_download_url = "%s%s?key=%s" % ( self.url, download_path, self.galaxy_interactor.api_key ) download_response = get_request( full_download_url ) self._assert_status_code_is( download_response, 200 ) + + def history_names(): + history_index = self._get( "histories" ) + return map( lambda h: h[ "name" ], history_index.json() ) + + import_name = "imported from archive: for_export" + assert import_name not in history_names() + + import_data = dict( archive_source=full_download_url, archive_type="url" ) + import_response = self._post( "histories", data=import_data ) + + self._assert_status_code_is( import_response, 200 ) + found = False + while not found: + time.sleep( .1 ) + if import_name in history_names(): + found = True + assert found, "%s not in history names %s" % ( import_name, history_names() ) https://bitbucket.org/galaxy/galaxy-central/commits/6788cffb792b/ Changeset: 6788cffb792b User: jmchilton Date: 2014-02-02 22:23:46 Summary: Introduce method ready_name_for_url. Pulled code out of create_item_slug and placed it in util for reuse in other operations - such as creating a history export. Update history export to use this. Slugs mapped names to a significantly more restricted character set, so history export file names will now, for instance, never contain parens, commas, periods, or underscores. A previous incarnation of this idea (not committed) also 'lower'-ed the name - this behavior is not desired for history exports the way it is for slugs - hence moved .lower() back to create_item_slug and renamed the method ready_name_for_url instead of ready_name_for_slug. Idea from Jeremy Goecks (https://bitbucket.org/galaxy/galaxy-central/pull-request/314/history-import-...). Affected #: 3 files diff -r f456b2eaf3dc61df52d710f710590d37b31386ca -r 6788cffb792bc88bb6cfb3a15aff1d29a8c4a700 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -27,6 +27,7 @@ from galaxy.model.item_attrs import Dictifiable, UsesAnnotations from galaxy.security import get_permitted_actions from galaxy.util import is_multi_byte, nice_size, Params, restore_text, send_mail +from galaxy.util import ready_name_for_url from galaxy.util.bunch import Bunch from galaxy.util.hash_util import new_secure_hash from galaxy.util.directory_hash import directory_hash_id @@ -680,9 +681,7 @@ @property def export_name( self ): # Stream archive.
- valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' - hname = self.history.name - hname = ''.join(c in valid_chars and c or '_' for c in hname)[0:150] + hname = ready_name_for_url( self.history.name ) hname = "Galaxy-History-%s.tar" % ( hname ) if self.compressed: hname += ".gz" diff -r f456b2eaf3dc61df52d710f710590d37b31386ca -r 6788cffb792bc88bb6cfb3a15aff1d29a8c4a700 lib/galaxy/util/__init__.py --- a/lib/galaxy/util/__init__.py +++ b/lib/galaxy/util/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Utility functions used systemwide. @@ -391,6 +392,28 @@ return out +def ready_name_for_url( raw_name ): + """ General method to convert a string (i.e. object name) to a URL-ready + slug. + + >>> ready_name_for_url( "My Cool Object" ) + 'My-Cool-Object' + >>> ready_name_for_url( "!My Cool Object!" ) + 'My-Cool-Object' + >>> ready_name_for_url( "Hello₩◎ґʟⅾ" ) + 'Hello' + """ + + # Replace whitespace with '-' + slug_base = re.sub( "\s+", "-", raw_name ) + # Remove all non-alphanumeric characters. + slug_base = re.sub( "[^a-zA-Z0-9\-]", "", slug_base ) + # Remove trailing '-'. + if slug_base.endswith('-'): + slug_base = slug_base[:-1] + return slug_base + + def in_directory( file, directory ): """ Return true, if the common prefix of both is equal to directory diff -r f456b2eaf3dc61df52d710f710590d37b31386ca -r 6788cffb792bc88bb6cfb3a15aff1d29a8c4a700 lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -2643,13 +2643,7 @@ item_name = item.name elif hasattr( item, 'title' ): item_name = item.title - # Replace whitespace with '-' - slug_base = re.sub( "\s+", "-", item_name.lower() ) - # Remove all non-alphanumeric characters. - slug_base = re.sub( "[^a-zA-Z0-9\-]", "", slug_base ) - # Remove trailing '-'. - if slug_base.endswith('-'): - slug_base = slug_base[:-1] + slug_base = util.ready_name_for_url( item_name.lower() ) else: slug_base = cur_slug Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
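As a worked illustration of the renaming behaviour introduced by the last changeset, the snippet below repeats the two substitutions from the new galaxy.util.ready_name_for_url helper and applies them the way export_name now does; the sample history name is invented, and only the compressed (.tar.gz) case is shown.

    import re

    def ready_name_for_url( raw_name ):
        # Mirrors the new galaxy.util helper: whitespace -> '-', drop remaining
        # non-alphanumeric characters, trim a trailing '-'.
        slug_base = re.sub( "\s+", "-", raw_name )
        slug_base = re.sub( "[^a-zA-Z0-9\-]", "", slug_base )
        if slug_base.endswith( '-' ):
            slug_base = slug_base[:-1]
        return slug_base

    name = "RNA-seq Analysis (v2)"
    print ready_name_for_url( name )                               # RNA-seq-Analysis-v2
    print "Galaxy-History-%s.tar.gz" % ready_name_for_url( name )  # the new export_name
    print ready_name_for_url( name.lower() )                       # rna-seq-analysis-v2 - what create_item_slug still produces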