galaxy-commits
March 2013: 1 participant, 183 discussions
commit/galaxy-central: inithello: Reference the right app.
by commits-noreply@bitbucket.org 18 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a7bd59964f50/
changeset: a7bd59964f50
user: inithello
date: 2013-03-18 20:51:04
summary: Reference the right app.
affected #: 1 file
diff -r f19741fdb9a6b6f15f3e9b983d4a8228b205ffda -r a7bd59964f5053133897e6e7b89065a608e11d03 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -399,7 +399,7 @@
tool_panel_dict = tool_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
sample_files = metadata_dict.get( 'sample_files', [] )
tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files )
- tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
+ tool_util.copy_sample_files( trans.app, tool_index_sample_files, tool_path=tool_path )
sample_files_copied = [ str( s ) for s in tool_index_sample_files ]
repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: browser tests: refactoring, add api module
by commits-noreply@bitbucket.org 18 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f19741fdb9a6/
changeset: f19741fdb9a6
user: carlfeberhard
date: 2013-03-18 20:14:22
summary: browser tests: refactoring, add api module
affected #: 14 files
diff -r 51d3a88f45ee2e3aa89395d2df13b5452d463274 -r f19741fdb9a6b6f15f3e9b983d4a8228b205ffda lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -175,15 +175,20 @@
if from_ld_id:
try:
ld = self.get_library_dataset( trans, from_ld_id, check_ownership=False, check_accessible=False )
- assert type( ld ) is trans.app.model.LibraryDataset, "Library content id ( %s ) is not a dataset" % from_ld_id
+ assert type( ld ) is trans.app.model.LibraryDataset, (
+ "Library content id ( %s ) is not a dataset" % from_ld_id )
+
except AssertionError, e:
trans.response.status = 400
return str( e )
+
except Exception, e:
return str( e )
+
hda = ld.library_dataset_dataset_association.to_history_dataset_association( history, add_to_history=True )
trans.sa_session.flush()
return hda.get_api_value()
+
else:
# TODO: implement other "upload" methods here.
trans.response.status = 403
diff -r 51d3a88f45ee2e3aa89395d2df13b5452d463274 -r f19741fdb9a6b6f15f3e9b983d4a8228b205ffda test/casperjs/anon-history-tests.js
--- a/test/casperjs/anon-history-tests.js
+++ b/test/casperjs/anon-history-tests.js
@@ -68,15 +68,10 @@
});
// ------------------------------------------------------------------- check the empty history for well formedness
-// grab the history frame bounds for mouse later tests
-spaceghost.then( function(){
- historyFrameInfo = this.getElementInfo( 'iframe[name="galaxy_history"]' );
- //this.debug( 'historyFrameInfo:' + this.jsonStr( historyFrameInfo ) );
-});
-
spaceghost.thenOpen( spaceghost.baseUrl, function testPanelStructure(){
this.test.comment( 'history panel for anonymous user, new history' );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
+
+ this.withHistoryPanel( function(){
this.test.comment( "frame should have proper url and title: 'History'" );
this.test.assertMatch( this.getCurrentUrl(), /\/history/, 'Found history frame url' );
this.test.assertTitle( this.getTitle(), 'History', 'Found history frame title' );
@@ -103,15 +98,12 @@
'Message contains "' + emptyMsgStr + '"' );
this.test.comment( 'name should have a tooltip with info on anon-user name editing' );
- // mouse over to find tooltip
- this.historypanel.hoverOver( nameSelector, function testingHover(){
- this.test.assertExists( tooltipSelector, "Found tooltip after name hover" );
- this.test.assertSelectorHasText( tooltipSelector, anonNameTooltip );
- }, historyFrameInfo );
+ this.historypanel.hoverOver( nameSelector );
+ this.test.assertExists( tooltipSelector, "Found tooltip after name hover" );
+ this.test.assertSelectorHasText( tooltipSelector, anonNameTooltip );
this.test.comment( 'name should NOT be editable when clicked by anon-user' );
- this.assertDoesntHaveClass( nameSelector, editableTextClass,
- "Name field is not classed as editable text" );
+ this.assertDoesntHaveClass( nameSelector, editableTextClass, "Name field is not classed as editable text" );
this.click( nameSelector );
this.test.assertDoesntExist( editableTextInput, "Clicking on name does not create an input" );
});
@@ -120,12 +112,13 @@
// ------------------------------------------------------------------- anon user can upload file
spaceghost.then( function testAnonUpload(){
this.test.comment( 'anon-user should be able to upload files' );
+
spaceghost.tools.uploadFile( filepathToUpload, function uploadCallback( _uploadInfo ){
- this.debug( 'uploaded HDA info: ' + this.jsonStr( _uploadInfo ) );
+ this.debug( 'uploaded HDA info: ' + this.jsonStr( this.quickInfo( _uploadInfo.hdaElement ) ) );
var hasHda = _uploadInfo.hdaElement,
hasClass = _uploadInfo.hdaElement.attributes[ 'class' ],
hasOkClass = _uploadInfo.hdaElement.attributes[ 'class' ].indexOf( 'historyItem-ok' ) !== -1;
- this.test.assert( ( hasHda && hasClass && hasOkClass ), "Uploaded file: " + _uploadInfo.name );
+ this.test.assert( ( hasHda && hasClass && hasOkClass ), "Uploaded file: " + _uploadInfo.hdaElement.text );
testUploadInfo = _uploadInfo;
});
});
@@ -134,19 +127,18 @@
this.test.assertNotVisible( emptyMsgSelector, 'Empty history message is not visible' );
});
-
// ------------------------------------------------------------------- anon user can run tool on file
// ------------------------------------------------------------------- anon user registers/logs in -> same history
spaceghost.user.loginOrRegisterUser( email, password );
spaceghost.thenOpen( spaceghost.baseUrl, function(){
+ this.test.comment( 'anon-user should login and be associated with previous history' );
- this.test.comment( 'anon-user should login and be associated with previous history' );
var loggedInAs = spaceghost.user.loggedInAs();
this.test.assert( loggedInAs === email, 'loggedInAs() matches email: "' + loggedInAs + '"' );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- var hdaInfo = this.historypanel.hdaElementInfoByTitle( testUploadInfo.name, testUploadInfo.hid );
+ this.historypanel.waitForHdas( function(){
+ var hdaInfo = this.historypanel.hdaElementInfoByTitle( testUploadInfo.hdaElement.text );
this.test.assert( hdaInfo !== null, "After logging in - found a matching hda by name and hid" );
if( hdaInfo ){
this.test.assert( testUploadInfo.hdaElement.attributes.id === hdaInfo.attributes.id,
@@ -155,17 +147,18 @@
});
});
+// ------------------------------------------------------------------- logs out -> new history
spaceghost.user.logout();
spaceghost.thenOpen( spaceghost.baseUrl, function(){
this.test.comment( 'logging out should create a new, anonymous history' );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
+
+ this.historypanel.waitForHdas( function(){
this.test.assertSelectorHasText( nameSelector, unnamedName, 'History name is ' + unnamedName );
this.test.assertSelectorHasText( emptyMsgSelector, emptyMsgStr,
'Message contains "' + emptyMsgStr + '"' );
});
});
-
// ===================================================================
spaceghost.run( function(){
this.test.done();
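[Editor's note] A recurring pattern in this changeset: test steps that previously captured the history iframe's bounds up front (historyFrameInfo) and wrapped every assertion in withFrame( spaceghost.data.selectors.frames.history, ... ) now go through panel-aware helpers such as withHistoryPanel and historypanel.waitForHdas. A minimal before/after sketch of the pattern; nameSelector stands in for whatever panel selector a test needs:

// Before: explicit frame bookkeeping in every step
spaceghost.then( function(){
    this.withFrame( spaceghost.data.selectors.frames.history, function(){
        this.test.assertVisible( nameSelector, 'History name is visible' );
    });
});

// After: the helper addresses the frame itself (and waitForHdas also
// waits for the panel to finish rendering its HDAs before asserting)
spaceghost.withHistoryPanel( function(){
    this.test.assertVisible( nameSelector, 'History name is visible' );
});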
diff -r 51d3a88f45ee2e3aa89395d2df13b5452d463274 -r f19741fdb9a6b6f15f3e9b983d4a8228b205ffda test/casperjs/casperjs_runner.py
--- a/test/casperjs/casperjs_runner.py
+++ b/test/casperjs/casperjs_runner.py
@@ -73,7 +73,7 @@
debug = False
# bit of a hack - this is the beginning of the last string when capserjs --verbose=true --logLevel=debug
# use this to get subprocess to stop waiting for output
- casper_done_str = '# Tests complete'
+ casper_done_str = '# Stopping'
# convert js test results to unittest.TestResults
results_adapter = None #CasperJsonToUnittestResultsConverter()
@@ -209,7 +209,8 @@
js_test_results = json.loads( results )
failures = js_test_results[ 'testResults' ][ 'failures' ]
assert len( failures ) == 0, (
- "Some assertions failed in the headless browser tests (see the log for details)" )
+ "%d assertions failed in the headless browser tests" %( len( failures ) )
+ + " (see the log for details)" )
# ---------------------------------------------------------------- TestCase overrides
def setUp( self ):
@@ -299,10 +300,12 @@
# ==================================================================== TESTCASE EXAMPLE
# these could be broken out into other py files - shouldn't be necc. ATM
class Test_01_User( CasperJSTestCase ):
- """TestCase that uses javascript and a headless browser to test dynamic pages.
+ """Tests for the Galaxy user centered functionality:
+ registration, login, etc.
"""
def test_10_registration( self ):
- """User registration tests: register new user, logout, attempt bad registrations.
+ """User registration tests:
+ register new user, logout, attempt bad registrations.
"""
# all keywords will be compiled into a single JSON obj and passed to the server
#self.run_js_script( 'registration-tests.js',
@@ -331,7 +334,7 @@
class Test_03_HistoryPanel( CasperJSTestCase ):
- """(Minimal) casperjs tests for tools.
+ """Tests for History fetching, rendering, and modeling.
"""
def test_00_history_panel( self ):
"""Test history panel basics (controls, structure, refresh, history options menu, etc.).
@@ -339,13 +342,18 @@
self.run_js_script( 'history-panel-tests.js' )
def test_10_anonymous_histories( self ):
+ """Test history options button.
+ """
+ self.run_js_script( 'history-options-tests.js' )
+
+ def test_20_anonymous_histories( self ):
"""Test history panel basics with an anonymous user.
"""
self.run_js_script( 'anon-history-tests.js' )
class Test_04_HDAs( CasperJSTestCase ):
- """(Minimal) casperjs tests for tools.
+ """Tests for HistoryDatasetAssociation fetching, rendering, and modeling.
"""
def test_00_HDA_states( self ):
"""Test structure rendering of HDAs in all the possible HDA states
diff -r 51d3a88f45ee2e3aa89395d2df13b5452d463274 -r f19741fdb9a6b6f15f3e9b983d4a8228b205ffda test/casperjs/hda-state-tests.js
--- a/test/casperjs/hda-state-tests.js
+++ b/test/casperjs/hda-state-tests.js
@@ -55,17 +55,10 @@
// start a new user
spaceghost.user.loginOrRegisterUser( email, password );
-// grab the history frame bounds for later mouse tests
-spaceghost.then( function(){
- historyFrameInfo = this.getElementInfo( 'iframe[name="galaxy_history"]' );
- //this.debug( 'historyFrameInfo:' + this.jsonStr( historyFrameInfo ) );
-});
-
// upload a file
spaceghost.then( function upload(){
spaceghost.tools.uploadFile( filepathToUpload, function uploadCallback( _uploadInfo ){
testUploadInfo = _uploadInfo;
- this.info( 'testUploadInfo:' + this.jsonStr( testUploadInfo ) );
});
});
@@ -73,13 +66,12 @@
// =================================================================== TEST HELPERS
//NOTE: to be called with fn.call( spaceghost, ... )
-function testTitle( hdaSelector, hid, name ){
- var titleSelector = hdaSelector + ' ' + this.historypanel.data.selectors.hda.title,
- titleShouldBe = hid + ': ' + name;
+function testTitle( hdaSelector, name ){
+ var titleSelector = hdaSelector + ' ' + this.historypanel.data.selectors.hda.title;
this.test.assertVisible( titleSelector,
'HDA title is visible' );
- this.test.assertSelectorHasText( titleSelector, titleShouldBe,
- 'HDA has proper hid and title' );
+ this.test.assertSelectorHasText( titleSelector, name,
+ 'HDA contains name (' + name + '): ' + this.fetchText( titleSelector ) );
}
function testTitleButtonStructure( hdaSelector, shouldHaveTheseButtons ){
@@ -87,28 +79,9 @@
shouldHaveTheseButtons = shouldHaveTheseButtons || [ 'display', 'edit', 'delete' ];
var hdaDbId = this.getElementAttribute( hdaSelector, 'id' ).split( '-' )[1],
- buttonsArea = hdaSelector + ' ' + this.historypanel.data.selectors.hda.titleButtons,
- buttons = {
- // this seems backwards -> TODO: move buttonsArea concat into loop below, move this data to historypanel.data
- display : {
- nodeName : this.historypanel.data.text.hda.ok.nodeNames.displayButton,
- selector : buttonsArea + ' ' + this.historypanel.data.selectors.hda.displayButton,
- tooltip : this.historypanel.data.text.hda.ok.tooltips.displayButton,
- hrefTpl : this.historypanel.data.text.hda.ok.hrefs.displayButton
- },
- edit : {
- nodeName : this.historypanel.data.text.hda.ok.nodeNames.editAttrButton,
- selector : buttonsArea + ' ' + this.historypanel.data.selectors.hda.editAttrButton,
- tooltip : this.historypanel.data.text.hda.ok.tooltips.editAttrButton,
- hrefTpl : this.historypanel.data.text.hda.ok.hrefs.editAttrButton
- },
- 'delete' : {
- nodeName : this.historypanel.data.text.hda.ok.nodeNames.deleteButton,
- selector : buttonsArea + ' ' + this.historypanel.data.selectors.hda.deleteButton,
- tooltip : this.historypanel.data.text.hda.ok.tooltips.deleteButton,
- hrefTpl : this.historypanel.data.text.hda.ok.hrefs.deleteButton
- }
- };
+ buttonsArea = hdaSelector + ' ' + this.historypanel.data.selectors.hda.titleButtonArea,
+ buttons = this.historypanel.data.hdaTitleButtons;
+
this.test.assertVisible( buttonsArea, 'Button area is visible' );
for( var i=0; i<shouldHaveTheseButtons.length; i++ ){
@@ -123,7 +96,7 @@
this.test.assertVisible( button.selector, buttonName + ' button is visible' );
var buttonElement = this.getElementInfo( button.selector );
- this.debug( 'buttonElement:' + this.jsonStr( buttonElement ) );
+ this.debug( 'buttonElement:' + this.jsonStr( this.quickInfo( buttonElement ) ) );
// should be an anchor
this.test.assert( buttonElement.nodeName === button.nodeName,
@@ -135,12 +108,13 @@
this.assertTextContains( href, hrefShouldBe,
buttonName + ' has proper href (' + hrefShouldBe + '): ' + href );
- this.historypanel.hoverOver( button.selector, function testingHover(){
- var tooltipText = button.tooltip;
- this.test.assertVisible( tooltipSelector, buttonName + ' button tooltip is visible when hovering' );
- this.test.assertSelectorHasText( tooltipSelector, tooltipText,
- buttonName + ' button has tooltip text: "' + tooltipText + '"' );
- }, historyFrameInfo );
+ this.historypanel.hoverOver( button.selector );
+ var tooltipText = button.tooltip;
+ this.test.assertVisible( tooltipSelector, buttonName + ' button tooltip is visible when hovering' );
+ this.test.assertSelectorHasText( tooltipSelector, tooltipText,
+ buttonName + ' button has tooltip text: "' + tooltipText + '"' );
+ // clear the tooltip
+ this.page.sendEvent( 'mouseover', 0, 0 );
}
}
@@ -185,6 +159,7 @@
this.test.comment( 'Primary action buttons div should exist and be visible' );
this.test.assertExists( buttonsSelector, 'Primary action buttons div exists' );
this.test.assertVisible( buttonsSelector, 'Primary action buttons div is visible' );
+ //TODO: ...
}
function testSecondaryActionButtons( hdaSelector ){
@@ -193,6 +168,7 @@
this.test.comment( 'Secondary action buttons div should exist and be visible' );
this.test.assertExists( buttonsSelector, 'Secondary action buttons div exists' );
this.test.assertVisible( buttonsSelector, 'Secondary action buttons div is visible' );
+ //TODO: ...
}
function testPeek( hdaSelector, expectedPeekArray ){
@@ -239,112 +215,97 @@
// =================================================================== TESTS
// ------------------------------------------------------------------- ok state
-spaceghost.then( function checkOkState(){
+spaceghost.withHistoryPanel( function(){
this.test.comment( 'HDAs in the "ok" state should be well formed' );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id;
- this.test.assertVisible( uploadSelector, 'HDA is visible' );
+ var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id;
+ this.test.assertVisible( uploadSelector, 'HDA is visible' );
- this.test.comment( 'should have the proper state class' );
- this.assertHasClass( uploadSelector, this.historypanel.data.selectors.hda.wrapper.stateClasses.ok,
- 'HDA has ok state class' );
+ this.test.comment( 'should have the proper state class' );
+ this.assertHasClass( uploadSelector, this.historypanel.data.selectors.hda.wrapper.stateClasses.ok,
+ 'HDA has ok state class' );
- // since we're using css there's no great way to test state icon (.state-icon is empty)
+ // since we're using css there's no great way to test state icon (.state-icon is empty)
- this.test.comment( 'should have proper title and hid' );
- testTitle.call( spaceghost, uploadSelector, testUploadInfo.hid, testUploadInfo.name );
+ this.test.comment( 'should have proper title and hid' );
+ testTitle.call( spaceghost, uploadSelector, testUploadInfo.filename );
- this.test.comment( 'should have all of the three, main buttons' );
- testTitleButtonStructure.call( spaceghost, uploadSelector );
+ this.test.comment( 'should have all of the three, main buttons' );
+ testTitleButtonStructure.call( spaceghost, uploadSelector );
- this.test.comment( 'body is not visible before clicking the hda title' );
- var body = uploadSelector + ' ' + this.historypanel.data.selectors.hda.body;
- this.test.assertNotVisible( body, 'body is not visible' );
+ this.test.comment( 'body is not visible before clicking the hda title' );
+ var body = uploadSelector + ' ' + this.historypanel.data.selectors.hda.body;
+ this.test.assertNotVisible( body, 'body is not visible' );
- this.test.comment( 'clicking the hda title should expand its body' );
- var hdaTitle = uploadSelector + ' ' + this.historypanel.data.selectors.hda.title;
- this.click( hdaTitle );
- this.wait( 500, function(){
+ this.test.comment( 'clicking the hda title should expand its body' );
+ this.historypanel.thenExpandHda( uploadSelector, function(){
+ // ugh.
+ this.jumpToHistory( function(){
testExpandedBody.call( spaceghost, uploadSelector, summaryShouldBeArray, infoShouldBe, false );
});
});
});
+// restore to collapsed
+spaceghost.then( function(){
+ this.test.comment( "Collapsing hda in 'ok' state should hide body again" );
+ var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id;
-// restore to collapsed
-spaceghost.then( function collapseOkState(){
- this.test.comment( "Collapsing hda in 'ok' state should hide body again" );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id,
- hdaTitle = uploadSelector + ' ' + this.historypanel.data.selectors.hda.title;
- body = uploadSelector + ' ' + this.historypanel.data.selectors.hda.body;
-
- this.click( hdaTitle );
- this.wait( 500, function(){
- this.test.assertNotVisible( body, 'body is not visible' );
- });
+ spaceghost.historypanel.thenCollapseHda( uploadSelector, function collapseOkState(){
+ this.test.assertNotVisible( uploadSelector + ' ' + this.historypanel.data.selectors.hda.body,
+ 'body is not visible' );
});
});
+// ------------------------------------------------------------------- new state
+spaceghost.withHistoryPanel( function(){
+ // set state directly through model, wait for re-render
+ //TODO: not ideal to test this
+ this.evaluate( function(){
+ return Galaxy.currHistoryPanel.model.hdas.at( 0 ).set( 'state', 'new' );
+ });
+ this.wait( 1000, function(){
+ this.test.comment( 'HDAs in the "new" state should be well formed' );
-// ------------------------------------------------------------------- new state
-spaceghost.then( function checkNewState(){
- this.test.comment( 'HDAs in the "new" state should be well formed' );
+ var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id;
+ this.test.assertVisible( uploadSelector, 'HDA is visible' );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- // set state directly through model
- //TODO: not ideal
- this.evaluate( function(){
- return Galaxy.currHistoryPanel.model.hdas.at( 0 ).set( 'state', 'new' );
- });
- // wait for re-render
- this.wait( 500, function(){
- var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id;
- this.test.assertVisible( uploadSelector, 'HDA is visible' );
+ // should have proper title and hid
+ testTitle.call( spaceghost, uploadSelector, testUploadInfo.filename );
- // should have proper title and hid
- testTitle.call( spaceghost, uploadSelector, testUploadInfo.hid, testUploadInfo.name );
+ this.test.comment( 'new HDA should have the new state class' );
+ this.assertHasClass( uploadSelector, this.historypanel.data.selectors.hda.wrapper.stateClasses['new'],
+ 'HDA has new state class' );
- this.test.comment( 'new HDA should have the new state class' );
- this.assertHasClass( uploadSelector, this.historypanel.data.selectors.hda.wrapper.stateClasses['new'],
- 'HDA has new state class' );
+ this.test.comment( 'new HDA should NOT have any of the three, main buttons' );
+ var buttonSelector = uploadSelector + ' ' + this.historypanel.data.selectors.hda.titleButtons + ' a';
+ this.test.assertDoesntExist( buttonSelector, 'No display, edit, or delete buttons' );
- this.test.comment( 'new HDA should NOT have any of the three, main buttons' );
- var buttonSelector = uploadSelector + ' ' + this.historypanel.data.selectors.hda.titleButtons + ' a';
- this.test.assertDoesntExist( buttonSelector, 'No display, edit, or delete buttons' );
+ this.test.comment( 'clicking the title of the new HDA will expand the body' );
- this.test.comment( 'clicking the title of the new HDA will expand the body' );
- var hdaTitle = uploadSelector + ' ' + this.historypanel.data.selectors.hda.title;
- this.click( hdaTitle );
- this.wait( 500, function(){
- var bodySelector = uploadSelector + ' ' + this.historypanel.data.selectors.hda.body;
- this.test.assertVisible( bodySelector, 'HDA body is visible (after expanding)' );
+ this.historypanel.thenExpandHda( uploadSelector, function(){
+ var bodySelector = uploadSelector + ' ' + this.historypanel.data.selectors.hda.body;
+ this.test.assertVisible( bodySelector, 'HDA body is visible (after expanding)' );
- var expectedBodyText = 'This is a new dataset';
- this.test.comment( 'the body should have the text: ' + expectedBodyText );
- this.test.assertSelectorHasText( bodySelector, expectedBodyText,
- 'HDA body has text: ' + expectedBodyText );
-
- // restore to collapsed
- this.click( hdaTitle );
- });
+ var expectedBodyText = 'This is a new dataset';
+ this.test.comment( 'the body should have the text: ' + expectedBodyText );
+ this.test.assertSelectorHasText( bodySelector, expectedBodyText,
+ 'HDA body has text: ' + expectedBodyText );
});
});
});
// restore state, collapse
-spaceghost.then( function revertStateAndCollapse(){
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
+spaceghost.withHistoryPanel( function revertStateAndCollapse(){
+ var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id;
+
+ this.historypanel.thenCollapseHda( uploadSelector, function(){
this.evaluate( function(){
return Galaxy.currHistoryPanel.model.hdas.at( 0 ).set( 'state', 'ok' );
});
- this.wait( 500, function(){
- var hdaTitle = '#' + testUploadInfo.hdaElement.attributes.id
- + ' ' + this.historypanel.data.selectors.hda.title;
- this.click( hdaTitle );
- });
});
+ this.wait( 1000 );
});
-
+/*
+*/
// ===================================================================
spaceghost.run( function(){
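[Editor's note] One technique in these hda-state tests deserves a callout: since not every HDA state is easy to produce through the UI, the tests drive the client-side Backbone model directly with evaluate and then wait for the panel to re-render (the diff's own TODO concedes this is not ideal, but it keeps the rendering tests independent of the backend). The pattern, as used above, where uploadSelector is the '#'-prefixed id of the uploaded HDA's element:

// force an arbitrary HDA state through the model, wait for the re-render, then assert
spaceghost.withHistoryPanel( function(){
    this.evaluate( function(){
        return Galaxy.currHistoryPanel.model.hdas.at( 0 ).set( 'state', 'new' );
    });
    this.wait( 1000, function(){
        this.assertHasClass( uploadSelector,
            this.historypanel.data.selectors.hda.wrapper.stateClasses[ 'new' ],
            'HDA re-rendered with the new state class' );
    });
});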
diff -r 51d3a88f45ee2e3aa89395d2df13b5452d463274 -r f19741fdb9a6b6f15f3e9b983d4a8228b205ffda test/casperjs/history-options-tests.js
--- /dev/null
+++ b/test/casperjs/history-options-tests.js
@@ -0,0 +1,145 @@
+// have to handle errors here - or phantom/casper won't bail but _HANG_
+try {
+ var utils = require( 'utils' ),
+ xpath = require( 'casper' ).selectXPath,
+ format = utils.format,
+
+ //...if there's a better way - please let me know, universe
+ scriptDir = require( 'system' ).args[3]
+ // remove the script filename
+ .replace( /[\w|\.|\-|_]*$/, '' )
+ // if given rel. path, prepend the curr dir
+ .replace( /^(?!\/)/, './' ),
+ spaceghost = require( scriptDir + 'spaceghost' ).create({
+ // script options here (can be overridden by CLI)
+ //verbose: true,
+ //logLevel: debug,
+ scriptDir: scriptDir
+ });
+
+ spaceghost.start();
+
+} catch( error ){
+ console.debug( error );
+ phantom.exit( 1 );
+}
+
+// ===================================================================
+/* TODO:
+ possibly break this file up
+*/
+// =================================================================== globals and helpers
+var email = spaceghost.user.getRandomEmail(),
+ password = '123456';
+if( spaceghost.fixtureData.testUser ){
+ email = spaceghost.fixtureData.testUser.email;
+ password = spaceghost.fixtureData.testUser.password;
+ spaceghost.info( 'Will use fixtureData.testUser: ' + email );
+}
+
+// selectors and labels
+var includeDeletedOptionsLabel = spaceghost.historyoptions.data.labels.options.includeDeleted;
+
+// local
+var filepathToUpload = '../../test-data/1.txt',
+ testUploadInfo = {};
+
+
+// =================================================================== TESTS
+// ------------------------------------------------------------------- set up
+// start a new user
+spaceghost.user.loginOrRegisterUser( email, password );
+
+spaceghost.tools.uploadFile( filepathToUpload, function uploadCallback( _uploadInfo ){
+ testUploadInfo = _uploadInfo;
+});
+
+// ------------------------------------------------------------------- history options menu structure
+//NOTE: options menu should be functionally tested elsewhere
+spaceghost.historypanel.waitForHdas().then( function checkHistoryOptions(){
+ this.test.comment( 'History options icon should be in place and menu should have the proper structure' );
+
+ // check the button and icon
+ this.test.assertExists( this.historyoptions.data.selectors.button, "Found history options button" );
+ this.test.assertVisible( this.historyoptions.data.selectors.button, "History options button is visible" );
+ this.test.assertVisible( this.historyoptions.data.selectors.buttonIcon, "History options icon is visible" );
+
+ // open the menu
+ this.click( this.historyoptions.data.selectors.button );
+ this.test.assertVisible( this.historyoptions.data.selectors.menu,
+ "Menu is visible when options button is clicked" );
+
+ // check the options
+ var historyOptions = this.historyoptions.data.labels.options;
+ for( var optionKey in historyOptions ){
+ if( historyOptions.hasOwnProperty( optionKey ) ){
+ var optionLabel = historyOptions[ optionKey ];
+ this.test.assertVisible( this.historyoptions.data.selectors.optionXpathByLabelFn( optionLabel ),
+ 'Option label is visible: ' + optionLabel );
+ }
+ }
+
+ // clear the menu
+ this.click( 'body' );
+ this.test.assertNotVisible( this.historyoptions.data.selectors.menu,
+ "Clicking away from the menu closes it" );
+});
+
+// ------------------------------------------------------------------- options allow showing/hiding deleted hdas
+spaceghost.then( function(){
+ this.test.comment( 'Deleting HDA' );
+ var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id;
+
+ this.historypanel.deleteHda( uploadSelector, function(){
+ this.test.assertNotExists( uploadSelector, "Deleted HDA is NOT in the DOM" );
+ });
+});
+
+spaceghost.then( function(){
+ this.test.comment( 'History options->' + includeDeletedOptionsLabel + ' shows deleted datasets' );
+ var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id;
+
+ this.historyoptions.includeDeleted( function(){
+ this.test.assertExists( uploadSelector,
+ "Deleted HDA is in the DOM (using history options -> " + includeDeletedOptionsLabel + ")" );
+ this.test.assertVisible( uploadSelector,
+ "Deleted HDA is visible again (using history options -> " + includeDeletedOptionsLabel + ")" );
+ });
+});
+
+spaceghost.then( function(){
+ this.test.comment( 'History options->' + includeDeletedOptionsLabel + ' (again) re-hides deleted datasets' );
+
+ this.historyoptions.excludeDeleted( function(){
+ this.test.assertDoesntExist( '#' + testUploadInfo.hdaElement.attributes.id,
+ "Deleted HDA is not in the DOM (using history options -> " + includeDeletedOptionsLabel + ")" );
+ });
+ // undelete the uploaded file
+ this.historypanel.undeleteHda( '#' + testUploadInfo.hdaElement.attributes.id );
+});
+
+// ------------------------------------------------------------------- hidden hdas aren't shown
+// ------------------------------------------------------------------- history options allows showing hidden hdas
+// can't test this yet w/o a way to make hdas hidden thru the ui or api
+
+
+// ------------------------------------------------------------------- history options collapses all expanded hdas
+spaceghost.then( function(){
+ this.historypanel.thenExpandHda( '#' + testUploadInfo.hdaElement.attributes.id );
+});
+spaceghost.then( function(){
+ this.test.comment( 'History option collapses all expanded hdas' );
+
+ this.historyoptions.collapseExpanded( function(){
+ var uploadedSelector = '#' + testUploadInfo.hdaElement.attributes.id;
+ this.withHistoryPanel( function(){
+ this.test.assertNotVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+ "Body for uploaded file is not visible" );
+ });
+ });
+});
+
+// ===================================================================
+spaceghost.run( function(){
+ this.test.done();
+});
diff -r 51d3a88f45ee2e3aa89395d2df13b5452d463274 -r f19741fdb9a6b6f15f3e9b983d4a8228b205ffda test/casperjs/history-panel-tests.js
--- a/test/casperjs/history-panel-tests.js
+++ b/test/casperjs/history-panel-tests.js
@@ -72,177 +72,164 @@
// ------------------------------------------------------------------- set up
// start a new user
spaceghost.user.loginOrRegisterUser( email, password );
-//??: why is a reload needed here? If we don't, loggedInAs === '' ...
-spaceghost.thenOpen( spaceghost.baseUrl, function(){
- var loggedInAs = spaceghost.user.loggedInAs();
- this.test.assert( loggedInAs === email, 'loggedInAs() matches email: "' + loggedInAs + '"' );
+
+// ------------------------------------------------------------------- check structure of empty history
+spaceghost.thenOpen( spaceghost.baseUrl ).historypanel.waitForHdas( function(){
+ this.test.comment( 'history panel with a new, empty history should be well formed' );
+ this.test.comment( "frame should have proper url and title: 'History'" );
+ this.test.assertMatch( this.getCurrentUrl(), /\/history/, 'Found history frame url' );
+ this.test.assertTitle( this.getTitle(), 'History', 'Found history frame title' );
+
+ this.test.comment( "history name should exist, be visible, and have text " + unnamedName );
+ this.test.assertExists( nameSelector, nameSelector + ' exists' );
+ this.test.assertVisible( nameSelector, 'History name is visible' );
+ this.test.assertSelectorHasText( nameSelector, unnamedName, 'History name is ' + unnamedName );
+
+ this.test.comment( "history subtitle should display size and size should be: " + initialSizeStr );
+ this.test.assertExists( subtitleSelector, 'Found ' + subtitleSelector );
+ this.test.assertVisible( subtitleSelector, 'History subtitle is visible' );
+ this.test.assertSelectorHasText( subtitleSelector, initialSizeStr,
+ 'History subtitle has "' + initialSizeStr + '"' );
+
+ this.test.comment( "tags and annotation icons should be available" );
+ this.test.assertExists( tagIconSelector, 'Tag icon button found' );
+ this.test.assertExists( annoIconSelector, 'Annotation icon button found' );
+
+ this.test.comment( "A message about the current history being empty should be displayed" );
+ this.test.assertExists( emptyMsgSelector, emptyMsgSelector + ' exists' );
+ this.test.assertVisible( emptyMsgSelector, 'Empty history message is visible' );
+ this.test.assertSelectorHasText( emptyMsgSelector, emptyMsgStr,
+ 'Message contains "' + emptyMsgStr + '"' );
});
-// grab the history frame bounds for later mouse tests
-spaceghost.then( function(){
- historyFrameInfo = this.getElementInfo( 'iframe[name="galaxy_history"]' );
- //this.debug( 'historyFrameInfo:' + this.jsonStr( historyFrameInfo ) );
-});
+// ------------------------------------------------------------------- name editing
+spaceghost.withHistoryPanel( function(){
+ this.test.comment( 'history panel, editing the history name' );
-// ------------------------------------------------------------------- check structure of empty history
-spaceghost.thenOpen( spaceghost.baseUrl, function testPanelStructure(){
- this.test.comment( 'history panel, new history' );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.test.comment( "frame should have proper url and title: 'History'" );
- this.test.assertMatch( this.getCurrentUrl(), /\/history/, 'Found history frame url' );
- this.test.assertTitle( this.getTitle(), 'History', 'Found history frame title' );
+ this.test.comment( 'name should have a tooltip with proper info on name editing' );
+ this.historypanel.hoverOver( nameSelector );
+ this.test.assertExists( tooltipSelector, "Found tooltip after name hover" );
+ this.test.assertSelectorHasText( tooltipSelector, nameTooltip );
+ // clear the tooltip
+ this.page.sendEvent( 'mousemove', -1, -1 );
- this.test.comment( "history name should exist, be visible, and have text " + unnamedName );
- this.test.assertExists( nameSelector, nameSelector + ' exists' );
- this.test.assertVisible( nameSelector, 'History name is visible' );
- this.test.assertSelectorHasText( nameSelector, unnamedName, 'History name is ' + unnamedName );
+ this.test.comment( 'name should be create an input when clicked' );
+ this.assertHasClass( nameSelector, editableTextClass, "Name field classed for editable text" );
+ this.click( nameSelector );
+ this.test.assertExists( editableTextInput, "Clicking on name creates an input" );
- this.test.comment( "history subtitle should display size and size should be: " + initialSizeStr );
- this.test.assertExists( subtitleSelector, 'Found ' + subtitleSelector );
- this.test.assertVisible( subtitleSelector, 'History subtitle is visible' );
- this.test.assertSelectorHasText( subtitleSelector, initialSizeStr,
- 'History subtitle has "' + initialSizeStr + '"' );
-
- this.test.comment( "tags and annotation icons should be available" );
- this.test.assertExists( tagIconSelector, 'Tag icon button found' );
- this.test.assertExists( annoIconSelector, 'Annotation icon button found' );
-
- this.test.comment( "A message about the current history being empty should be displayed" );
- this.test.assertExists( emptyMsgSelector, emptyMsgSelector + ' exists' );
- this.test.assertVisible( emptyMsgSelector, 'Empty history message is visible' );
- this.test.assertSelectorHasText( emptyMsgSelector, emptyMsgStr,
- 'Message contains "' + emptyMsgStr + '"' );
+ this.test.comment( 'name should be editable by entering keys and pressing enter' );
+ //NOTE: casperjs.sendKeys adds a click before and a selector.blur after sending - won't work here
+ this.page.sendEvent( 'keypress', newHistoryName );
+ this.page.sendEvent( 'keypress', this.page.event.key.Enter );
+ // wait for send and re-render name
+ this.wait( 1000, function(){
+ this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is ' + newHistoryName );
+ this.test.assertDoesntExist( editableTextInput, "Input disappears after pressing enter" );
});
});
-// ------------------------------------------------------------------- name editing
-spaceghost.then( function(){
- this.test.comment( 'history panel, editing the history name' );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.test.comment( 'name should have a tooltip with proper info on name editing' );
- var nameInfo = this.getElementInfo( nameSelector );
- this.page.sendEvent( 'mousemove', historyFrameInfo.x + nameInfo.x + 1, historyFrameInfo.y + nameInfo.y + 1 );
- this.test.assertExists( tooltipSelector, "Found tooltip after name hover" );
- this.test.assertSelectorHasText( tooltipSelector, nameTooltip );
+spaceghost.withHistoryPanel( function(){
+ this.test.comment( 'name should revert if user clicks away while editing' );
- this.test.comment( 'name should be create an input when clicked' );
- this.assertHasClass( nameSelector, editableTextClass, "Name field classed for editable text" );
- this.click( nameSelector );
- this.test.assertExists( editableTextInput, "Clicking on name creates an input" );
+ this.click( nameSelector );
+ this.page.sendEvent( 'keypress', "Woodchipper metagenomics, Fargo, ND" );
- this.test.comment( 'name should be editable by entering keys and pressing enter' );
- //NOTE: casperjs.sendKeys adds a click before and a selector.blur after sending - won't work here
- this.page.sendEvent( 'keypress', newHistoryName );
- this.page.sendEvent( 'keypress', this.page.event.key.Enter );
- this.wait( 1000, function(){
- this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is ' + newHistoryName );
- this.test.assertDoesntExist( editableTextInput, "Input disappears after pressing enter" );
- });
+ this.page.sendEvent( 'mousedown', -1, -1 );
+ this.wait( 1000, function(){
+ this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is STILL ' + newHistoryName );
+ this.test.assertDoesntExist( editableTextInput, "Input disappears after clicking away" );
});
});
-spaceghost.then( function(){
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.test.comment( 'name should revert if user clicks away while editing' );
- this.click( nameSelector );
- this.page.sendEvent( 'keypress', "Woodchipper metagenomics, Fargo, ND" );
- // click above the name input element
- var inputInfo = this.getElementInfo( editableTextInput );
- this.page.sendEvent( 'mousedown', historyFrameInfo.x + inputInfo.x + 1, historyFrameInfo.y + inputInfo.y - 5 );
+spaceghost.withHistoryPanel( function(){
+ this.test.comment( 'name should revert if user hits ESC while editing' );
- this.wait( 1000, function(){
- this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is STILL ' + newHistoryName );
- this.test.assertDoesntExist( editableTextInput, "Input disappears after clicking away" );
- });
- });
-});
-spaceghost.then( function(){
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.test.comment( 'name should revert if user hits ESC while editing' );
- this.click( nameSelector );
- this.page.sendEvent( 'keypress', "Arsenic Bacteria" );
+ this.click( nameSelector );
+ this.page.sendEvent( 'keypress', "Arsenic Bacteria" );
- this.page.sendEvent( 'keypress', this.page.event.key.Escape );
- this.wait( 1000, function(){
- this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is STILL ' + newHistoryName );
- this.test.assertDoesntExist( editableTextInput, "Input disappears after hitting ESC" );
- });
+ this.page.sendEvent( 'keypress', this.page.event.key.Escape );
+ this.wait( 1000, function(){
+ this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is STILL ' + newHistoryName );
+ this.test.assertDoesntExist( editableTextInput, "Input disappears after hitting ESC" );
});
});
// ------------------------------------------------------------------- check structure of NON empty history
// upload file: 1.txt
-spaceghost.then( function upload(){
+spaceghost.tools.uploadFile( filepathToUpload, function uploadCallback( _uploadInfo ){
this.test.comment( 'uploaded file should appear in history' );
- spaceghost.tools.uploadFile( filepathToUpload, function uploadCallback( _uploadInfo ){
- this.debug( 'uploaded HDA info: ' + this.jsonStr( _uploadInfo ) );
- var hasHda = _uploadInfo.hdaElement,
- hasClass = _uploadInfo.hdaElement.attributes[ 'class' ],
- hasOkClass = _uploadInfo.hdaElement.attributes[ 'class' ].indexOf( wrapperOkClassName ) !== -1;
- this.test.assert( ( hasHda && hasClass && hasOkClass ), "Uploaded file: " + _uploadInfo.name );
- testUploadInfo = _uploadInfo;
- });
+
+ this.debug( 'uploaded HDA info: ' + this.jsonStr( _uploadInfo ) );
+ var hasHda = _uploadInfo.hdaElement,
+ hasClass = _uploadInfo.hdaElement.attributes[ 'class' ],
+ hasOkClass = _uploadInfo.hdaElement.attributes[ 'class' ].indexOf( wrapperOkClassName ) !== -1;
+ this.test.assert( ( hasHda && hasClass && hasOkClass ), "Uploaded file: " + _uploadInfo.name );
+ testUploadInfo = _uploadInfo;
});
-spaceghost.then( function checkPanelStructure(){
+spaceghost.withHistoryPanel( function checkPanelStructure(){
this.test.comment( 'checking structure of non-empty panel' );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.test.comment( "history name should exist, be visible, and have text " + unnamedName );
- this.test.assertExists( nameSelector, nameSelector + ' exists' );
- this.test.assertVisible( nameSelector, 'History name is visible' );
- this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is ' + newHistoryName );
+ this.test.comment( "history name should exist, be visible, and have text " + unnamedName );
+ this.test.assertExists( nameSelector, nameSelector + ' exists' );
+ this.test.assertVisible( nameSelector, 'History name is visible' );
+ this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is ' + newHistoryName );
- this.test.comment( "history subtitle should display size and size should be " + onetxtFilesize + " bytes" );
- var onetxtFilesize = require( 'fs' ).size( this.options.scriptDir + filepathToUpload ),
- expectedSubtitle = onetxtFilesize + ' bytes';
- this.test.assertExists( subtitleSelector, 'Found ' + subtitleSelector );
- this.test.assertVisible( subtitleSelector, 'History subtitle is visible' );
- this.test.assertSelectorHasText( subtitleSelector, expectedSubtitle,
- 'History subtitle has "' + expectedSubtitle + '"' );
+ this.test.comment( "history subtitle should display size and size should be " + onetxtFilesize + " bytes" );
+ var onetxtFilesize = require( 'fs' ).size( this.options.scriptDir + filepathToUpload ),
+ expectedSubtitle = onetxtFilesize + ' bytes';
+ this.test.assertExists( subtitleSelector, 'Found ' + subtitleSelector );
+ this.test.assertVisible( subtitleSelector, 'History subtitle is visible' );
+ this.test.assertSelectorHasText( subtitleSelector, expectedSubtitle,
+ 'History subtitle has "' + expectedSubtitle + '": ' + this.fetchText( subtitleSelector ).trim() );
- this.test.comment( "tags and annotation icons should be available" );
- this.test.assertExists( tagIconSelector, 'Tag icon button found' );
- this.test.assertExists( annoIconSelector, 'Annotation icon button found' );
+ this.test.comment( "tags and annotation icons should be available" );
+ this.test.assertExists( tagIconSelector, 'Tag icon button found' );
+ this.test.assertExists( annoIconSelector, 'Annotation icon button found' );
- this.test.comment( "A message about the current history being empty should NOT be displayed" );
- this.test.assertExists( emptyMsgSelector, emptyMsgSelector + ' exists' );
- this.test.assertNotVisible( emptyMsgSelector, 'Empty history message is NOT visible' );
- });
+ this.test.comment( "A message about the current history being empty should NOT be displayed" );
+ this.test.assertExists( emptyMsgSelector, emptyMsgSelector + ' exists' );
+ this.test.assertNotVisible( emptyMsgSelector, 'Empty history message is NOT visible' );
});
// ------------------------------------------------------------------- tags
// keeping this light here - better for it's own test file
//TODO: check tooltips
-spaceghost.then( function openTags(){
+spaceghost.withHistoryPanel( function openTags(){
this.test.comment( 'tag area should open when the history panel tag icon is clicked' );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.mouseEvent( 'click', tagIconSelector );
- this.wait( 1000, function(){
- this.test.assertVisible( tagAreaSelector, 'Tag area is now displayed' );
- });
+
+ this.click( tagIconSelector );
+ this.wait( 1000, function(){
+ this.test.assertVisible( tagAreaSelector, 'Tag area is now displayed' );
+ });
+});
+spaceghost.withHistoryPanel( function closeAnnotation(){
+ this.test.comment( 'annotation area should close when the history panel tag icon is clicked again' );
+
+ this.click( tagIconSelector );
+ this.wait( 1000, function(){
+ this.test.assertNotVisible( tagAreaSelector, 'Tag area is now hidden' );
});
});
// ------------------------------------------------------------------- annotation
// keeping this light here - better for it's own test file
//TODO: check tooltips
-spaceghost.then( function openAnnotation(){
+spaceghost.withHistoryPanel( function openAnnotation(){
this.test.comment( 'annotation area should open when the history panel annotation icon is clicked' );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.mouseEvent( 'click', annoIconSelector );
- this.wait( 1000, function(){
- this.test.assertVisible( annoAreaSelector, 'Annotation area is now displayed' );
- });
+
+ this.click( annoIconSelector );
+ this.wait( 1000, function(){
+ this.test.assertVisible( annoAreaSelector, 'Annotation area is now displayed' );
});
});
-spaceghost.then( function closeAnnotation(){
+spaceghost.withHistoryPanel( function closeAnnotation(){
this.test.comment( 'annotation area should close when the history panel tag icon is clicked again' );
- this.withFrame( spaceghost.data.selectors.frames.history, function bler(){
- this.mouseEvent( 'click', annoIconSelector );
- this.wait( 1000, function(){
- this.test.assertNotVisible( annoAreaSelector, 'Tag area is now hidden' );
- });
+
+ this.click( annoIconSelector );
+ this.wait( 1000, function(){
+ this.test.assertNotVisible( annoAreaSelector, 'Annotation area is now hidden' );
});
});
@@ -261,96 +248,16 @@
});
});
-// ------------------------------------------------------------------- history options menu structure
-//NOTE: options menu should be functionally tested elsewhere
-spaceghost.then( function historyOptions(){
- this.test.comment( 'History options icon should be in place and menu should have the proper structure' );
-
- // check the button and icon
- this.test.assertExists( this.historyoptions.data.selectors.button, "Found history options button" );
- this.test.assertVisible( this.historyoptions.data.selectors.button, "History options button is visible" );
- this.test.assertVisible( this.historyoptions.data.selectors.buttonIcon, "History options icon is visible" );
-
- // open the menu
- this.click( this.historyoptions.data.selectors.button );
- this.test.assertVisible( this.historyoptions.data.selectors.menu,
- "Menu is visible when options button is clicked" );
-
- // check the options
- for( var optionKey in this.historyoptions.data.labels.options ){
- if( this.historyoptions.data.labels.options.hasOwnProperty( optionKey ) ){
- var optionLabel = this.historyoptions.data.labels.options[ optionKey ],
- optionXpath = this.historyoptions.data.selectors.optionXpathByLabelFn( optionLabel );
- this.test.assertVisible( optionXpath, 'Option label is visible: ' + optionLabel );
- }
- }
-});
-
-// ------------------------------------------------------------------- deleted hdas aren't in the dom
-spaceghost.then( function(){
- this.test.comment( 'deleted hdas shouldn\'t be in the history panel DOM' );
-
- this.historypanel.deleteHda( '#' + testUploadInfo.hdaElement.attributes.id, function(){
- this.test.assertDoesntExist( '#' + testUploadInfo.hdaElement.attributes.id,
- "Deleted HDA is not in the DOM" );
- });
-});
-
-// ------------------------------------------------------------------- options allow showing/hiding deleted hdas
-spaceghost.then( function(){
- this.test.comment( 'History options->' + includeDeletedOptionsLabel + ' shows deleted datasets' );
-
- this.historyoptions.includeDeleted();
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.waitForSelector( nameSelector, function(){
- this.test.assertExists( '#' + testUploadInfo.hdaElement.attributes.id,
- "Deleted HDA is in the DOM (using history options -> " + includeDeletedOptionsLabel + ")" );
- this.test.assertVisible( '#' + testUploadInfo.hdaElement.attributes.id,
- "Deleted HDA is visible again (using history options -> " + includeDeletedOptionsLabel + ")" );
- });
- });
-});
-
-spaceghost.then( function(){
- this.test.comment( 'History options->' + includeDeletedOptionsLabel + ' (again) re-hides deleted datasets' );
-
- this.historyoptions.includeDeleted();
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.waitForSelector( nameSelector, function(){
- this.test.assertDoesntExist( '#' + testUploadInfo.hdaElement.attributes.id,
- "Deleted HDA is not in the DOM (using history options -> " + includeDeletedOptionsLabel + ")" );
- });
- });
-});
-
-// undelete the uploaded file
-spaceghost.then( function(){
- this.historyoptions.includeDeleted();
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.waitForSelector( nameSelector, function(){
- //TODO: to conv. fn
- this.click( '#' + testUploadInfo.hdaElement.attributes.id
- + ' ' + this.historypanel.data.selectors.history.undeleteLink );
- });
- });
-});
-
-// ------------------------------------------------------------------- hidden hdas aren't shown
-// ------------------------------------------------------------------- history options allows showing hidden hdas
-// can't test this yet w/o a way to make hdas hidden thru the ui or api
-
// ------------------------------------------------------------------- hdas can be expanded by clicking on the hda name
// broken in webkit w/ jq 1.7
-spaceghost.then( function(){
+spaceghost.historypanel.waitForHdas( function(){
this.test.comment( 'HDAs can be expanded by clicking on the name' );
var uploadedSelector = '#' + testUploadInfo.hdaElement.attributes.id;
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.click( uploadedSelector + ' .historyItemTitle' );
- this.wait( 1000, function(){
- this.test.assertVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
- "Body for uploaded file is visible" );
- });
+ this.click( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.title );
+ this.wait( 1000, function(){
+ this.test.assertVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+ "Body for uploaded file is visible" );
});
});
@@ -360,26 +267,22 @@
var uploadedSelector = '#' + testUploadInfo.hdaElement.attributes.id;
this.click( refreshButtonSelector );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.waitForSelector( nameSelector, function(){
- this.test.assertVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
- "Body for uploaded file is visible" );
- });
+ this.historypanel.waitForHdas( function(){
+ this.test.assertVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+ "Body for uploaded file is visible" );
});
// this will break: webkit + jq 1.7
});
// ------------------------------------------------------------------- expanded hdas collapse by clicking name again
-spaceghost.then( function(){
+spaceghost.withHistoryPanel( function(){
this.test.comment( 'Expanded hdas collapse by clicking name again' );
var uploadedSelector = '#' + testUploadInfo.hdaElement.attributes.id;
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.click( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.title );
- this.wait( 500, function(){
- this.test.assertNotVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
- "Body for uploaded file is not visible" );
- });
+ this.click( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.title );
+ this.wait( 500, function(){
+ this.test.assertNotVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+ "Body for uploaded file is not visible" );
});
});
@@ -389,32 +292,9 @@
var uploadedSelector = '#' + testUploadInfo.hdaElement.attributes.id;
this.click( refreshButtonSelector );
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.waitForSelector( nameSelector, function(){
- this.test.assertNotVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
- "Body for uploaded file is not visible" );
- });
- });
-});
-
-// ------------------------------------------------------------------- history options collapses all expanded hdas
-spaceghost.then( function(){
- // expand again
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.click( '#' + testUploadInfo.hdaElement.attributes.id + ' ' + this.historypanel.data.selectors.hda.title );
- this.wait( 500, function(){});
- });
-});
-spaceghost.then( function(){
- this.test.comment( 'History option collapses all expanded hdas' );
- var uploadedSelector = '#' + testUploadInfo.hdaElement.attributes.id;
-
- this.historyoptions.collapseExpanded();
- this.wait( 500, function(){
- this.withFrame( spaceghost.data.selectors.frames.history, function(){
- this.test.assertNotVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
- "Body for uploaded file is not visible" );
- });
+ this.historypanel.waitForHdas( function(){
+ this.test.assertNotVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+ "Body for uploaded file is not visible" );
});
});
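[Editor's note] A note on the name-editing steps above: they drive the inline editor with raw page events rather than casper's sendKeys because, as the NOTE in the diff says, sendKeys adds a click before and a blur after the keystrokes, which would commit or cancel the edit prematurely. The resulting sequence:

// open the inline editor, send raw keystrokes, commit with Enter,
// then wait for the save round-trip and the re-rendered name
this.click( nameSelector );
this.page.sendEvent( 'keypress', newHistoryName );
this.page.sendEvent( 'keypress', this.page.event.key.Enter );
this.wait( 1000, function(){
    this.test.assertSelectorHasText( nameSelector, newHistoryName,
        'History name is ' + newHistoryName );
});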
diff -r 51d3a88f45ee2e3aa89395d2df13b5452d463274 -r f19741fdb9a6b6f15f3e9b983d4a8228b205ffda test/casperjs/login-tests.js
--- a/test/casperjs/login-tests.js
+++ b/test/casperjs/login-tests.js
@@ -39,6 +39,8 @@
password = spaceghost.fixtureData.testUser.password;
}
+var userEmailSelector = '//a[contains(text(),"Logged in as")]/span["id=#user-email"]';
+
// =================================================================== TESTS
// register a user (again...)
spaceghost.thenOpen( spaceghost.baseUrl, function(){
@@ -52,8 +54,7 @@
spaceghost.user.logout();
});
spaceghost.then( function(){
- this.test.assertSelectorDoesntHaveText(
- xpath( '//a[contains(text(),"Logged in as")]/span["id=#user-email"]' ), /\w/ );
+ this.test.assertSelectorDoesntHaveText( xpath( userEmailSelector ), /\w/ );
this.test.assert( spaceghost.user.loggedInAs() === '', 'loggedInAs() is empty string' );
});
@@ -63,8 +64,7 @@
spaceghost.user._submitLogin( email, password ); //No such user
});
spaceghost.thenOpen( spaceghost.baseUrl, function(){
- this.test.assertSelectorHasText(
- xpath( '//a[contains(text(),"Logged in as")]/span["id=#user-email"]' ), email );
+ this.test.assertSelectorHasText( xpath( userEmailSelector ), email );
this.test.assert( spaceghost.user.loggedInAs() === email, 'loggedInAs() matches email' );
});
diff -r 51d3a88f45ee2e3aa89395d2df13b5452d463274 -r f19741fdb9a6b6f15f3e9b983d4a8228b205ffda test/casperjs/modules/api.js
--- /dev/null
+++ b/test/casperjs/modules/api.js
@@ -0,0 +1,231 @@
+// =================================================================== module object, exports
+/** Creates a new api module object.
+ * @param {SpaceGhost} spaceghost a spaceghost instance
+ * @param {String} apikey apikey for use when not using session authentication
+ * @exported
+ */
+exports.create = function createAPI( spaceghost, apikey ){
+ return new API( spaceghost, apikey );
+};
+
+/** API object constructor.
+ * @param {SpaceGhost} spaceghost a spaceghost instance
+ * @param {String} apikey apikey for use when not using session authentication
+ */
+var API = function API( spaceghost, apikey ){
+ this.spaceghost = spaceghost;
+ this.apikey = apikey;
+
+ this.encodedIdExpectedLength = 16;
+ this.jQueryLocation = '../../static/scripts/libs/jquery/jquery.js';
+
+ this.histories = new HistoriesAPI( this );
+ this.hdas = new HDAAPI( this );
+};
+exports.API = API;
+
+API.prototype.toString = function toString(){
+ return ( this.spaceghost + '.API:'
+ + (( this.apikey )?( this.apikey ):( '(session)' )) );
+};
+
+// ------------------------------------------------------------------- APIError
+APIError.prototype = new Error();
+APIError.prototype.constructor = Error;
+/** @class Thrown when the Galaxy API returns an error from a request */
+function APIError( msg ){
+ Error.apply( this, arguments );
+ this.name = "APIError";
+ this.message = msg;
+}
+exports.APIError = APIError;
+
+/* ------------------------------------------------------------------- TODO:
+ can we component-ize this to become the basis for js-based api binding/resource
+
+*/
+// =================================================================== INTERNAL
+var utils = require( 'utils' );
+
+API.prototype._ajax = function _ajax( url, options ){
+ options = options || {};
+ options.async = false;
+
+ this.ensureJQuery();
+ var resp = this.spaceghost.evaluate( function( url, options ){
+ return jQuery.ajax( url, options );
+ }, url, options );
+ //this.spaceghost.debug( 'resp: ' + this.spaceghost.jsonStr( resp ) );
+
+ if( resp.status !== 200 ){
+ // grrr... this doesn't lose the \n\r\t
+ throw new APIError( resp.responseText.replace( /[\s\n\r\t]+/gm, ' ' ).replace( /"/g, '' ) );
+ }
+ return JSON.parse( resp.responseText );
+};
+
+// =================================================================== MISC
+API.prototype.isEncodedId = function isEncodedId( id ){
+ if( typeof id !== 'string' ){ return false; }
+ if( id.match( /[g-zG-Z]/ ) ){ return false; }
+ return ( id.length === this.encodedIdExpectedLength );
+};
+
+// ------------------------------------------------------------------- is type or throw err
+API.prototype.ensureId = function ensureId( id ){
+ if( !this.isEncodedId( id ) ){
+ throw new APIError( 'ID is not a valid encoded id: ' + id );
+ }
+ return id;
+};
+
+API.prototype.ensureObject = function ensureObject( obj ){
+ if( !utils.isObject( obj ) ){
+ throw new APIError( 'Not a valid object: ' + obj );
+ }
+ return obj;
+};
+
+// ------------------------------------------------------------------- jquery
+// using jq for the ajax in this module - that's why these are here
+//TODO:?? could go in spaceghost
+API.prototype.hasJQuery = function hasJQuery(){
+ return this.spaceghost.evaluate( function pageHasJQuery(){
+ var has = false;
+ try {
+ has = typeof ( jQuery + '' ) === 'string';
+ } catch( err ){}
+ return has;
+ });
+};
+
+API.prototype.ensureJQuery = function ensureJQuery(){
+ if( !this.hasJQuery() ){
+ var absLoc = this.spaceghost.options.scriptDir + this.jQueryLocation,
+ injected = this.spaceghost.page.injectJs( absLoc );
+ if( !injected ){
+ throw new APIError( 'Could not inject jQuery' );
+ }
+ }
+};
+
+
+// =================================================================== HISTORIES
+var HistoriesAPI = function HistoriesAPI( api ){
+ this.api = api;
+};
+HistoriesAPI.prototype.toString = function toString(){
+ return this.api + '.HistoriesAPI';
+};
+
+// -------------------------------------------------------------------
+HistoriesAPI.prototype.urlTpls = {
+ index : 'api/histories',
+ show : 'api/histories/%s',
+ create : 'api/histories',
+ delete_ : 'api/histories/%s',
+ undelete: 'api/histories/deleted/%s/undelete'
+};
+
+HistoriesAPI.prototype.index = function index( deleted ){
+ this.api.spaceghost.info( 'history.index: ' + (( deleted )?( 'w deleted' ):( '(wo deleted)' )) );
+
+ deleted = deleted || false;
+ return this.api._ajax( this.urlTpls.index, {
+ data : { deleted: deleted }
+ });
+};
+
+HistoriesAPI.prototype.show = function show( id, deleted ){
+ this.api.spaceghost.info( 'history.show: ' + [ id, (( deleted )?( 'w deleted' ):( '' )) ] );
+
+ id = ( id === 'most_recently_used' )?( id ):( this.api.ensureId( id ) );
+ deleted = deleted || false;
+ return this.api._ajax( utils.format( this.urlTpls.show, id ), {
+ data : { deleted: deleted }
+ });
+};
+
+HistoriesAPI.prototype.create = function create( payload ){
+ this.api.spaceghost.info( 'history.create: ' + this.api.spaceghost.jsonStr( payload ) );
+
+ // py.payload <-> ajax.data
+ payload = this.api.ensureObject( payload );
+ return this.api._ajax( utils.format( this.urlTpls.create ), {
+ type : 'POST',
+ data : payload
+ });
+};
+
+HistoriesAPI.prototype.delete_ = function delete_( id, purge ){
+ this.api.spaceghost.info( 'history.delete: ' + [ id, (( purge )?( '(purge!)' ):( '' )) ] );
+
+ // py.payload <-> ajax.data
+ var payload = ( purge )?({ purge: true }):({});
+ return this.api._ajax( utils.format( this.urlTpls.delete_, this.api.ensureId( id ) ), {
+ type : 'DELETE',
+ data : payload
+ });
+};
+
+HistoriesAPI.prototype.undelete = function undelete( id ){
+ //throw ( 'unimplemented' );
+ this.api.spaceghost.info( 'history.undelete: ' + id );
+
+ return this.api._ajax( utils.format( this.urlTpls.undelete, this.api.ensureId( id ) ), {
+ type : 'POST'
+ });
+};
+
+
+// =================================================================== HDAS
+var HDAAPI = function HDAAPI( api ){
+ this.api = api;
+};
+HDAAPI.prototype.toString = function toString(){
+ return this.api + '.HDAAPI';
+};
+
+// -------------------------------------------------------------------
+HDAAPI.prototype.urlTpls = {
+ index : 'api/histories/%s/contents',
+ show : 'api/histories/%s/contents/%s',
+ create : 'api/histories/%s/contents'//,
+ // not implemented
+ //delete_ : 'api/histories/%s',
+ //undelete: 'api/histories/deleted/%s/undelete'
+};
+
+HDAAPI.prototype.index = function index( historyId, ids ){
+ this.api.spaceghost.info( 'hdas.index: ' + [ historyId, ids ] );
+ var data = {};
+ if( ids ){
+ ids = ( utils.isArray( ids ) )?( ids.join( ',' ) ):( ids );
+ data.ids = ids;
+ }
+
+ return this.api._ajax( utils.format( this.urlTpls.index, this.api.ensureId( historyId ) ), {
+ data : data
+ });
+};
+
+HDAAPI.prototype.show = function show( historyId, id, deleted ){
+ this.api.spaceghost.info( 'hdas.show: ' + [ historyId, id, (( deleted )?( 'w deleted' ):( '' )) ] );
+
+ historyId = this.api.ensureId( historyId );
+ id = ( id === 'most_recently_used' )?( id ):( this.api.ensureId( id ) );
+ deleted = deleted || false;
+ return this.api._ajax( utils.format( this.urlTpls.show, historyId, id ), {
+ data : { deleted: deleted }
+ });
+};
+
+HDAAPI.prototype.create = function create( historyId, payload ){
+ this.api.spaceghost.info( 'hdas.create: ' + this.api.spaceghost.jsonStr( payload ) );
+
+ // py.payload <-> ajax.data
+ payload = this.api.ensureObject( payload );
+ return this.api._ajax( utils.format( this.urlTpls.create, this.api.ensureId( historyId ) ), {
+ type : 'POST',
+ data : payload
+ });
+};
+
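For reference, the urlTpls above map straight onto Galaxy's REST histories endpoints; the module only wraps them in synchronous jQuery calls. A minimal Python 2 sketch of hitting the same endpoints outside the browser (the host is an assumption, and a logged-in session or API key may be required):

    # Sketch only - not part of this commit.
    import json
    import urllib2

    base = 'http://localhost:8080/'    # assumed local Galaxy instance
    # an API key can be appended if needed: 'api/histories?key=<key>'
    histories = json.loads( urllib2.urlopen( base + 'api/histories' ).read() )
    for history in histories:    # list of summary dicts
        print history[ 'id' ], history[ 'name' ]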
diff -r 51d3a88f45ee2e3aa89395d2df13b5452d463274 -r f19741fdb9a6b6f15f3e9b983d4a8228b205ffda test/casperjs/modules/historyoptions.js
--- a/test/casperjs/modules/historyoptions.js
+++ b/test/casperjs/modules/historyoptions.js
@@ -21,23 +21,65 @@
// -------------------------------------------------------------------
/* TODO:
+ some of the fns below can be applied to any popup
*/
+// =================================================================== internal
+var xpath = require( 'casper' ).selectXPath;
+
// =================================================================== API (external)
/** Just open the menu
+ * @param {Function} fn function to call when the menu opens
+ * @returns {Any} the return value of fn
*/
-HistoryOptions.prototype.openMenu = function openMenu(){
- this.spaceghost.click( this.data.selectors.button );
+HistoryOptions.prototype.openMenu = function openMenu( fn ){
+ return this.spaceghost.jumpToTop( function(){
+ this.click( this.historyoptions.data.selectors.button );
+ return fn.call( this );
+ });
};
/** Click an option by Label
+ * @param {String} optionLabel the label of the option to click (can be partial?)
+ * @returns {SpaceGhost} for chaining
*/
HistoryOptions.prototype.clickOption = function clickOption( optionLabel ){
- this.openMenu();
- // casperjs clickLabel
- var optionXpath = this.data.selectors.optionXpathByLabelFn( optionLabel );
- this.spaceghost.click( optionXpath );
+ this.openMenu( function(){
+ this.click( this.historyoptions.data.selectors.optionXpathByLabelFn( optionLabel ) );
+ // shouldn't need to clear - clicking an option will do that
+ });
+ return this.spaceghost;
+};
+
+/** Is the history option with the given label showing as toggled?
+ * @param {String} optionLabel the label of the option to check (can be partial?)
+ * @returns {Boolean} true if the option is on, false if off OR not a toggle
+ */
+HistoryOptions.prototype.isOn = function isOn( optionLabel ){
+ return this.openMenu( function(){
+ var toggleIconInfo = this.elementInfoOrNull(
+ this.historyoptions.data.selectors.optionIsOnXpathByLabelFn( optionLabel ) );
+ // have to clear manually
+ this.click( 'body' );
+ return !!toggleIconInfo;
+ });
+};
+
+/** Toggle the option - optionally forcing to on or off.
+ * @param {String} optionLabel the label of the option to check (can be partial?)
+ * @param {Boolean} force if true ensure option is on, if false ensure it's off,
+ * if undefined simply toggle
+ * @returns {Boolean} true if the option is now on, false if now off or not a toggle
+ */
+HistoryOptions.prototype.toggle = function toggle( optionLabel, force ){
+ var isOn = this.isOn( optionLabel );
+ if( ( force === false && isOn )
+ || ( force === true && !isOn )
+ || ( force === undefined ) ){
+ return this.clickOption( optionLabel );
+ }
+ return force;
};
// -------------------------------------------------------------------
@@ -101,22 +143,68 @@
//};
-// -------------------------------------------------------------------
+// ------------------------------------------------------------------- check the togglable options
// these are easy, one click options (they don't open a new page)
+/** Is 'Include Deleted Datasets' on (accrd. to the menu)?
+ */
+HistoryOptions.prototype.deletedAreIncluded = function deletedAreIncluded(){
+ return this.isOn( this.data.labels.options.includeDeleted );
+};
+/** Is 'Include Hidden Datasets' on (accrd. to the menu)?
+ */
+HistoryOptions.prototype.hiddenAreIncluded = function hiddenAreIncluded(){
+ return this.isOn( this.data.labels.options.includeHidden );
+};
+
+// ------------------------------------------------------------------- options that control the hpanel
/** corresponds to history options menu: 'Collapse Expanded Datasets'
*/
-HistoryOptions.prototype.collapseExpanded = function collapseExpanded(){
- this.clickOption( this.data.labels.options.collapseExpanded );
+HistoryOptions.prototype.collapseExpanded = function collapseExpanded( then ){
+ return this.spaceghost.then( function(){
+ this.historyoptions.clickOption( this.historyoptions.data.labels.options.collapseExpanded );
+ this.wait( 500, then );
+ });
};
-/** corresponds to history options menu: 'Include Deleted Datasets'
+
+/** set 'Include Deleted Datasets' to on
+ * @param {Function} then casper step to run when option is set
*/
-HistoryOptions.prototype.includeDeleted = function includeDeleted(){
- this.clickOption( this.data.labels.options.includeDeleted );
+HistoryOptions.prototype.includeDeleted = function includeDeleted( then ){
+ return this.spaceghost.then( function(){
+ this.historyoptions.toggle( this.historyoptions.data.labels.options.includeDeleted, true );
+ this.historypanel.waitForHdas( then );
+ });
};
-/** corresponds to history options menu: 'Include Hidden Datasets'
+
+/** set 'Include Deleted Datasets' to off
+ * @param {Function} then casper step to run when option is set
*/
-HistoryOptions.prototype.includeHidden = function includeHidden(){
- this.clickOption( this.data.labels.options.includeHidden );
+HistoryOptions.prototype.excludeDeleted = function excludeDeleted( then ){
+ return this.spaceghost.then( function(){
+ this.historyoptions.toggle( this.historyoptions.data.labels.options.includeDeleted, false );
+ //TODO:?? this puts in the history frame. Is that what we want?
+ this.historypanel.waitForHdas( then );
+ });
+};
+
+/** set 'Include Hidden Datasets' to on
+ * @param {Function} then casper step to run when option is set
+ */
+HistoryOptions.prototype.includeHidden = function includeHidden( then ){
+ return this.spaceghost.then( function(){
+ this.historyoptions.toggle( this.historyoptions.data.labels.options.includeHidden, true );
+ this.historypanel.waitForHdas( then );
+ });
+};
+
+/** set 'Include Hidden Datasets' to off
+ * @param {Function} then casper step to run when option is set
+ */
+HistoryOptions.prototype.excludeHidden = function excludeHidden( then ){
+ return this.spaceghost.then( function(){
+ this.historyoptions.toggle( this.historyoptions.data.labels.options.includeHidden, false );
+ this.historypanel.waitForHdas( then );
+ });
};
@@ -129,6 +217,10 @@
menu : '#history-options-button-menu',
optionXpathByLabelFn : function optionXpathByLabelFn( label ){
return xpath( '//ul[@id="history-options-button-menu"]/li/a[text()[contains(.,"' + label + '")]]' );
+ },
+ optionIsOnXpathByLabelFn : function optionIsOnXpathByLabelFn( label ){
+ return xpath( '//ul[@id="history-options-button-menu"]/li/a[text()[contains(.,"' + label + '")]]'
+ + '/span[@class="fa-icon-ok"]' );
}
},
labels : {
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: inithello: Fix unicode decoding error when displaying the contents of certain files. Catch any future errors.
by commits-noreply@bitbucket.org 18 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7b5f97700fdc/
changeset: 7b5f97700fdc
branch: next-stable
user: inithello
date: 2013-03-18 19:32:06
summary: Fix unicode decoding error when displaying the contents of certain files. Catch any future errors.
affected #: 1 file
diff -r e0607a09d9fa9eb690e9b410d9c167072db0b55a -r 7b5f97700fdc8286db3e5ed6b6425127694ca42f lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1059,7 +1059,12 @@
"""Translates the characters in text to an html string"""
if text:
if to_html:
- escaped_text = str( markupsafe.escape( text ) )
+ try:
+ escaped_text = text.decode( 'utf-8' )
+ escaped_text = escaped_text.encode( 'ascii', 'ignore' )
+ escaped_text = str( markupsafe.escape( escaped_text ) )
+ except UnicodeDecodeError, e:
+ escaped_text = "Error decoding string: %s" % str( e )
else:
escaped_text = str( text )
translated = []
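For reference, a quick Python 2 sketch of the decode/encode path introduced above (the sample bytes are illustrative):

    import markupsafe

    text = 'caf\xc3\xa9 "x"'    # utf-8 encoded bytes for 'café "x"'
    decoded = text.decode( 'utf-8' )    # -> u'caf\xe9 "x"'
    ascii_only = decoded.encode( 'ascii', 'ignore' )    # -> 'caf "x"' (the é is dropped)
    print str( markupsafe.escape( ascii_only ) )    # -> caf &#34;x&#34;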
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: inithello: Fix unicode decoding error when displaying the contents of certain files. Catch any future errors.
by commits-noreply@bitbucket.org 18 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/51d3a88f45ee/
changeset: 51d3a88f45ee
user: inithello
date: 2013-03-18 19:32:06
summary: Fix unicode decoding error when displaying the contents of certain files. Catch any future errors.
affected #: 1 file
diff -r 7a4dd13dac9be2b3c9233b7a60e61d30d283744f -r 51d3a88f45ee2e3aa89395d2df13b5452d463274 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1059,7 +1059,12 @@
"""Translates the characters in text to an html string"""
if text:
if to_html:
- escaped_text = str( markupsafe.escape( text ) )
+ try:
+ escaped_text = text.decode( 'utf-8' )
+ escaped_text = escaped_text.encode( 'ascii', 'ignore' )
+ escaped_text = str( markupsafe.escape( escaped_text ) )
+ except UnicodeDecodeError, e:
+ escaped_text = "Error decoding string: %s" % str( e )
else:
escaped_text = str( text )
translated = []
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Include LWR and job running fixes from John Chilton in next-stable.
by commits-noreply@bitbucket.org 18 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e0607a09d9fa/
changeset: e0607a09d9fa
branch: next-stable
user: natefoo
date: 2013-03-18 16:38:34
summary: Include LWR and job running fixes from John Chilton in next-stable.
affected #: 10 files
diff -r e58bb74b6afe3ebac548a2b2d124c24bf149a24a -r e0607a09d9fa9eb690e9b410d9c167072db0b55a job_conf.xml.sample_advanced
--- a/job_conf.xml.sample_advanced
+++ b/job_conf.xml.sample_advanced
@@ -7,6 +7,7 @@
<plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner"/><plugin id="pbs" type="runner" load="galaxy.jobs.runners.pbs:PBSJobRunner" workers="2"/><plugin id="gridengine" type="runner" load="galaxy.jobs.runners.drmaa:DRMAARunner"/>
+ <plugin id="lwr" type="runner" load="galaxy.jobs.runners.lwr.LwrJobRunner" /><!-- https://lwr.readthedocs.org --></plugins><handlers default="handlers"><!-- Additional job handlers - the id should match the name of a
@@ -40,6 +41,11 @@
<param id="type">python</param><param id="function">foo</param></destination>
+ <destination id="secure_lwr" runner="lwr">
+ <param id="url">https://windowshost.examle.com:8913/</param>
+ <!-- If set, private_token must match token remote LWR server configured with. -->
+ <param id="private_token">123456789changeme</param>
+ </destination></destinations><tools><!-- Tools can be configured to use specific destinations or handlers,
diff -r e58bb74b6afe3ebac548a2b2d124c24bf149a24a -r e0607a09d9fa9eb690e9b410d9c167072db0b55a lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -126,7 +126,12 @@
for plugin in self.__findall_with_required(plugins, 'plugin', ('id', 'type', 'load')):
if plugin.get('type') == 'runner':
workers = plugin.get('workers', plugins.get('workers', JobConfiguration.DEFAULT_NWORKERS))
- self.runner_plugins.append(dict(id=plugin.get('id'), load=plugin.get('load'), workers=int(workers)))
+ runner_kwds = self.__get_params(plugin)
+ runner_info = dict(id=plugin.get('id'),
+ load=plugin.get('load'),
+ workers=int(workers),
+ kwds=runner_kwds)
+ self.runner_plugins.append(runner_info)
else:
log.error('Unknown plugin type: %s' % plugin.get('type'))
# Load tasks if configured
@@ -480,7 +485,7 @@
log.warning("Job runner classes must be subclassed from BaseJobRunner, %s has bases: %s" % (id, runner_class.__bases__))
continue
try:
- rval[id] = runner_class( self.app, runner['workers'] )
+ rval[id] = runner_class( self.app, runner[ 'workers' ], **runner.get( 'kwds', {} ) )
except TypeError:
log.warning( "Job runner '%s:%s' has not been converted to a new-style runner" % ( module_name, class_name ) )
rval[id] = runner_class( self.app )
@@ -833,7 +838,7 @@
log.warning('set_runner() is deprecated, use set_job_destination()')
self.set_job_destination(self.job_destination, external_id)
- def set_job_destination(self, job_destination, external_id):
+ def set_job_destination(self, job_destination, external_id=None ):
"""
Persist job destination params in the database for recovery.
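The net effect of the plugin changes above is that <param> children of a <plugin> element (collected by __get_params()) now reach the runner constructor as keyword arguments. A self-contained sketch of that hand-off; FakeRunner is a stand-in, not a real Galaxy class:

    class FakeRunner( object ):
        """Stand-in for a runner plugin that accepts a <param> as a kwarg."""
        def __init__( self, app, nworkers, transport=None ):
            self.transport = transport

    runner = dict( id='lwr', load='...', workers=1, kwds={ 'transport': 'curl' } )
    instance = FakeRunner( None, runner[ 'workers' ], **runner.get( 'kwds', {} ) )
    assert instance.transport == 'curl'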
diff -r e58bb74b6afe3ebac548a2b2d124c24bf149a24a -r e0607a09d9fa9eb690e9b410d9c167072db0b55a lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -3,11 +3,12 @@
from galaxy import model
from galaxy.jobs.runners import AsynchronousJobState, AsynchronousJobRunner
+from galaxy.jobs import JobDestination
import errno
from time import sleep
-from lwr_client import FileStager, Client
+from lwr_client import FileStager, Client, url_to_destination_params
log = logging.getLogger( __name__ )
@@ -19,11 +20,16 @@
"""
runner_name = "LWRRunner"
- def __init__( self, app, nworkers ):
+ def __init__( self, app, nworkers, transport=None ):
"""Start the job runner """
super( LwrJobRunner, self ).__init__( app, nworkers )
self._init_monitor_thread()
self._init_worker_threads()
+ self.transport_type = transport
+
+ def url_to_destination( self, url ):
+ """Convert a legacy URL to a job destination"""
+ return JobDestination( runner="lwr", params=url_to_destination_params( url ) )
def check_watched_item(self, job_state):
try:
@@ -42,7 +48,7 @@
def queue_job(self, job_wrapper):
stderr = stdout = command_line = ''
- runner_url = job_wrapper.get_job_runner_url()
+ job_destination = job_wrapper.job_destination
try:
job_wrapper.prepare()
@@ -74,7 +80,7 @@
rebuilt_command_line = file_stager.get_rewritten_command_line()
job_id = file_stager.job_id
client.launch( rebuilt_command_line )
- job_wrapper.set_runner( runner_url, job_id )
+ job_wrapper.set_job_destination( job_destination, job_id )
job_wrapper.change_state( model.Job.states.RUNNING )
except Exception, exc:
@@ -87,32 +93,26 @@
lwr_job_state.job_id = job_id
lwr_job_state.old_state = True
lwr_job_state.running = True
- lwr_job_state.runner_url = runner_url
+ lwr_job_state.job_destination = job_destination
self.monitor_job(lwr_job_state)
def get_output_files(self, job_wrapper):
output_fnames = job_wrapper.get_output_fnames()
return [ str( o ) for o in output_fnames ]
-
- def determine_lwr_url(self, url):
- lwr_url = url[ len( 'lwr://' ) : ]
- return lwr_url
-
def get_client_from_wrapper(self, job_wrapper):
job_id = job_wrapper.job_id
if hasattr(job_wrapper, 'task_id'):
job_id = "%s_%s" % (job_id, job_wrapper.task_id)
- return self.get_client( job_wrapper.get_job_runner_url(), job_id )
+ return self.get_client( job_wrapper.job_destination.params, job_id )
def get_client_from_state(self, job_state):
- job_runner = job_state.runner_url
+ job_destination_params = job_state.job_destination.params
job_id = job_state.job_id
- return self.get_client(job_runner, job_id)
+ return self.get_client( job_destination_params, job_id )
- def get_client(self, job_runner, job_id):
- lwr_url = self.determine_lwr_url( job_runner )
- return Client(lwr_url, job_id)
+ def get_client( self, job_destination_params, job_id ):
+ return Client( job_destination_params, job_id, transport_type=self.transport_type )
def finish_job( self, job_state ):
stderr = stdout = command_line = ''
@@ -210,7 +210,7 @@
lwr_url = job.job_runner_name
job_id = job.job_runner_external_id
log.debug("Attempt remote lwr kill of job with url %s and id %s" % (lwr_url, job_id))
- client = self.get_client(lwr_url, job_id)
+ client = self.get_client(job.destination_params, job_id)
client.kill()
@@ -219,6 +219,7 @@
job_state = AsynchronousJobState()
job_state.job_id = str( job.get_job_runner_external_id() )
job_state.runner_url = job_wrapper.get_job_runner_url()
+ job_state.job_destination = job_wrapper.job_destination
job_wrapper.command_line = job.get_command_line()
job_state.job_wrapper = job_wrapper
if job.get_state() == model.Job.states.RUNNING:
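The new url_to_destination() hook above hands legacy 'lwr://' runner URLs to url_to_destination_params() (added by this commit, see lwr_client/destination.py below). A quick sketch of the conversion; the hostname and token are illustrative:

    from galaxy.jobs.runners.lwr_client import url_to_destination_params

    params = url_to_destination_params( 'lwr://https://sekret@lwrhost.example.org:8913/' )
    assert params[ 'url' ] == 'https://lwrhost.example.org:8913/'
    assert params[ 'private_token' ] == 'sekret'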
diff -r e58bb74b6afe3ebac548a2b2d124c24bf149a24a -r e0607a09d9fa9eb690e9b410d9c167072db0b55a lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -5,502 +5,9 @@
This module contains logic for interfacing with an external LWR server.
"""
-import os
-import re
-import time
-import urllib
-import simplejson
+from .stager import FileStager
+from .client import Client
+from .destination import url_to_destination_params
-from transport import get_transport
-
-
-class JobInputs(object):
- """
- Abstractions over dynamic inputs created for a given job (namely the command to
- execute and created configfiles).
-
- **Parameters**
-
- command_line : str
- Local command to execute for this job. (To be rewritten.)
- config_files : str
- Config files created for this job. (To be rewritten.)
-
-
- >>> import tempfile
- >>> tf = tempfile.NamedTemporaryFile()
- >>> def setup_inputs(tf):
- ... open(tf.name, "w").write("world /path/to/input the rest")
- ... inputs = JobInputs("hello /path/to/input", [tf.name])
- ... return inputs
- >>> inputs = setup_inputs(tf)
- >>> inputs.rewrite_paths("/path/to/input", 'C:\\input')
- >>> inputs.rewritten_command_line
- 'hello C:\\\\input'
- >>> inputs.rewritten_config_files[tf.name]
- 'world C:\\\\input the rest'
- >>> tf.close()
- >>> tf = tempfile.NamedTemporaryFile()
- >>> inputs = setup_inputs(tf)
- >>> inputs.find_referenced_subfiles('/path/to')
- ['/path/to/input']
- >>> inputs.path_referenced('/path/to')
- True
- >>> inputs.path_referenced('/path/to/input')
- True
- >>> inputs.path_referenced('/path/to/notinput')
- False
- >>> tf.close()
- """
-
- def __init__(self, command_line, config_files):
- self.rewritten_command_line = command_line
- self.rewritten_config_files = {}
- for config_file in config_files or []:
- config_contents = _read(config_file)
- self.rewritten_config_files[config_file] = config_contents
-
- def find_referenced_subfiles(self, directory):
- """
- Return list of files below specified `directory` in job inputs. Could
- use more sophisticated logic (match quotes to handle spaces, handle
- subdirectories, etc...).
-
- **Parameters**
-
- directory : str
- Full path to directory to search.
-
- """
- pattern = r"(%s%s\S+)" % (directory, os.sep)
- referenced_files = set()
- for input_contents in self.__items():
- referenced_files.update(re.findall(pattern, input_contents))
- return list(referenced_files)
-
- def path_referenced(self, path):
- pattern = r"%s" % path
- found = False
- for input_contents in self.__items():
- if re.findall(pattern, input_contents):
- found = True
- break
- return found
-
- def rewrite_paths(self, local_path, remote_path):
- """
- Rewrite references to `local_path` with `remote_path` in job inputs.
- """
- self.__rewrite_command_line(local_path, remote_path)
- self.__rewrite_config_files(local_path, remote_path)
-
- def __rewrite_command_line(self, local_path, remote_path):
- self.rewritten_command_line = self.rewritten_command_line.replace(local_path, remote_path)
-
- def __rewrite_config_files(self, local_path, remote_path):
- for config_file, rewritten_contents in self.rewritten_config_files.iteritems():
- self.rewritten_config_files[config_file] = rewritten_contents.replace(local_path, remote_path)
-
- def __items(self):
- items = [self.rewritten_command_line]
- items.extend(self.rewritten_config_files.values())
- return items
-
-
-class FileStager(object):
- """
- Objects of the FileStager class interact with an LWR client object to
- stage the files required to run jobs on a remote LWR server.
-
- **Parameters**
-
- client : Client
- LWR client object.
- command_line : str
- The local command line to execute, this will be rewritten for the remote server.
- config_files : list
- List of Galaxy 'configfile's produced for this job. These will be rewritten and sent to remote server.
- input_files : list
- List of input files used by job. These will be transferred and references rewritten.
- output_files : list
- List of output_files produced by job.
- tool_dir : str
- Directory containing tool to execute (if a wrapper is used, it will be transferred to remote server).
- working_directory : str
- Local path created by Galaxy for running this job.
-
- """
-
- def __init__(self, client, tool, command_line, config_files, input_files, output_files, working_directory):
- """
- """
- self.client = client
- self.command_line = command_line
- self.config_files = config_files
- self.input_files = input_files
- self.output_files = output_files
- self.tool_id = tool.id
- self.tool_version = tool.version
- self.tool_dir = os.path.abspath(tool.tool_dir)
- self.working_directory = working_directory
-
- # Setup job inputs, these will need to be rewritten before
- # shipping off to remote LWR server.
- self.job_inputs = JobInputs(self.command_line, self.config_files)
-
- self.file_renames = {}
-
- self.__handle_setup()
- self.__initialize_referenced_tool_files()
- self.__upload_tool_files()
- self.__upload_input_files()
- self.__upload_working_directory_files()
- self.__initialize_output_file_renames()
- self.__initialize_task_output_file_renames()
- self.__initialize_config_file_renames()
- self.__handle_rewrites()
- self.__upload_rewritten_config_files()
-
- def __handle_setup(self):
- job_config = self.client.setup(self.tool_id, self.tool_version)
-
- self.new_working_directory = job_config['working_directory']
- self.new_outputs_directory = job_config['outputs_directory']
- self.remote_path_separator = job_config['path_separator']
- # If remote LWR server assigned job id, use that otherwise
- # just use local job_id assigned.
- galaxy_job_id = self.client.job_id
- self.job_id = job_config.get('job_id', galaxy_job_id)
- if self.job_id != galaxy_job_id:
- # Remote LWR server assigned an id different than the
- # Galaxy job id, update client to reflect this.
- self.client.job_id = self.job_id
-
- def __initialize_referenced_tool_files(self):
- self.referenced_tool_files = self.job_inputs.find_referenced_subfiles(self.tool_dir)
-
- def __upload_tool_files(self):
- for referenced_tool_file in self.referenced_tool_files:
- tool_upload_response = self.client.upload_tool_file(referenced_tool_file)
- self.file_renames[referenced_tool_file] = tool_upload_response['path']
-
- def __upload_input_files(self):
- for input_file in self.input_files:
- self.__upload_input_file(input_file)
- self.__upload_input_extra_files(input_file)
-
- def __upload_input_file(self, input_file):
- if self.job_inputs.path_referenced(input_file):
- input_upload_response = self.client.upload_input(input_file)
- self.file_renames[input_file] = input_upload_response['path']
-
- def __upload_input_extra_files(self, input_file):
- # TODO: Determine if this is object store safe and what needs to be
- # done if it is not.
- files_path = "%s_files" % input_file[0:-len(".dat")]
- if os.path.exists(files_path) and self.job_inputs.path_referenced(files_path):
- for extra_file in os.listdir(files_path):
- extra_file_path = os.path.join(files_path, extra_file)
- relative_path = os.path.basename(files_path)
- extra_file_relative_path = os.path.join(relative_path, extra_file)
- response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
- self.file_renames[extra_file_path] = response['path']
-
- def __upload_working_directory_files(self):
- # Task manager stages files into working directory, these need to be
- # uploaded if present.
- for working_directory_file in os.listdir(self.working_directory):
- path = os.path.join(self.working_directory, working_directory_file)
- working_file_response = self.client.upload_working_directory_file(path)
- self.file_renames[path] = working_file_response['path']
-
- def __initialize_output_file_renames(self):
- for output_file in self.output_files:
- self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
- self.remote_path_separator,
- os.path.basename(output_file))
-
- def __initialize_task_output_file_renames(self):
- for output_file in self.output_files:
- name = os.path.basename(output_file)
- self.file_renames[os.path.join(self.working_directory, name)] = r'%s%s%s' % (self.new_working_directory,
- self.remote_path_separator,
- name)
-
- def __initialize_config_file_renames(self):
- for config_file in self.config_files:
- self.file_renames[config_file] = r'%s%s%s' % (self.new_working_directory,
- self.remote_path_separator,
- os.path.basename(config_file))
-
- def __rewrite_paths(self, contents):
- new_contents = contents
- for local_path, remote_path in self.file_renames.iteritems():
- new_contents = new_contents.replace(local_path, remote_path)
- return new_contents
-
- def __handle_rewrites(self):
- for local_path, remote_path in self.file_renames.iteritems():
- self.job_inputs.rewrite_paths(local_path, remote_path)
-
- def __upload_rewritten_config_files(self):
- for config_file, new_config_contents in self.job_inputs.rewritten_config_files.iteritems():
- self.client.upload_config_file(config_file, new_config_contents)
-
- def get_rewritten_command_line(self):
- """
- Returns the rewritten version of the command line to execute suitable
- for remote host.
- """
- return self.job_inputs.rewritten_command_line
-
-
-class parseJson(object):
-
- def __init__(self):
- pass
-
- def __call__(self, func):
- def replacement(*args, **kwargs):
- response = func(*args, **kwargs)
- return simplejson.loads(response)
- return replacement
-
-
-class Client(object):
- """
- Objects of this client class perform low-level communication with a remote LWR server.
-
- **Parameters**
-
- remote_host : str
- Remote URL of the LWR server.
- job_id : str
- Galaxy job/task id.
- private_key : str (optional)
- Secret key the remote LWR server is configured with.
- """
-
- def __init__(self, remote_host, job_id, private_key=None):
- if not remote_host.endswith("/"):
- remote_host = remote_host + "/"
- ## If we don't have an explicit private_key defined, check for
- ## one embedded in the URL. A URL of the form
- ## https://moo@cow:8913 will try to contact https://cow:8913
- ## with a private key of moo
- private_key_format = "https?://(.*)@.*/?"
- private_key_match = re.match(private_key_format, remote_host)
- if not private_key and private_key_match:
- private_key = private_key_match.group(1)
- remote_host = remote_host.replace("%s@" % private_key, '', 1)
- self.remote_host = remote_host
- self.job_id = job_id
- self.private_key = private_key
- self.transport = get_transport()
-
- def __build_url(self, command, args):
- if self.private_key:
- args["private_key"] = self.private_key
- data = urllib.urlencode(args)
- url = self.remote_host + command + "?" + data
- return url
-
- def __raw_execute(self, command, args={}, data=None, input_path=None, output_path=None):
- url = self.__build_url(command, args)
- response = self.transport.execute(url, data=data, input_path=input_path, output_path=output_path)
- return response
-
- @parseJson()
- def __upload_file(self, action, path, name=None, contents=None):
- if not name:
- name = os.path.basename(path)
- args = {"job_id": self.job_id, "name": name}
- input_path = path
- if contents:
- input_path = None
- return self.__raw_execute(action, args, contents, input_path)
-
- def upload_tool_file(self, path):
- """
- Upload a tool related file (e.g. wrapper) required to run job.
-
- **Parameters**
-
- path : str
- Local path tool.
- """
- return self.__upload_file("upload_tool_file", path)
-
- def upload_input(self, path):
- """
- Upload input dataset to remote server.
-
- **Parameters**
-
- path : str
- Local path of input dataset.
- """
- return self.__upload_file("upload_input", path)
-
- def upload_extra_input(self, path, relative_name):
- """
- Upload extra input file to remote server.
-
- **Parameters**
-
- path : str
- Extra files path of input dataset corresponding to this input.
- relative_name : str
- Relative path of extra file to upload relative to inputs extra files path.
- """
- return self.__upload_file("upload_extra_input", path, name=relative_name)
-
- def upload_config_file(self, path, contents):
- """
- Upload a job's config file to the remote server.
-
- **Parameters**
-
- path : str
- Local path to the original config file.
- contents : str
- Rewritten contents of the config file to upload.
- """
- return self.__upload_file("upload_config_file", path, contents=contents)
-
- def upload_working_directory_file(self, path):
- """
- Upload the supplied file (path) from a job's working directory
- to remote server.
-
- **Parameters**
-
- path : str
- Path to file to upload.
- """
- return self.__upload_file("upload_working_directory_file", path)
-
- @parseJson()
- def _get_output_type(self, name):
- return self.__raw_execute("get_output_type", {"name": name,
- "job_id": self.job_id})
-
- def download_work_dir_output(self, source, working_directory, output_path):
- """
- Download an output dataset specified with from_work_dir from the
- remote server.
-
- **Parameters**
-
- source : str
- Path in job's working_directory to find output in.
- working_directory : str
- Local working_directory for the job.
- output_path : str
- Full path to output dataset.
- """
- output = open(output_path, "wb")
- name = os.path.basename(source)
- self.__raw_download_output(name, self.job_id, "work_dir", output)
-
- def download_output(self, path, working_directory):
- """
- Download an output dataset from the remote server.
-
- **Parameters**
-
- path : str
- Local path of the dataset.
- working_directory : str
- Local working_directory for the job.
- """
- name = os.path.basename(path)
- output_type = self._get_output_type(name)
- if output_type == "direct":
- output_path = path
- elif output_type == "task":
- output_path = os.path.join(working_directory, name)
- else:
- raise Exception("No remote output found for dataset with path %s" % path)
- self.__raw_download_output(name, self.job_id, output_type, output_path)
-
- def __raw_download_output(self, name, job_id, output_type, output_path):
- self.__raw_execute("download_output",
- {"name": name,
- "job_id": self.job_id,
- "output_type": output_type},
- output_path=output_path)
-
- def launch(self, command_line):
- """
- Run or queue up the execution of the supplied
- `command_line` on the remote server.
-
- **Parameters**
-
- command_line : str
- Command to execute.
- """
- return self.__raw_execute("launch", {"command_line": command_line,
- "job_id": self.job_id})
-
- def kill(self):
- """
- Cancel remote job, either removing from the queue or killing it.
- """
- return self.__raw_execute("kill", {"job_id": self.job_id})
-
- def wait(self):
- """
- Wait for job to finish.
- """
- while True:
- complete_response = self.raw_check_complete()
- if complete_response["complete"] == "true":
- return complete_response
- time.sleep(1)
-
- @parseJson()
- def raw_check_complete(self):
- """
- Get check_complete response from the remote server.
- """
- check_complete_response = self.__raw_execute("check_complete", {"job_id": self.job_id})
- return check_complete_response
-
- def check_complete(self):
- """
- Return boolean indicating whether the job is complete.
- """
- return self.raw_check_complete()["complete"] == "true"
-
- def clean(self):
- """
- Cleanup the remote job.
- """
- self.__raw_execute("clean", {"job_id": self.job_id})
-
- @parseJson()
- def setup(self, tool_id=None, tool_version=None):
- """
- Setup remote LWR server to run this job.
- """
- setup_args = {"job_id": self.job_id}
- if tool_id:
- setup_args["tool_id"] = tool_id
- if tool_version:
- setup_args["tool_version"] = tool_version
- return self.__raw_execute("setup", setup_args)
-
-
-def _read(path):
- """
- Utility method to quickly read small files (config files and tool
- wrappers) into memory as strings.
- """
- input = open(path, "r")
- try:
- return input.read()
- finally:
- input.close()
+__all__ = ['Client', 'FileStager', 'url_to_destination_params']
diff -r e58bb74b6afe3ebac548a2b2d124c24bf149a24a -r e0607a09d9fa9eb690e9b410d9c167072db0b55a lib/galaxy/jobs/runners/lwr_client/client.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/client.py
@@ -0,0 +1,235 @@
+import os
+import time
+import urllib
+import simplejson
+
+from .transport import get_transport
+from .destination import url_to_destination_params
+
+
+class parseJson(object):
+
+ def __init__(self):
+ pass
+
+ def __call__(self, func):
+ def replacement(*args, **kwargs):
+ response = func(*args, **kwargs)
+ return simplejson.loads(response)
+ return replacement
+
+
+class Client(object):
+ """
+ Objects of this client class perform low-level communication with a remote LWR server.
+
+ **Parameters**
+
+ destination_params : dict or str
+ connection parameters, either a url (str) or a dict containing a url (and optionally `private_token`).
+ job_id : str
+ Galaxy job/task id.
+ """
+
+ def __init__(self, destination_params, job_id, transport_type=None):
+ if isinstance(destination_params, str) or isinstance(destination_params, unicode):
+ destination_params = url_to_destination_params(destination_params)
+ self.remote_host = destination_params.get("url")
+ assert self.remote_host is not None, "Failed to determine url for LWR client."
+ self.private_key = destination_params.get("private_token", None)
+ self.job_id = job_id
+ self.transport = get_transport(transport_type)
+
+ def __build_url(self, command, args):
+ if self.private_key:
+ args["private_key"] = self.private_key
+ data = urllib.urlencode(args)
+ url = self.remote_host + command + "?" + data
+ return url
+
+ def __raw_execute(self, command, args={}, data=None, input_path=None, output_path=None):
+ url = self.__build_url(command, args)
+ response = self.transport.execute(url, data=data, input_path=input_path, output_path=output_path)
+ return response
+
+ @parseJson()
+ def __upload_file(self, action, path, name=None, contents=None):
+ if not name:
+ name = os.path.basename(path)
+ args = {"job_id": self.job_id, "name": name}
+ input_path = path
+ if contents:
+ input_path = None
+ return self.__raw_execute(action, args, contents, input_path)
+
+ def upload_tool_file(self, path):
+ """
+ Upload a tool related file (e.g. wrapper) required to run job.
+
+ **Parameters**
+
+ path : str
+ Local path of the tool file.
+ """
+ return self.__upload_file("upload_tool_file", path)
+
+ def upload_input(self, path):
+ """
+ Upload input dataset to remote server.
+
+ **Parameters**
+
+ path : str
+ Local path of input dataset.
+ """
+ return self.__upload_file("upload_input", path)
+
+ def upload_extra_input(self, path, relative_name):
+ """
+ Upload extra input file to remote server.
+
+ **Parameters**
+
+ path : str
+ Extra files path of input dataset corresponding to this input.
+ relative_name : str
+ Relative path of extra file to upload relative to inputs extra files path.
+ """
+ return self.__upload_file("upload_extra_input", path, name=relative_name)
+
+ def upload_config_file(self, path, contents):
+ """
+ Upload a job's config file to the remote server.
+
+ **Parameters**
+
+ path : str
+ Local path to the original config file.
+ contents : str
+ Rewritten contents of the config file to upload.
+ """
+ return self.__upload_file("upload_config_file", path, contents=contents)
+
+ def upload_working_directory_file(self, path):
+ """
+ Upload the supplied file (path) from a job's working directory
+ to remote server.
+
+ **Parameters**
+
+ path : str
+ Path to file to upload.
+ """
+ return self.__upload_file("upload_working_directory_file", path)
+
+ @parseJson()
+ def _get_output_type(self, name):
+ return self.__raw_execute("get_output_type", {"name": name,
+ "job_id": self.job_id})
+
+ def download_work_dir_output(self, source, working_directory, output_path):
+ """
+ Download an output dataset specified with from_work_dir from the
+ remote server.
+
+ **Parameters**
+
+ source : str
+ Path in job's working_directory to find output in.
+ working_directory : str
+ Local working_directory for the job.
+ output_path : str
+ Full path to output dataset.
+ """
+ output = open(output_path, "wb")
+ name = os.path.basename(source)
+ self.__raw_download_output(name, self.job_id, "work_dir", output)
+
+ def download_output(self, path, working_directory):
+ """
+ Download an output dataset from the remote server.
+
+ **Parameters**
+
+ path : str
+ Local path of the dataset.
+ working_directory : str
+ Local working_directory for the job.
+ """
+ name = os.path.basename(path)
+ output_type = self._get_output_type(name)
+ if output_type == "direct":
+ output_path = path
+ elif output_type == "task":
+ output_path = os.path.join(working_directory, name)
+ else:
+ raise Exception("No remote output found for dataset with path %s" % path)
+ self.__raw_download_output(name, self.job_id, output_type, output_path)
+
+ def __raw_download_output(self, name, job_id, output_type, output_path):
+ self.__raw_execute("download_output",
+ {"name": name,
+ "job_id": self.job_id,
+ "output_type": output_type},
+ output_path=output_path)
+
+ def launch(self, command_line):
+ """
+ Run or queue up the execution of the supplied
+ `command_line` on the remote server.
+
+ **Parameters**
+
+ command_line : str
+ Command to execute.
+ """
+ return self.__raw_execute("launch", {"command_line": command_line,
+ "job_id": self.job_id})
+
+ def kill(self):
+ """
+ Cancel remote job, either removing from the queue or killing it.
+ """
+ return self.__raw_execute("kill", {"job_id": self.job_id})
+
+ def wait(self):
+ """
+ Wait for job to finish.
+ """
+ while True:
+ complete_response = self.raw_check_complete()
+ if complete_response["complete"] == "true":
+ return complete_response
+ time.sleep(1)
+
+ @parseJson()
+ def raw_check_complete(self):
+ """
+ Get check_complete response from the remote server.
+ """
+ check_complete_response = self.__raw_execute("check_complete", {"job_id": self.job_id})
+ return check_complete_response
+
+ def check_complete(self):
+ """
+ Return boolean indicating whether the job is complete.
+ """
+ return self.raw_check_complete()["complete"] == "true"
+
+ def clean(self):
+ """
+ Cleanup the remote job.
+ """
+ self.__raw_execute("clean", {"job_id": self.job_id})
+
+ @parseJson()
+ def setup(self, tool_id=None, tool_version=None):
+ """
+ Setup remote LWR server to run this job.
+ """
+ setup_args = {"job_id": self.job_id}
+ if tool_id:
+ setup_args["tool_id"] = tool_id
+ if tool_version:
+ setup_args["tool_version"] = tool_version
+ return self.__raw_execute("setup", setup_args)
diff -r e58bb74b6afe3ebac548a2b2d124c24bf149a24a -r e0607a09d9fa9eb690e9b410d9c167072db0b55a lib/galaxy/jobs/runners/lwr_client/destination.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/destination.py
@@ -0,0 +1,44 @@
+
+from re import match
+
+
+def url_to_destination_params(url):
+ """Convert a legacy runner URL to a job destination
+
+ >>> params_simple = url_to_destination_params("http://localhost:8913/")
+ >>> params_simple["url"]
+ 'http://localhost:8913/'
+ >>> params_simple["private_token"] is None
+ True
+ >>> advanced_url = "https://1234x@example.com:8914/managers/longqueue"
+ >>> params_advanced = url_to_destination_params(advanced_url)
+ >>> params_advanced["url"]
+ 'https://example.com:8914/managers/longqueue/'
+ >>> params_advanced["private_token"]
+ '1234x'
+ >>> runner_url = "lwr://http://localhost:8913/"
+ >>> runner_params = url_to_destination_params(runner_url)
+ >>> runner_params['url']
+ 'http://localhost:8913/'
+ """
+
+ if url.startswith("lwr://"):
+ url = url[len("lwr://"):]
+
+ if not url.endswith("/"):
+ url += "/"
+
+ ## Check for private token embedded in the URL. A URL of the form
+ ## https://moo@cow:8913 will try to contact https://cow:8913
+ ## with a private key of moo
+ private_token_format = "https?://(.*)@.*/?"
+ private_token_match = match(private_token_format, url)
+ private_token = None
+ if private_token_match:
+ private_token = private_token_match.group(1)
+ url = url.replace("%s@" % private_token, '', 1)
+
+ destination_args = {"url": url,
+ "private_token": private_token}
+
+ return destination_args
diff -r e58bb74b6afe3ebac548a2b2d124c24bf149a24a -r e0607a09d9fa9eb690e9b410d9c167072db0b55a lib/galaxy/jobs/runners/lwr_client/stager.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/stager.py
@@ -0,0 +1,256 @@
+
+import os
+from re import findall
+
+
+class JobInputs(object):
+ """
+ Abstractions over dynamic inputs created for a given job (namely the command to
+ execute and created configfiles).
+
+ **Parameters**
+
+ command_line : str
+ Local command to execute for this job. (To be rewritten.)
+ config_files : str
+ Config files created for this job. (To be rewritten.)
+
+
+ >>> import tempfile
+ >>> tf = tempfile.NamedTemporaryFile()
+ >>> def setup_inputs(tf):
+ ... open(tf.name, "w").write("world /path/to/input the rest")
+ ... inputs = JobInputs("hello /path/to/input", [tf.name])
+ ... return inputs
+ >>> inputs = setup_inputs(tf)
+ >>> inputs.rewrite_paths("/path/to/input", 'C:\\input')
+ >>> inputs.rewritten_command_line
+ 'hello C:\\\\input'
+ >>> inputs.rewritten_config_files[tf.name]
+ 'world C:\\\\input the rest'
+ >>> tf.close()
+ >>> tf = tempfile.NamedTemporaryFile()
+ >>> inputs = setup_inputs(tf)
+ >>> inputs.find_referenced_subfiles('/path/to')
+ ['/path/to/input']
+ >>> inputs.path_referenced('/path/to')
+ True
+ >>> inputs.path_referenced('/path/to/input')
+ True
+ >>> inputs.path_referenced('/path/to/notinput')
+ False
+ >>> tf.close()
+ """
+
+ def __init__(self, command_line, config_files):
+ self.rewritten_command_line = command_line
+ self.rewritten_config_files = {}
+ for config_file in config_files or []:
+ config_contents = _read(config_file)
+ self.rewritten_config_files[config_file] = config_contents
+
+ def find_referenced_subfiles(self, directory):
+ """
+ Return list of files below specified `directory` in job inputs. Could
+ use more sophisticated logic (match quotes to handle spaces, handle
+ subdirectories, etc...).
+
+ **Parameters**
+
+ directory : str
+ Full path to directory to search.
+
+ """
+ pattern = r"(%s%s\S+)" % (directory, os.sep)
+ referenced_files = set()
+ for input_contents in self.__items():
+ referenced_files.update(findall(pattern, input_contents))
+ return list(referenced_files)
+
+ def path_referenced(self, path):
+ pattern = r"%s" % path
+ found = False
+ for input_contents in self.__items():
+ if findall(pattern, input_contents):
+ found = True
+ break
+ return found
+
+ def rewrite_paths(self, local_path, remote_path):
+ """
+ Rewrite references to `local_path` with `remote_path` in job inputs.
+ """
+ self.__rewrite_command_line(local_path, remote_path)
+ self.__rewrite_config_files(local_path, remote_path)
+
+ def __rewrite_command_line(self, local_path, remote_path):
+ self.rewritten_command_line = self.rewritten_command_line.replace(local_path, remote_path)
+
+ def __rewrite_config_files(self, local_path, remote_path):
+ for config_file, rewritten_contents in self.rewritten_config_files.iteritems():
+ self.rewritten_config_files[config_file] = rewritten_contents.replace(local_path, remote_path)
+
+ def __items(self):
+ items = [self.rewritten_command_line]
+ items.extend(self.rewritten_config_files.values())
+ return items
+
+
+class FileStager(object):
+ """
+ Objects of the FileStager class interact with an LWR client object to
+ stage the files required to run jobs on a remote LWR server.
+
+ **Parameters**
+
+ client : Client
+ LWR client object.
+ command_line : str
+ The local command line to execute, this will be rewritten for the remote server.
+ config_files : list
+ List of Galaxy 'configfile's produced for this job. These will be rewritten and sent to remote server.
+ input_files : list
+ List of input files used by job. These will be transferred and references rewritten.
+ output_files : list
+ List of output_files produced by job.
+ tool_dir : str
+ Directory containing tool to execute (if a wrapper is used, it will be transferred to remote server).
+ working_directory : str
+ Local path created by Galaxy for running this job.
+
+ """
+
+ def __init__(self, client, tool, command_line, config_files, input_files, output_files, working_directory):
+ """
+ """
+ self.client = client
+ self.command_line = command_line
+ self.config_files = config_files
+ self.input_files = input_files
+ self.output_files = output_files
+ self.tool_id = tool.id
+ self.tool_version = tool.version
+ self.tool_dir = os.path.abspath(tool.tool_dir)
+ self.working_directory = working_directory
+
+ # Setup job inputs, these will need to be rewritten before
+ # shipping off to remote LWR server.
+ self.job_inputs = JobInputs(self.command_line, self.config_files)
+
+ self.file_renames = {}
+
+ self.__handle_setup()
+ self.__initialize_referenced_tool_files()
+ self.__upload_tool_files()
+ self.__upload_input_files()
+ self.__upload_working_directory_files()
+ self.__initialize_output_file_renames()
+ self.__initialize_task_output_file_renames()
+ self.__initialize_config_file_renames()
+ self.__handle_rewrites()
+ self.__upload_rewritten_config_files()
+
+ def __handle_setup(self):
+ job_config = self.client.setup(self.tool_id, self.tool_version)
+
+ self.new_working_directory = job_config['working_directory']
+ self.new_outputs_directory = job_config['outputs_directory']
+ self.remote_path_separator = job_config['path_separator']
+ # If remote LWR server assigned job id, use that otherwise
+ # just use local job_id assigned.
+ galaxy_job_id = self.client.job_id
+ self.job_id = job_config.get('job_id', galaxy_job_id)
+ if self.job_id != galaxy_job_id:
+ # Remote LWR server assigned an id different than the
+ # Galaxy job id, update client to reflect this.
+ self.client.job_id = self.job_id
+
+ def __initialize_referenced_tool_files(self):
+ self.referenced_tool_files = self.job_inputs.find_referenced_subfiles(self.tool_dir)
+
+ def __upload_tool_files(self):
+ for referenced_tool_file in self.referenced_tool_files:
+ tool_upload_response = self.client.upload_tool_file(referenced_tool_file)
+ self.file_renames[referenced_tool_file] = tool_upload_response['path']
+
+ def __upload_input_files(self):
+ for input_file in self.input_files:
+ self.__upload_input_file(input_file)
+ self.__upload_input_extra_files(input_file)
+
+ def __upload_input_file(self, input_file):
+ if self.job_inputs.path_referenced(input_file):
+ input_upload_response = self.client.upload_input(input_file)
+ self.file_renames[input_file] = input_upload_response['path']
+
+ def __upload_input_extra_files(self, input_file):
+ # TODO: Determine if this is object store safe and what needs to be
+ # done if it is not.
+ files_path = "%s_files" % input_file[0:-len(".dat")]
+ if os.path.exists(files_path) and self.job_inputs.path_referenced(files_path):
+ for extra_file in os.listdir(files_path):
+ extra_file_path = os.path.join(files_path, extra_file)
+ relative_path = os.path.basename(files_path)
+ extra_file_relative_path = os.path.join(relative_path, extra_file)
+ response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
+ self.file_renames[extra_file_path] = response['path']
+
+ def __upload_working_directory_files(self):
+ # Task manager stages files into working directory, these need to be
+ # uploaded if present.
+ for working_directory_file in os.listdir(self.working_directory):
+ path = os.path.join(self.working_directory, working_directory_file)
+ working_file_response = self.client.upload_working_directory_file(path)
+ self.file_renames[path] = working_file_response['path']
+
+ def __initialize_output_file_renames(self):
+ for output_file in self.output_files:
+ self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
+ self.remote_path_separator,
+ os.path.basename(output_file))
+
+ def __initialize_task_output_file_renames(self):
+ for output_file in self.output_files:
+ name = os.path.basename(output_file)
+ self.file_renames[os.path.join(self.working_directory, name)] = r'%s%s%s' % (self.new_working_directory,
+ self.remote_path_separator,
+ name)
+
+ def __initialize_config_file_renames(self):
+ for config_file in self.config_files:
+ self.file_renames[config_file] = r'%s%s%s' % (self.new_working_directory,
+ self.remote_path_separator,
+ os.path.basename(config_file))
+
+ def __rewrite_paths(self, contents):
+ new_contents = contents
+ for local_path, remote_path in self.file_renames.iteritems():
+ new_contents = new_contents.replace(local_path, remote_path)
+ return new_contents
+
+ def __handle_rewrites(self):
+ for local_path, remote_path in self.file_renames.iteritems():
+ self.job_inputs.rewrite_paths(local_path, remote_path)
+
+ def __upload_rewritten_config_files(self):
+ for config_file, new_config_contents in self.job_inputs.rewritten_config_files.iteritems():
+ self.client.upload_config_file(config_file, new_config_contents)
+
+ def get_rewritten_command_line(self):
+ """
+ Returns the rewritten version of the command line to execute suitable
+ for remote host.
+ """
+ return self.job_inputs.rewritten_command_line
+
+
+def _read(path):
+ """
+ Utility method to quickly read small files (config files and tool
+ wrappers) into memory as strings.
+ """
+ input = open(path, "r")
+ try:
+ return input.read()
+ finally:
+ input.close()
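For context, the staging sequence assembled above can be driven roughly as follows. This is a hedged sketch assuming the refactored lwr_client package layout; the tool stub, URLs and paths are hypothetical and not part of this changeset:

    # Hedged sketch: drive the FileStager against a remote LWR server.
    from galaxy.jobs.runners.lwr_client import Client, FileStager

    class ToolStub(object):
        # Stands in for a real Galaxy tool object (only these attributes
        # are read by FileStager).
        id, version, tool_dir = "cat1", "1.0.0", "/galaxy/tools/filters"

    client = Client("http://lwrhost:8913/", "123")
    stager = FileStager(client, ToolStub(),
                        command_line="cat /galaxy/files/dataset_1.dat",
                        config_files=[],
                        input_files=["/galaxy/files/dataset_1.dat"],
                        output_files=["/galaxy/files/dataset_2.dat"],
                        working_directory="/galaxy/jwd/123")
    # All uploads and path rewrites happen inside FileStager.__init__(), so
    # the rewritten command line is immediately ready to launch remotely.
    client.launch(stager.get_rewritten_command_line())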
diff -r e58bb74b6afe3ebac548a2b2d124c24bf149a24a -r e0607a09d9fa9eb690e9b410d9c167072db0b55a lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
@@ -3,14 +3,24 @@
import os
-def get_transport(os_module=os):
- use_curl = os_module.getenv('LWR_CURL_TRANSPORT', "0")
- ## If LWR_CURL_TRANSPORT is unset or set to 0, use default,
- ## else use curl.
- if use_curl.isdigit() and not int(use_curl):
- return Urllib2Transport()
+def get_transport(transport_type=None, os_module=os):
+ transport_type = __get_transport_type(transport_type, os_module)
+ if transport_type == 'urllib':
+ transport = Urllib2Transport()
else:
- return PycurlTransport()
+ transport = PycurlTransport()
+ return transport
+def __get_transport_type(transport_type, os_module):
+ if not transport_type:
+ use_curl = os_module.getenv('LWR_CURL_TRANSPORT', "0")
+ ## If LWR_CURL_TRANSPORT is unset or set to 0, use default,
+ ## else use curl.
+ if use_curl.isdigit() and not int(use_curl):
+ transport_type = 'urllib'
+ else:
+ transport_type = 'curl'
+ return transport_type
+
__all__ = [get_transport]
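With this change the transport can be selected either explicitly via the new `transport_type` argument or, as before, via the legacy `LWR_CURL_TRANSPORT` environment variable; a quick sketch:

    import os

    from galaxy.jobs.runners.lwr_client.transport import get_transport

    transport = get_transport()         # Urllib2Transport by default
    transport = get_transport('curl')   # explicit PycurlTransport

    os.environ['LWR_CURL_TRANSPORT'] = '1'
    transport = get_transport()         # legacy env var still selects curl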
diff -r e58bb74b6afe3ebac548a2b2d124c24bf149a24a -r e0607a09d9fa9eb690e9b410d9c167072db0b55a lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -173,7 +173,9 @@
return JobDestination(runner='pbs', params=params)
def parse_destination_params(self, params):
- """A wrapper method around __args_to_attrs() that allow administrators to define PBS params as either command-line options (as in ``qsub(1B)``) or more human-readable "long" args (as in ``pbs_submit(3B)``).
+ """A wrapper method around __args_to_attrs() that allow administrators to define PBS
+ params as either command-line options (as in ``qsub(1B)``) or more human-readable "long"
+ args (as in ``pbs_submit(3B)``).
:returns: list of dicts -- The dicts map directly to pbs attropl structs (see ``pbs_submit(3B)``)
"""
@@ -185,7 +187,7 @@
arg = arg.lstrip('-')
args[arg] = value
except:
- log.warning('Unrecognized long argument in destination params: %s' % k)
+ log.warning('Unrecognized long argument in destination params: %s' % arg)
return self.__args_to_attrs(args)
# Internal stuff
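The second hunk fixes a latent bug: the except block logged the apparently out-of-scope name `k`, so emitting the warning could itself fail. A simplified, hypothetical reproduction of the pattern:

    # Simplified reproduction of the bug pattern fixed above; the loop
    # variable is `arg`, so that is the name the warning must reference.
    params = {'-l': 'walltime=24:00:00'}
    args = {}
    for arg, value in params.items():
        try:
            arg = arg.lstrip('-')
            args[arg] = value
        except Exception:
            # Referencing `k` here (as before the fix) would raise a
            # NameError and mask the original problem.
            print('Unrecognized long argument in destination params: %s' % arg)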
diff -r e58bb74b6afe3ebac548a2b2d124c24bf149a24a -r e0607a09d9fa9eb690e9b410d9c167072db0b55a lib/galaxy/jobs/runners/tasks.py
--- a/lib/galaxy/jobs/runners/tasks.py
+++ b/lib/galaxy/jobs/runners/tasks.py
@@ -9,12 +9,13 @@
from time import sleep
from galaxy.jobs import TaskWrapper
+from galaxy.jobs.runners import BaseJobRunner
log = logging.getLogger( __name__ )
__all__ = [ 'TaskedJobRunner' ]
-class TaskedJobRunner( object ):
+class TaskedJobRunner( BaseJobRunner ):
"""
Job runner backed by a finite pool of worker threads. FIFO scheduling
"""
@@ -25,7 +26,7 @@
self._init_worker_threads()
def queue_job( self, job_wrapper ):
- super( LocalJobRunner, self ).queue_job( job_wrapper )
+ super( TaskedJobRunner, self ).queue_job( job_wrapper )
if not job_wrapper.is_ready:
return
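The one-line super() fix matters because super() must name the class it appears in; a minimal, self-contained illustration (not the actual runner classes):

    # Minimal sketch of why the super() target had to change.
    class Base(object):
        def queue_job(self, job_wrapper):
            print('base queue_job ran')

    class Tasked(Base):
        def queue_job(self, job_wrapper):
            # Naming LocalJobRunner here would fail at call time -- a
            # NameError if that class is not imported, or a wrong MRO
            # lookup if it is. Naming the defining class is correct.
            super(Tasked, self).queue_job(job_wrapper)

    Tasked().queue_job(None)   # prints 'base queue_job ran'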
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
8 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/547d6844325a/
changeset: 547d6844325a
user: jmchilton
date: 2013-03-10 06:51:59
summary: Update LWR infrastructure for new destination-based configuration of Galaxy runners. Implement legacy mapping functionality. Break lwr_client/__init__.py into separate smaller modules.
Updates LWR client to LWR changeset https://bitbucket.org/jmchilton/lwr/commits/b0fbc1a299baa470b9346127767fcbe….
affected #: 5 files
diff -r 2b4d6af45a04ddb49c2c87a0ead3874503d35832 -r 547d6844325ad305cc5d713365e3d66d75bbb5c1 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -3,11 +3,12 @@
from galaxy import model
from galaxy.jobs.runners import AsynchronousJobState, AsynchronousJobRunner
+from galaxy.jobs import JobDestination
import errno
from time import sleep
-from lwr_client import FileStager, Client
+from lwr_client import FileStager, Client, url_to_destination_params
log = logging.getLogger( __name__ )
@@ -25,6 +26,10 @@
self._init_monitor_thread()
self._init_worker_threads()
+ def url_to_destination( self, url ):
+ """Convert a legacy URL to a job destination"""
+ return JobDestination( runner="lwr", params=url_to_destination_params( url ) )
+
def check_watched_item(self, job_state):
try:
client = self.get_client_from_state(job_state)
@@ -42,7 +47,7 @@
def queue_job(self, job_wrapper):
stderr = stdout = command_line = ''
- runner_url = job_wrapper.get_job_runner_url()
+ job_destination = job_wrapper.job_destination
try:
job_wrapper.prepare()
@@ -74,7 +79,7 @@
rebuilt_command_line = file_stager.get_rewritten_command_line()
job_id = file_stager.job_id
client.launch( rebuilt_command_line )
- job_wrapper.set_runner( runner_url, job_id )
+ job_wrapper.set_job_destination( job_destination, job_id )
job_wrapper.change_state( model.Job.states.RUNNING )
except Exception, exc:
@@ -87,32 +92,26 @@
lwr_job_state.job_id = job_id
lwr_job_state.old_state = True
lwr_job_state.running = True
- lwr_job_state.runner_url = runner_url
+ lwr_job_state.job_destination = job_destination
self.monitor_job(lwr_job_state)
def get_output_files(self, job_wrapper):
output_fnames = job_wrapper.get_output_fnames()
return [ str( o ) for o in output_fnames ]
-
- def determine_lwr_url(self, url):
- lwr_url = url[ len( 'lwr://' ) : ]
- return lwr_url
-
def get_client_from_wrapper(self, job_wrapper):
job_id = job_wrapper.job_id
if hasattr(job_wrapper, 'task_id'):
job_id = "%s_%s" % (job_id, job_wrapper.task_id)
- return self.get_client( job_wrapper.get_job_runner_url(), job_id )
+ return self.get_client( job_wrapper.job_destination.params, job_id )
def get_client_from_state(self, job_state):
- job_runner = job_state.runner_url
+ job_destination_params = job_state.job_destination.params
job_id = job_state.job_id
- return self.get_client(job_runner, job_id)
+ return self.get_client( job_destination_params, job_id )
- def get_client(self, job_runner, job_id):
- lwr_url = self.determine_lwr_url( job_runner )
- return Client(lwr_url, job_id)
+ def get_client( self, job_destination_params, job_id ):
+ return Client( job_destination_params, job_id )
def finish_job( self, job_state ):
stderr = stdout = command_line = ''
@@ -210,7 +209,7 @@
lwr_url = job.job_runner_name
job_id = job.job_runner_external_id
log.debug("Attempt remote lwr kill of job with url %s and id %s" % (lwr_url, job_id))
- client = self.get_client(lwr_url, job_id)
+ client = self.get_client(job.destination_params, job_id)
client.kill()
@@ -219,6 +218,7 @@
job_state = AsynchronousJobState()
job_state.job_id = str( job.get_job_runner_external_id() )
job_state.runner_url = job_wrapper.get_job_runner_url()
+ job_state.job_destination = job_wrapper.job_destination
job_wrapper.command_line = job.get_command_line()
job_state.job_wrapper = job_wrapper
if job.get_state() == model.Job.states.RUNNING:
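The legacy-mapping path added above ties together like this; the example URL and expected values are taken from the destination module's own doctest:

    from galaxy.jobs import JobDestination
    from galaxy.jobs.runners.lwr_client import url_to_destination_params

    params = url_to_destination_params("https://1234x@example.com:8914/managers/longqueue")
    # params["url"]           == 'https://example.com:8914/managers/longqueue/'
    # params["private_token"] == '1234x'
    destination = JobDestination(runner="lwr", params=params)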
diff -r 2b4d6af45a04ddb49c2c87a0ead3874503d35832 -r 547d6844325ad305cc5d713365e3d66d75bbb5c1 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -5,502 +5,9 @@
This module contains logic for interfacing with an external LWR server.
"""
-import os
-import re
-import time
-import urllib
-import simplejson
+from .stager import FileStager
+from .client import Client
+from .destination import url_to_destination_params
-from transport import get_transport
-
-
-class JobInputs(object):
- """
- Abstractions over dynamic inputs created for a given job (namely the command to
- execute and created configfiles).
-
- **Parameters**
-
- command_line : str
- Local command to execute for this job. (To be rewritten.)
- config_files : str
- Config files created for this job. (To be rewritten.)
-
-
- >>> import tempfile
- >>> tf = tempfile.NamedTemporaryFile()
- >>> def setup_inputs(tf):
- ... open(tf.name, "w").write("world /path/to/input the rest")
- ... inputs = JobInputs("hello /path/to/input", [tf.name])
- ... return inputs
- >>> inputs = setup_inputs(tf)
- >>> inputs.rewrite_paths("/path/to/input", 'C:\\input')
- >>> inputs.rewritten_command_line
- 'hello C:\\\\input'
- >>> inputs.rewritten_config_files[tf.name]
- 'world C:\\\\input the rest'
- >>> tf.close()
- >>> tf = tempfile.NamedTemporaryFile()
- >>> inputs = setup_inputs(tf)
- >>> inputs.find_referenced_subfiles('/path/to')
- ['/path/to/input']
- >>> inputs.path_referenced('/path/to')
- True
- >>> inputs.path_referenced('/path/to/input')
- True
- >>> inputs.path_referenced('/path/to/notinput')
- False
- >>> tf.close()
- """
-
- def __init__(self, command_line, config_files):
- self.rewritten_command_line = command_line
- self.rewritten_config_files = {}
- for config_file in config_files or []:
- config_contents = _read(config_file)
- self.rewritten_config_files[config_file] = config_contents
-
- def find_referenced_subfiles(self, directory):
- """
- Return list of files below specified `directory` in job inputs. Could
- use more sophisticated logic (match quotes to handle spaces, handle
- subdirectories, etc...).
-
- **Parameters**
-
- directory : str
- Full path to directory to search.
-
- """
- pattern = r"(%s%s\S+)" % (directory, os.sep)
- referenced_files = set()
- for input_contents in self.__items():
- referenced_files.update(re.findall(pattern, input_contents))
- return list(referenced_files)
-
- def path_referenced(self, path):
- pattern = r"%s" % path
- found = False
- for input_contents in self.__items():
- if re.findall(pattern, input_contents):
- found = True
- break
- return found
-
- def rewrite_paths(self, local_path, remote_path):
- """
- Rewrite references to `local_path` with `remote_path` in job inputs.
- """
- self.__rewrite_command_line(local_path, remote_path)
- self.__rewrite_config_files(local_path, remote_path)
-
- def __rewrite_command_line(self, local_path, remote_path):
- self.rewritten_command_line = self.rewritten_command_line.replace(local_path, remote_path)
-
- def __rewrite_config_files(self, local_path, remote_path):
- for config_file, rewritten_contents in self.rewritten_config_files.iteritems():
- self.rewritten_config_files[config_file] = rewritten_contents.replace(local_path, remote_path)
-
- def __items(self):
- items = [self.rewritten_command_line]
- items.extend(self.rewritten_config_files.values())
- return items
-
-
-class FileStager(object):
- """
- Objects of the FileStager class interact with an LWR client object to
- stage the files required to run jobs on a remote LWR server.
-
- **Parameters**
-
- client : Client
- LWR client object.
- command_line : str
- The local command line to execute, this will be rewritten for the remote server.
- config_files : list
- List of Galaxy 'configfile's produced for this job. These will be rewritten and sent to remote server.
- input_files : list
- List of input files used by job. These will be transferred and references rewritten.
- output_files : list
- List of output_files produced by job.
- tool_dir : str
- Directory containing tool to execute (if a wrapper is used, it will be transferred to remote server).
- working_directory : str
- Local path created by Galaxy for running this job.
-
- """
-
- def __init__(self, client, tool, command_line, config_files, input_files, output_files, working_directory):
- """
- """
- self.client = client
- self.command_line = command_line
- self.config_files = config_files
- self.input_files = input_files
- self.output_files = output_files
- self.tool_id = tool.id
- self.tool_version = tool.version
- self.tool_dir = os.path.abspath(tool.tool_dir)
- self.working_directory = working_directory
-
- # Setup job inputs, these will need to be rewritten before
- # shipping off to remote LWR server.
- self.job_inputs = JobInputs(self.command_line, self.config_files)
-
- self.file_renames = {}
-
- self.__handle_setup()
- self.__initialize_referenced_tool_files()
- self.__upload_tool_files()
- self.__upload_input_files()
- self.__upload_working_directory_files()
- self.__initialize_output_file_renames()
- self.__initialize_task_output_file_renames()
- self.__initialize_config_file_renames()
- self.__handle_rewrites()
- self.__upload_rewritten_config_files()
-
- def __handle_setup(self):
- job_config = self.client.setup(self.tool_id, self.tool_version)
-
- self.new_working_directory = job_config['working_directory']
- self.new_outputs_directory = job_config['outputs_directory']
- self.remote_path_separator = job_config['path_separator']
- # If remote LWR server assigned job id, use that otherwise
- # just use local job_id assigned.
- galaxy_job_id = self.client.job_id
- self.job_id = job_config.get('job_id', galaxy_job_id)
- if self.job_id != galaxy_job_id:
- # Remote LWR server assigned an id different than the
- # Galaxy job id, update client to reflect this.
- self.client.job_id = self.job_id
-
- def __initialize_referenced_tool_files(self):
- self.referenced_tool_files = self.job_inputs.find_referenced_subfiles(self.tool_dir)
-
- def __upload_tool_files(self):
- for referenced_tool_file in self.referenced_tool_files:
- tool_upload_response = self.client.upload_tool_file(referenced_tool_file)
- self.file_renames[referenced_tool_file] = tool_upload_response['path']
-
- def __upload_input_files(self):
- for input_file in self.input_files:
- self.__upload_input_file(input_file)
- self.__upload_input_extra_files(input_file)
-
- def __upload_input_file(self, input_file):
- if self.job_inputs.path_referenced(input_file):
- input_upload_response = self.client.upload_input(input_file)
- self.file_renames[input_file] = input_upload_response['path']
-
- def __upload_input_extra_files(self, input_file):
- # TODO: Determine if this is object store safe and what needs to be
- # done if it is not.
- files_path = "%s_files" % input_file[0:-len(".dat")]
- if os.path.exists(files_path) and self.job_inputs.path_referenced(files_path):
- for extra_file in os.listdir(files_path):
- extra_file_path = os.path.join(files_path, extra_file)
- relative_path = os.path.basename(files_path)
- extra_file_relative_path = os.path.join(relative_path, extra_file)
- response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
- self.file_renames[extra_file_path] = response['path']
-
- def __upload_working_directory_files(self):
- # Task manager stages files into working directory, these need to be
- # uploaded if present.
- for working_directory_file in os.listdir(self.working_directory):
- path = os.path.join(self.working_directory, working_directory_file)
- working_file_response = self.client.upload_working_directory_file(path)
- self.file_renames[path] = working_file_response['path']
-
- def __initialize_output_file_renames(self):
- for output_file in self.output_files:
- self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
- self.remote_path_separator,
- os.path.basename(output_file))
-
- def __initialize_task_output_file_renames(self):
- for output_file in self.output_files:
- name = os.path.basename(output_file)
- self.file_renames[os.path.join(self.working_directory, name)] = r'%s%s%s' % (self.new_working_directory,
- self.remote_path_separator,
- name)
-
- def __initialize_config_file_renames(self):
- for config_file in self.config_files:
- self.file_renames[config_file] = r'%s%s%s' % (self.new_working_directory,
- self.remote_path_separator,
- os.path.basename(config_file))
-
- def __rewrite_paths(self, contents):
- new_contents = contents
- for local_path, remote_path in self.file_renames.iteritems():
- new_contents = new_contents.replace(local_path, remote_path)
- return new_contents
-
- def __handle_rewrites(self):
- for local_path, remote_path in self.file_renames.iteritems():
- self.job_inputs.rewrite_paths(local_path, remote_path)
-
- def __upload_rewritten_config_files(self):
- for config_file, new_config_contents in self.job_inputs.rewritten_config_files.iteritems():
- self.client.upload_config_file(config_file, new_config_contents)
-
- def get_rewritten_command_line(self):
- """
- Returns the rewritten version of the command line to execute suitable
- for remote host.
- """
- return self.job_inputs.rewritten_command_line
-
-
-class parseJson(object):
-
- def __init__(self):
- pass
-
- def __call__(self, func):
- def replacement(*args, **kwargs):
- response = func(*args, **kwargs)
- return simplejson.loads(response)
- return replacement
-
-
-class Client(object):
- """
- Objects of this client class perform low-level communication with a remote LWR server.
-
- **Parameters**
-
- remote_host : str
- Remote URL of the LWR server.
- job_id : str
- Galaxy job/task id.
- private_key : str (optional)
- Secret key the remote LWR server is configured with.
- """
-
- def __init__(self, remote_host, job_id, private_key=None):
- if not remote_host.endswith("/"):
- remote_host = remote_host + "/"
- ## If we don't have an explicit private_key defined, check for
- ## one embedded in the URL. A URL of the form
- ## https://moo@cow:8913 will try to contact https://cow:8913
- ## with a private key of moo
- private_key_format = "https?://(.*)@.*/?"
- private_key_match = re.match(private_key_format, remote_host)
- if not private_key and private_key_match:
- private_key = private_key_match.group(1)
- remote_host = remote_host.replace("%s@" % private_key, '', 1)
- self.remote_host = remote_host
- self.job_id = job_id
- self.private_key = private_key
- self.transport = get_transport()
-
- def __build_url(self, command, args):
- if self.private_key:
- args["private_key"] = self.private_key
- data = urllib.urlencode(args)
- url = self.remote_host + command + "?" + data
- return url
-
- def __raw_execute(self, command, args={}, data=None, input_path=None, output_path=None):
- url = self.__build_url(command, args)
- response = self.transport.execute(url, data=data, input_path=input_path, output_path=output_path)
- return response
-
- @parseJson()
- def __upload_file(self, action, path, name=None, contents=None):
- if not name:
- name = os.path.basename(path)
- args = {"job_id": self.job_id, "name": name}
- input_path = path
- if contents:
- input_path = None
- return self.__raw_execute(action, args, contents, input_path)
-
- def upload_tool_file(self, path):
- """
- Upload a tool related file (e.g. wrapper) required to run job.
-
- **Parameters**
-
- path : str
- Local path tool.
- """
- return self.__upload_file("upload_tool_file", path)
-
- def upload_input(self, path):
- """
- Upload input dataset to remote server.
-
- **Parameters**
-
- path : str
- Local path of input dataset.
- """
- return self.__upload_file("upload_input", path)
-
- def upload_extra_input(self, path, relative_name):
- """
- Upload extra input file to remote server.
-
- **Parameters**
-
- path : str
- Extra files path of input dataset corresponding to this input.
- relative_name : str
- Relative path of extra file to upload relative to inputs extra files path.
- """
- return self.__upload_file("upload_extra_input", path, name=relative_name)
-
- def upload_config_file(self, path, contents):
- """
- Upload a job's config file to the remote server.
-
- **Parameters**
-
- path : str
- Local path to the original config file.
- contents : str
- Rewritten contents of the config file to upload.
- """
- return self.__upload_file("upload_config_file", path, contents=contents)
-
- def upload_working_directory_file(self, path):
- """
- Upload the supplied file (path) from a job's working directory
- to remote server.
-
- **Parameters**
-
- path : str
- Path to file to upload.
- """
- return self.__upload_file("upload_working_directory_file", path)
-
- @parseJson()
- def _get_output_type(self, name):
- return self.__raw_execute("get_output_type", {"name": name,
- "job_id": self.job_id})
-
- def download_work_dir_output(self, source, working_directory, output_path):
- """
- Download an output dataset specified with from_work_dir from the
- remote server.
-
- **Parameters**
-
- source : str
- Path in job's working_directory to find output in.
- working_directory : str
- Local working_directory for the job.
- output_path : str
- Full path to output dataset.
- """
- output = open(output_path, "wb")
- name = os.path.basename(source)
- self.__raw_download_output(name, self.job_id, "work_dir", output)
-
- def download_output(self, path, working_directory):
- """
- Download an output dataset from the remote server.
-
- **Parameters**
-
- path : str
- Local path of the dataset.
- working_directory : str
- Local working_directory for the job.
- """
- name = os.path.basename(path)
- output_type = self._get_output_type(name)
- if output_type == "direct":
- output_path = path
- elif output_type == "task":
- output_path = os.path.join(working_directory, name)
- else:
- raise Exception("No remote output found for dataset with path %s" % path)
- self.__raw_download_output(name, self.job_id, output_type, output_path)
-
- def __raw_download_output(self, name, job_id, output_type, output_path):
- self.__raw_execute("download_output",
- {"name": name,
- "job_id": self.job_id,
- "output_type": output_type},
- output_path=output_path)
-
- def launch(self, command_line):
- """
- Run or queue up the execution of the supplied
- `command_line` on the remote server.
-
- **Parameters**
-
- command_line : str
- Command to execute.
- """
- return self.__raw_execute("launch", {"command_line": command_line,
- "job_id": self.job_id})
-
- def kill(self):
- """
- Cancel remote job, either removing from the queue or killing it.
- """
- return self.__raw_execute("kill", {"job_id": self.job_id})
-
- def wait(self):
- """
- Wait for job to finish.
- """
- while True:
- complete_response = self.raw_check_complete()
- if complete_response["complete"] == "true":
- return complete_response
- time.sleep(1)
-
- @parseJson()
- def raw_check_complete(self):
- """
- Get check_complete response from the remote server.
- """
- check_complete_response = self.__raw_execute("check_complete", {"job_id": self.job_id})
- return check_complete_response
-
- def check_complete(self):
- """
- Return boolean indicating whether the job is complete.
- """
- return self.raw_check_complete()["complete"] == "true"
-
- def clean(self):
- """
- Cleanup the remote job.
- """
- self.__raw_execute("clean", {"job_id": self.job_id})
-
- @parseJson()
- def setup(self, tool_id=None, tool_version=None):
- """
- Setup remote LWR server to run this job.
- """
- setup_args = {"job_id": self.job_id}
- if tool_id:
- setup_args["tool_id"] = tool_id
- if tool_version:
- setup_args["tool_version"] = tool_version
- return self.__raw_execute("setup", setup_args)
-
-
-def _read(path):
- """
- Utility method to quickly read small files (config files and tool
- wrappers) into memory as strings.
- """
- input = open(path, "r")
- try:
- return input.read()
- finally:
- input.close()
+__all__ = [Client, FileStager, url_to_destination_params]
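After the split, callers keep importing the same names from the package root:

    # The public surface is unchanged; FileStager, Client and
    # url_to_destination_params are now re-exported from stager.py,
    # client.py and destination.py respectively.
    from galaxy.jobs.runners.lwr_client import Client, FileStager, url_to_destination_params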
diff -r 2b4d6af45a04ddb49c2c87a0ead3874503d35832 -r 547d6844325ad305cc5d713365e3d66d75bbb5c1 lib/galaxy/jobs/runners/lwr_client/client.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/client.py
@@ -0,0 +1,235 @@
+import os
+import time
+import urllib
+import simplejson
+
+from .transport import get_transport
+from .destination import url_to_destination_params
+
+
+class parseJson(object):
+
+ def __init__(self):
+ pass
+
+ def __call__(self, func):
+ def replacement(*args, **kwargs):
+ response = func(*args, **kwargs)
+ return simplejson.loads(response)
+ return replacement
+
+
+class Client(object):
+ """
+ Objects of this client class perform low-level communication with a remote LWR server.
+
+ **Parameters**
+
+ destination_params : dict or str
+ connection parameters, either url with dict containing url (and optionally `private_token`).
+ job_id : str
+ Galaxy job/task id.
+ """
+
+ def __init__(self, destination_params, job_id):
+ if isinstance(destination_params, str) or isinstance(destination_params, unicode):
+ destination_params = url_to_destination_params(destination_params)
+ self.remote_host = destination_params.get("url")
+ assert self.remote_host != None, "Failed to determine url for LWR client."
+ self.private_key = destination_params.get("private_token", None)
+ self.job_id = job_id
+ self.transport = get_transport()
+
+ def __build_url(self, command, args):
+ if self.private_key:
+ args["private_key"] = self.private_key
+ data = urllib.urlencode(args)
+ url = self.remote_host + command + "?" + data
+ return url
+
+ def __raw_execute(self, command, args={}, data=None, input_path=None, output_path=None):
+ url = self.__build_url(command, args)
+ response = self.transport.execute(url, data=data, input_path=input_path, output_path=output_path)
+ return response
+
+ @parseJson()
+ def __upload_file(self, action, path, name=None, contents=None):
+ if not name:
+ name = os.path.basename(path)
+ args = {"job_id": self.job_id, "name": name}
+ input_path = path
+ if contents:
+ input_path = None
+ return self.__raw_execute(action, args, contents, input_path)
+
+ def upload_tool_file(self, path):
+ """
+ Upload a tool related file (e.g. wrapper) required to run job.
+
+ **Parameters**
+
+ path : str
+ Local path tool.
+ """
+ return self.__upload_file("upload_tool_file", path)
+
+ def upload_input(self, path):
+ """
+ Upload input dataset to remote server.
+
+ **Parameters**
+
+ path : str
+ Local path of input dataset.
+ """
+ return self.__upload_file("upload_input", path)
+
+ def upload_extra_input(self, path, relative_name):
+ """
+ Upload extra input file to remote server.
+
+ **Parameters**
+
+ path : str
+ Extra files path of input dataset corresponding to this input.
+ relative_name : str
+ Relative path of extra file to upload relative to inputs extra files path.
+ """
+ return self.__upload_file("upload_extra_input", path, name=relative_name)
+
+ def upload_config_file(self, path, contents):
+ """
+ Upload a job's config file to the remote server.
+
+ **Parameters**
+
+ path : str
+ Local path to the original config file.
+ contents : str
+ Rewritten contents of the config file to upload.
+ """
+ return self.__upload_file("upload_config_file", path, contents=contents)
+
+ def upload_working_directory_file(self, path):
+ """
+ Upload the supplied file (path) from a job's working directory
+ to remote server.
+
+ **Parameters**
+
+ path : str
+ Path to file to upload.
+ """
+ return self.__upload_file("upload_working_directory_file", path)
+
+ @parseJson()
+ def _get_output_type(self, name):
+ return self.__raw_execute("get_output_type", {"name": name,
+ "job_id": self.job_id})
+
+ def download_work_dir_output(self, source, working_directory, output_path):
+ """
+ Download an output dataset specified with from_work_dir from the
+ remote server.
+
+ **Parameters**
+
+ source : str
+ Path in job's working_directory to find output in.
+ working_directory : str
+ Local working_directory for the job.
+ output_path : str
+ Full path to output dataset.
+ """
+ output = open(output_path, "wb")
+ name = os.path.basename(source)
+ self.__raw_download_output(name, self.job_id, "work_dir", output)
+
+ def download_output(self, path, working_directory):
+ """
+ Download an output dataset from the remote server.
+
+ **Parameters**
+
+ path : str
+ Local path of the dataset.
+ working_directory : str
+ Local working_directory for the job.
+ """
+ name = os.path.basename(path)
+ output_type = self._get_output_type(name)
+ if output_type == "direct":
+ output_path = path
+ elif output_type == "task":
+ output_path = os.path.join(working_directory, name)
+ else:
+ raise Exception("No remote output found for dataset with path %s" % path)
+ self.__raw_download_output(name, self.job_id, output_type, output_path)
+
+ def __raw_download_output(self, name, job_id, output_type, output_path):
+ self.__raw_execute("download_output",
+ {"name": name,
+ "job_id": self.job_id,
+ "output_type": output_type},
+ output_path=output_path)
+
+ def launch(self, command_line):
+ """
+ Run or queue up the execution of the supplied
+ `command_line` on the remote server.
+
+ **Parameters**
+
+ command_line : str
+ Command to execute.
+ """
+ return self.__raw_execute("launch", {"command_line": command_line,
+ "job_id": self.job_id})
+
+ def kill(self):
+ """
+ Cancel remote job, either removing from the queue or killing it.
+ """
+ return self.__raw_execute("kill", {"job_id": self.job_id})
+
+ def wait(self):
+ """
+ Wait for job to finish.
+ """
+ while True:
+ complete_response = self.raw_check_complete()
+ if complete_response["complete"] == "true":
+ return complete_response
+ time.sleep(1)
+
+ @parseJson()
+ def raw_check_complete(self):
+ """
+ Get check_complete response from the remote server.
+ """
+ check_complete_response = self.__raw_execute("check_complete", {"job_id": self.job_id})
+ return check_complete_response
+
+ def check_complete(self):
+ """
+ Return boolean indicating whether the job is complete.
+ """
+ return self.raw_check_complete()["complete"] == "true"
+
+ def clean(self):
+ """
+ Cleanup the remote job.
+ """
+ self.__raw_execute("clean", {"job_id": self.job_id})
+
+ @parseJson()
+ def setup(self, tool_id=None, tool_version=None):
+ """
+ Setup remote LWR server to run this job.
+ """
+ setup_args = {"job_id": self.job_id}
+ if tool_id:
+ setup_args["tool_id"] = tool_id
+ if tool_version:
+ setup_args["tool_version"] = tool_version
+ return self.__raw_execute("setup", setup_args)
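End to end, the client methods above compose into a simple remote-execution loop. A hedged sketch using only the calls shown in this file (ids, paths and token values are placeholders):

    from galaxy.jobs.runners.lwr_client import Client

    destination_params = {"url": "https://lwrhost:8913/",
                          "private_token": "123456789changeme"}
    client = Client(destination_params, "42")
    client.setup(tool_id="cat1", tool_version="1.0.0")
    client.launch("cat input.dat > output.dat")
    client.wait()                      # polls check_complete once per second
    client.download_output("/galaxy/files/output.dat",
                           working_directory="/galaxy/jwd/42")
    client.clean()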
diff -r 2b4d6af45a04ddb49c2c87a0ead3874503d35832 -r 547d6844325ad305cc5d713365e3d66d75bbb5c1 lib/galaxy/jobs/runners/lwr_client/destination.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/destination.py
@@ -0,0 +1,40 @@
+
+from re import match
+
+
+def url_to_destination_params(url):
+ """Convert a legacy runner URL to a job destination
+
+ >>> params_simple = url_to_destination_params("http://localhost:8913/")
+ >>> params_simple["url"]
+ 'http://localhost:8913/'
+ >>> params_simple["private_token"] is None
+ True
+ >>> advanced_url = "https://1234x@example.com:8914/managers/longqueue"
+ >>> params_advanced = url_to_destination_params(advanced_url)
+ >>> params_advanced["url"]
+ 'https://example.com:8914/managers/longqueue/'
+ >>> params_advanced["private_token"]
+ '1234x'
+ """
+
+ if url.startswith("lwr://"):
+ url = url[len("lwr://"):]
+
+ if not url.endswith("/"):
+ url += "/"
+
+ ## Check for private token embedded in the URL. A URL of the form
+ ## https://moo@cow:8913 will try to contact https://cow:8913
+ ## with a private key of moo
+ private_token_format = "https?://(.*)@.*/?"
+ private_token_match = match(private_token_format, url)
+ private_token = None
+ if private_token_match:
+ private_token = private_token_match.group(1)
+ url = url.replace("%s@" % private_token, '', 1)
+
+ destination_args = {"url": url,
+ "private_token": private_token}
+
+ return destination_args
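The embedded doctest above doubles as a regression test and needs no remote server to run:

    # Exercise the destination module's doctests directly.
    import doctest
    from galaxy.jobs.runners.lwr_client import destination

    doctest.testmod(destination, verbose=True)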
diff -r 2b4d6af45a04ddb49c2c87a0ead3874503d35832 -r 547d6844325ad305cc5d713365e3d66d75bbb5c1 lib/galaxy/jobs/runners/lwr_client/stager.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/stager.py
@@ -0,0 +1,256 @@
+
+import os
+from re import findall
+
+
+class JobInputs(object):
+ """
+ Abstractions over dynamic inputs created for a given job (namely the command to
+ execute and created configfiles).
+
+ **Parameters**
+
+ command_line : str
+ Local command to execute for this job. (To be rewritten.)
+ config_files : str
+ Config files created for this job. (To be rewritten.)
+
+
+ >>> import tempfile
+ >>> tf = tempfile.NamedTemporaryFile()
+ >>> def setup_inputs(tf):
+ ... open(tf.name, "w").write("world /path/to/input the rest")
+ ... inputs = JobInputs("hello /path/to/input", [tf.name])
+ ... return inputs
+ >>> inputs = setup_inputs(tf)
+ >>> inputs.rewrite_paths("/path/to/input", 'C:\\input')
+ >>> inputs.rewritten_command_line
+ 'hello C:\\\\input'
+ >>> inputs.rewritten_config_files[tf.name]
+ 'world C:\\\\input the rest'
+ >>> tf.close()
+ >>> tf = tempfile.NamedTemporaryFile()
+ >>> inputs = setup_inputs(tf)
+ >>> inputs.find_referenced_subfiles('/path/to')
+ ['/path/to/input']
+ >>> inputs.path_referenced('/path/to')
+ True
+ >>> inputs.path_referenced('/path/to/input')
+ True
+ >>> inputs.path_referenced('/path/to/notinput')
+ False
+ >>> tf.close()
+ """
+
+ def __init__(self, command_line, config_files):
+ self.rewritten_command_line = command_line
+ self.rewritten_config_files = {}
+ for config_file in config_files or []:
+ config_contents = _read(config_file)
+ self.rewritten_config_files[config_file] = config_contents
+
+ def find_referenced_subfiles(self, directory):
+ """
+ Return list of files below specified `directory` in job inputs. Could
+ use more sophisticated logic (match quotes to handle spaces, handle
+ subdirectories, etc...).
+
+ **Parameters**
+
+ directory : str
+ Full path to directory to search.
+
+ """
+ pattern = r"(%s%s\S+)" % (directory, os.sep)
+ referenced_files = set()
+ for input_contents in self.__items():
+ referenced_files.update(findall(pattern, input_contents))
+ return list(referenced_files)
+
+ def path_referenced(self, path):
+ pattern = r"%s" % path
+ found = False
+ for input_contents in self.__items():
+ if findall(pattern, input_contents):
+ found = True
+ break
+ return found
+
+ def rewrite_paths(self, local_path, remote_path):
+ """
+ Rewrite references to `local_path` with `remote_path` in job inputs.
+ """
+ self.__rewrite_command_line(local_path, remote_path)
+ self.__rewrite_config_files(local_path, remote_path)
+
+ def __rewrite_command_line(self, local_path, remote_path):
+ self.rewritten_command_line = self.rewritten_command_line.replace(local_path, remote_path)
+
+ def __rewrite_config_files(self, local_path, remote_path):
+ for config_file, rewritten_contents in self.rewritten_config_files.iteritems():
+ self.rewritten_config_files[config_file] = rewritten_contents.replace(local_path, remote_path)
+
+ def __items(self):
+ items = [self.rewritten_command_line]
+ items.extend(self.rewritten_config_files.values())
+ return items
+
+
+class FileStager(object):
+ """
+ Objects of the FileStager class interact with an LWR client object to
+ stage the files required to run jobs on a remote LWR server.
+
+ **Parameters**
+
+ client : Client
+ LWR client object.
+ command_line : str
+ The local command line to execute, this will be rewritten for the remote server.
+ config_files : list
+ List of Galaxy 'configfile's produced for this job. These will be rewritten and sent to remote server.
+ input_files : list
+ List of input files used by job. These will be transferred and references rewritten.
+ output_files : list
+ List of output_files produced by job.
+ tool_dir : str
+ Directory containing tool to execute (if a wrapper is used, it will be transferred to remote server).
+ working_directory : str
+ Local path created by Galaxy for running this job.
+
+ """
+
+ def __init__(self, client, tool, command_line, config_files, input_files, output_files, working_directory):
+ """
+ """
+ self.client = client
+ self.command_line = command_line
+ self.config_files = config_files
+ self.input_files = input_files
+ self.output_files = output_files
+ self.tool_id = tool.id
+ self.tool_version = tool.version
+ self.tool_dir = os.path.abspath(tool.tool_dir)
+ self.working_directory = working_directory
+
+ # Setup job inputs, these will need to be rewritten before
+ # shipping off to remote LWR server.
+ self.job_inputs = JobInputs(self.command_line, self.config_files)
+
+ self.file_renames = {}
+
+ self.__handle_setup()
+ self.__initialize_referenced_tool_files()
+ self.__upload_tool_files()
+ self.__upload_input_files()
+ self.__upload_working_directory_files()
+ self.__initialize_output_file_renames()
+ self.__initialize_task_output_file_renames()
+ self.__initialize_config_file_renames()
+ self.__handle_rewrites()
+ self.__upload_rewritten_config_files()
+
+ def __handle_setup(self):
+ job_config = self.client.setup(self.tool_id, self.tool_version)
+
+ self.new_working_directory = job_config['working_directory']
+ self.new_outputs_directory = job_config['outputs_directory']
+ self.remote_path_separator = job_config['path_separator']
+ # If remote LWR server assigned job id, use that otherwise
+ # just use local job_id assigned.
+ galaxy_job_id = self.client.job_id
+ self.job_id = job_config.get('job_id', galaxy_job_id)
+ if self.job_id != galaxy_job_id:
+ # Remote LWR server assigned an id different than the
+ # Galaxy job id, update client to reflect this.
+ self.client.job_id = self.job_id
+
+ def __initialize_referenced_tool_files(self):
+ self.referenced_tool_files = self.job_inputs.find_referenced_subfiles(self.tool_dir)
+
+ def __upload_tool_files(self):
+ for referenced_tool_file in self.referenced_tool_files:
+ tool_upload_response = self.client.upload_tool_file(referenced_tool_file)
+ self.file_renames[referenced_tool_file] = tool_upload_response['path']
+
+ def __upload_input_files(self):
+ for input_file in self.input_files:
+ self.__upload_input_file(input_file)
+ self.__upload_input_extra_files(input_file)
+
+ def __upload_input_file(self, input_file):
+ if self.job_inputs.path_referenced(input_file):
+ input_upload_response = self.client.upload_input(input_file)
+ self.file_renames[input_file] = input_upload_response['path']
+
+ def __upload_input_extra_files(self, input_file):
+ # TODO: Determine if this is object store safe and what needs to be
+ # done if it is not.
+ files_path = "%s_files" % input_file[0:-len(".dat")]
+ if os.path.exists(files_path) and self.job_inputs.path_referenced(files_path):
+ for extra_file in os.listdir(files_path):
+ extra_file_path = os.path.join(files_path, extra_file)
+ relative_path = os.path.basename(files_path)
+ extra_file_relative_path = os.path.join(relative_path, extra_file)
+ response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
+ self.file_renames[extra_file_path] = response['path']
+
+ def __upload_working_directory_files(self):
+ # Task manager stages files into working directory, these need to be
+ # uploaded if present.
+ for working_directory_file in os.listdir(self.working_directory):
+ path = os.path.join(self.working_directory, working_directory_file)
+ working_file_response = self.client.upload_working_directory_file(path)
+ self.file_renames[path] = working_file_response['path']
+
+ def __initialize_output_file_renames(self):
+ for output_file in self.output_files:
+ self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
+ self.remote_path_separator,
+ os.path.basename(output_file))
+
+ def __initialize_task_output_file_renames(self):
+ for output_file in self.output_files:
+ name = os.path.basename(output_file)
+ self.file_renames[os.path.join(self.working_directory, name)] = r'%s%s%s' % (self.new_working_directory,
+ self.remote_path_separator,
+ name)
+
+ def __initialize_config_file_renames(self):
+ for config_file in self.config_files:
+ self.file_renames[config_file] = r'%s%s%s' % (self.new_working_directory,
+ self.remote_path_separator,
+ os.path.basename(config_file))
+
+ def __rewrite_paths(self, contents):
+ new_contents = contents
+ for local_path, remote_path in self.file_renames.iteritems():
+ new_contents = new_contents.replace(local_path, remote_path)
+ return new_contents
+
+ def __handle_rewrites(self):
+ for local_path, remote_path in self.file_renames.iteritems():
+ self.job_inputs.rewrite_paths(local_path, remote_path)
+
+ def __upload_rewritten_config_files(self):
+ for config_file, new_config_contents in self.job_inputs.rewritten_config_files.iteritems():
+ self.client.upload_config_file(config_file, new_config_contents)
+
+ def get_rewritten_command_line(self):
+ """
+ Returns the rewritten version of the command line to execute suitable
+ for remote host.
+ """
+ return self.job_inputs.rewritten_command_line
+
+
+def _read(path):
+ """
+ Utility method to quickly read small files (config files and tool
+ wrappers) into memory as strings.
+ """
+ input = open(path, "r")
+ try:
+ return input.read()
+ finally:
+ input.close()
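For intuition, the `file_renames` table built by the stager maps local Galaxy paths to their remote equivalents before the single rewrite pass; illustrative values only (the real ones come from the LWR setup() response and the upload calls above):

    # Illustrative local-to-remote rename table; values are hypothetical.
    file_renames = {
        "/galaxy/files/dataset_1.dat": "/lwr/staging/123/inputs/dataset_1.dat",   # input
        "/galaxy/files/dataset_2.dat": "/lwr/staging/123/outputs/dataset_2.dat",  # output
        "/galaxy/configs/tmp_config":  "/lwr/staging/123/working/tmp_config",     # configfile
    }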
https://bitbucket.org/galaxy/galaxy-central/commits/89fa5f5873e4/
changeset: 89fa5f5873e4
user: jmchilton
date: 2013-03-10 06:51:59
summary: Update job_conf.xml.sample_advanced with LWR usage examples.
affected #: 1 file
diff -r 547d6844325ad305cc5d713365e3d66d75bbb5c1 -r 89fa5f5873e45f4e1ef5eabbf226d1dd528ff4d1 job_conf.xml.sample_advanced
--- a/job_conf.xml.sample_advanced
+++ b/job_conf.xml.sample_advanced
@@ -7,6 +7,7 @@
<plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner"/><plugin id="pbs" type="runner" load="galaxy.jobs.runners.pbs:PBSJobRunner" workers="2"/><plugin id="gridengine" type="runner" load="galaxy.jobs.runners.drmaa:DRMAARunner"/>
+ <plugin id="lwr" type="runner" load="galaxy.jobs.runners.lwr.LwrJobRunner" /><!-- https://lwr.readthedocs.org --></plugins><handlers default="handlers"><!-- Additional job handlers - the id should match the name of a
@@ -40,6 +41,11 @@
<param id="type">python</param><param id="function">foo</param></destination>
+ <destination id="secure_lwr" runner="lwr">
+ <param id="url">https://windowshost.examle.com:8913/</param>
+ <!-- If set, private_token must match token remote LWR server configured with. -->
+ <param id="private_token">123456789changeme</param>
+ </destination></destinations><tools><!-- Tools can be configured to use specific destinations or handlers,
https://bitbucket.org/galaxy/galaxy-central/commits/d0dd203bdaaf/
changeset: d0dd203bdaaf
user: jmchilton
date: 2013-03-10 06:51:59
summary: Small pbs runner fixes.
affected #: 1 file
diff -r 89fa5f5873e45f4e1ef5eabbf226d1dd528ff4d1 -r d0dd203bdaafd5507998241cdd096430e4f21f1f lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -173,7 +173,9 @@
return JobDestination(runner='pbs', params=params)
def parse_destination_params(self, params):
- """A wrapper method around __args_to_attrs() that allow administrators to define PBS params as either command-line options (as in ``qsub(1B)``) or more human-readable "long" args (as in ``pbs_submit(3B)``).
+ """A wrapper method around __args_to_attrs() that allow administrators to define PBS
+ params as either command-line options (as in ``qsub(1B)``) or more human-readable "long"
+ args (as in ``pbs_submit(3B)``).
:returns: list of dicts -- The dicts map directly to pbs attropl structs (see ``pbs_submit(3B)``)
"""
@@ -185,7 +187,7 @@
arg = arg.lstrip('-')
args[arg] = value
except:
- log.warning('Unrecognized long argument in destination params: %s' % k)
+ log.warning('Unrecognized long argument in destination params: %s' % arg)
return self.__args_to_attrs(args)
# Internal stuff
https://bitbucket.org/galaxy/galaxy-central/commits/4e0659495eb2/
changeset: 4e0659495eb2
user: jmchilton
date: 2013-03-10 06:51:59
summary: Allow child param elements to appear in plugin elements of job_conf.xml. Pass these as keyword arguments into job runner constructor.
affected #: 1 file
diff -r d0dd203bdaafd5507998241cdd096430e4f21f1f -r 4e0659495eb2ad674bcc0a08cc1a9d211f691104 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -126,7 +126,12 @@
for plugin in self.__findall_with_required(plugins, 'plugin', ('id', 'type', 'load')):
if plugin.get('type') == 'runner':
workers = plugin.get('workers', plugins.get('workers', JobConfiguration.DEFAULT_NWORKERS))
- self.runner_plugins.append(dict(id=plugin.get('id'), load=plugin.get('load'), workers=int(workers)))
+ runner_kwds = self.__get_params(plugin)
+ runner_info = dict(id=plugin.get('id'),
+ load=plugin.get('load'),
+ workers=int(workers),
+ kwds=runner_kwds)
+ self.runner_plugins.append(runner_info)
else:
log.error('Unknown plugin type: %s' % plugin.get('type'))
# Load tasks if configured
@@ -480,7 +485,7 @@
log.warning("Job runner classes must be subclassed from BaseJobRunner, %s has bases: %s" % (id, runner_class.__bases__))
continue
try:
- rval[id] = runner_class( self.app, runner['workers'] )
+ rval[id] = runner_class( self.app, runner['workers'], **runner['kwds'] )
except TypeError:
log.warning( "Job runner '%s:%s' has not been converted to a new-style runner" % ( module_name, class_name ) )
rval[id] = runner_class( self.app )
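Conceptually, `<param>` children of a `<plugin>` element now flow straight through to the runner constructor; a restatement of the loader change with hypothetical stand-ins (`load_class` and `app` are not real names from this diff):

    # Conceptual sketch of the new plugin loading path.
    plugin = dict(id='lwr',
                  load='galaxy.jobs.runners.lwr:LwrJobRunner',
                  workers=2,
                  kwds={'transport': 'curl'})      # from <param> children
    runner_class = load_class(plugin['load'])      # hypothetical helper
    runner = runner_class(app, plugin['workers'], **plugin['kwds'])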
https://bitbucket.org/galaxy/galaxy-central/commits/71da862d92ab/
changeset: 71da862d92ab
user: jmchilton
date: 2013-03-10 06:51:59
summary: Use new plugin param passing from previous changeset to provide a much cleaner mechanism for specifying the LWR transport type. To enable pycurl-based transport for the LWR client, one now just needs to add <param id="transport">curl</param> to the plugin definition in job_conf.xml.
Updates LWR client through LWR changeset: https://bitbucket.org/jmchilton/lwr/commits/b09da65bd2ffe12b28cf6c545c7d052….
affected #: 4 files
Diff not available.
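The diff is not shown, but based on the summary and the job_conf.xml.sample_advanced entries added earlier in this series, the plugin definition would look roughly like:

    <plugin id="lwr" type="runner" load="galaxy.jobs.runners.lwr:LwrJobRunner">
        <param id="transport">curl</param>
    </plugin>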
https://bitbucket.org/galaxy/galaxy-central/commits/3b6c209d2d07/
changeset: 3b6c209d2d07
user: jmchilton
date: 2013-03-17 18:39:17
summary: Task job runner fixes.
affected #: 2 files
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/67b37b0c11e7/
changeset: 67b37b0c11e7
user: jmchilton
date: 2013-03-17 20:14:22
summary: Fix for job runner configuration dicts without new kwds pair present.
affected #: 1 file
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/7a4dd13dac9b/
changeset: 7a4dd13dac9b
user: natefoo
date: 2013-03-18 16:21:22
summary: Merged in jmchilton/galaxy-central-lwr (pull request #138)
LWR Updates and Other Small Job Destination Tweaks
affected #: 10 files
Diff not available.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
0 new commits in galaxy-central:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Rename community_wsgi.ini.sample to be tool_shed_wsgi.ini.sample, run_community.sh to run_tool_shed.sh, and fix references to these files in the code.
by commits-noreply@bitbucket.org 18 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/da3aece419ca/
changeset: da3aece419ca
user: greg
date: 2013-03-18 15:24:03
summary: Rename community_wsgi.ini.sample to be tool_shed_wsgi.ini.sample, run_community.sh to run_tool_shed.sh, and fix references to these files in the code.
affected #: 8 files
diff -r 6be1fed44bf65402f11d2bd4b576fa76941f1393 -r da3aece419ca2940c4a8a3a059e3bfe25a98098b community_wsgi.ini.sample
--- a/community_wsgi.ini.sample
+++ /dev/null
@@ -1,98 +0,0 @@
-# ---- HTTP Server ----------------------------------------------------------
-
-[server:main]
-
-use = egg:Paste#http
-port = 9009
-
-# The address on which to listen. By default, only listen to localhost (the tool shed will not
-# be accessible over the network). Use '0.0.0.0' to listen on all available network interfaces.
-#host = 0.0.0.0
-host = 127.0.0.1
-
-use_threadpool = true
-threadpool_workers = 10
-
-# ---- Galaxy Webapps Community Interface -------------------------------------------------
-
-[app:main]
-
-# Specifies the factory for the universe WSGI application
-paste.app_factory = galaxy.webapps.tool_shed.buildapp:app_factory
-log_level = DEBUG
-
-# Database connection
-database_file = database/community.sqlite
-# You may use a SQLAlchemy connection string to specify an external database instead
-#database_connection = postgres:///community_test?host=/var/run/postgresql
-
-# Where the hgweb.config file is stored. The default is the Galaxy installation directory.
-#hgweb_config_dir = None
-
-# Where dataset files are saved
-file_path = database/community_files
-# Temporary storage for additional datasets, this should be shared through the cluster
-new_file_path = database/tmp
-
-# Session support (beaker)
-use_beaker_session = True
-session_type = memory
-session_data_dir = %(here)s/database/beaker_sessions
-session_key = galaxysessions
-session_secret = changethisinproduction
-
-# -- Users and Security
-
-# Galaxy encodes various internal values when these values will be output in
-# some format (for example, in a URL or cookie). You should set a key to be
-# used by the algorithm that encodes and decodes these values. It can be any
-# string. If left unchanged, anyone could construct a cookie that would grant
-# them access to others' sessions.
-id_secret = changethisinproductiontoo
-
-# User authentication can be delegated to an upstream proxy server (usually
-# Apache). The upstream proxy should set a REMOTE_USER header in the request.
-# Enabling remote user disables regular logins. For more information, see:
-# http://wiki.g2.bx.psu.edu/Admin/Config/Apache%20Proxy
-#use_remote_user = False
-
-# Configuration for debugging middleware
-debug = true
-use_lint = false
-
-# NEVER enable this on a public site (even test or QA)
-# use_interactive = true
-
-# this should be a comma-separated list of valid Galaxy users
-#admin_users = user1@example.org,user2@example.org
-
-# Force everyone to log in (disable anonymous access)
-require_login = False
-
-# path to sendmail
-sendmail_path = /usr/sbin/sendmail
-
-# For use by email messages sent from the tool shed
-#smtp_server = smtp.your_tool_shed_server
-#email_from = your_tool_shed_email@server
-
-# The URL linked by the "Support" link in the "Help" menu.
-#support_url = http://wiki.g2.bx.psu.edu/Support
-
-# Write thread status periodically to 'heartbeat.log' (careful, uses disk space rapidly!)
-## use_heartbeat = True
-
-# Profiling middleware (cProfile based)
-## use_profile = True
-
-# Use the new iframe / javascript based layout
-use_new_layout = true
-
-# Serving static files (needed if running standalone)
-static_enabled = True
-static_cache_time = 360
-static_dir = %(here)s/static/
-static_images_dir = %(here)s/static/images
-static_favicon_dir = %(here)s/static/favicon.ico
-static_scripts_dir = %(here)s/static/scripts/
-static_style_dir = %(here)s/static/june_2007_style/blue
diff -r 6be1fed44bf65402f11d2bd4b576fa76941f1393 -r da3aece419ca2940c4a8a3a059e3bfe25a98098b run_community.sh
--- a/run_community.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/sh
-
-cd `dirname $0`
-
-SAMPLES="
- community_wsgi.ini.sample
- datatypes_conf.xml.sample
- external_service_types_conf.xml.sample
- migrated_tools_conf.xml.sample
- reports_wsgi.ini.sample
- shed_tool_conf.xml.sample
- tool_conf.xml.sample
- shed_tool_data_table_conf.xml.sample
- tool_data_table_conf.xml.sample
- tool_sheds_conf.xml.sample
- openid_conf.xml.sample
- universe_wsgi.ini.sample
- tool-data/shared/ncbi/builds.txt.sample
- tool-data/shared/ensembl/builds.txt.sample
- tool-data/shared/ucsc/builds.txt.sample
- tool-data/shared/ucsc/publicbuilds.txt.sample
- tool-data/shared/igv/igv_build_sites.txt.sample
- tool-data/shared/rviewer/rviewer_build_sites.txt.sample
- tool-data/*.sample
- static/welcome.html.sample
-"
-
-# Create any missing config/location files
-for sample in $SAMPLES; do
- file=`echo $sample | sed -e 's/\.sample$//'`
- if [ ! -f "$file" -a -f "$sample" ]; then
- echo "Initializing $file from `basename $sample`"
- cp $sample $file
- fi
-done
-
-python ./scripts/paster.py serve community_wsgi.ini --pid-file=community_webapp.pid --log-file=community_webapp.log $@
diff -r 6be1fed44bf65402f11d2bd4b576fa76941f1393 -r da3aece419ca2940c4a8a3a059e3bfe25a98098b run_tool_shed.sh
--- /dev/null
+++ b/run_tool_shed.sh
@@ -0,0 +1,37 @@
+#!/bin/sh
+
+cd `dirname $0`
+
+SAMPLES="
+ tool_shed_wsgi.ini.sample
+ datatypes_conf.xml.sample
+ external_service_types_conf.xml.sample
+ migrated_tools_conf.xml.sample
+ reports_wsgi.ini.sample
+ shed_tool_conf.xml.sample
+ tool_conf.xml.sample
+ shed_tool_data_table_conf.xml.sample
+ tool_data_table_conf.xml.sample
+ tool_sheds_conf.xml.sample
+ openid_conf.xml.sample
+ universe_wsgi.ini.sample
+ tool-data/shared/ncbi/builds.txt.sample
+ tool-data/shared/ensembl/builds.txt.sample
+ tool-data/shared/ucsc/builds.txt.sample
+ tool-data/shared/ucsc/publicbuilds.txt.sample
+ tool-data/shared/igv/igv_build_sites.txt.sample
+ tool-data/shared/rviewer/rviewer_build_sites.txt.sample
+ tool-data/*.sample
+ static/welcome.html.sample
+"
+
+# Create any missing config/location files
+for sample in $SAMPLES; do
+ file=`echo $sample | sed -e 's/\.sample$//'`
+ if [ ! -f "$file" -a -f "$sample" ]; then
+ echo "Initializing $file from `basename $sample`"
+ cp $sample $file
+ fi
+done
+
+python ./scripts/paster.py serve tool_shed_wsgi.ini --pid-file=tool_shed_webapp.pid --log-file=tool_shed_webapp.log $@
diff -r 6be1fed44bf65402f11d2bd4b576fa76941f1393 -r da3aece419ca2940c4a8a3a059e3bfe25a98098b scripts/db_shell.py
--- a/scripts/db_shell.py
+++ b/scripts/db_shell.py
@@ -30,7 +30,7 @@
# Need to pop the last arg so the command line args will be correct
# for sqlalchemy-migrate
webapp = sys.argv.pop()
- config_file = 'community_wsgi.ini'
+ config_file = 'tool_shed_wsgi.ini'
repo = 'lib/galaxy/webapps/tool_shed/model/migrate'
else:
# Poor man's optparse
diff -r 6be1fed44bf65402f11d2bd4b576fa76941f1393 -r da3aece419ca2940c4a8a3a059e3bfe25a98098b scripts/manage_db.py
--- a/scripts/manage_db.py
+++ b/scripts/manage_db.py
@@ -18,7 +18,7 @@
# Need to pop the last arg so the command line args will be correct
# for sqlalchemy-migrate
webapp = sys.argv.pop()
- config_file = 'community_wsgi.ini'
+ config_file = 'tool_shed_wsgi.ini'
repo = 'lib/galaxy/webapps/tool_shed/model/migrate'
else:
# Poor man's optparse
diff -r 6be1fed44bf65402f11d2bd4b576fa76941f1393 -r da3aece419ca2940c4a8a3a059e3bfe25a98098b scripts/tool_shed/migrate_tools_to_repositories.py
--- a/scripts/tool_shed/migrate_tools_to_repositories.py
+++ b/scripts/tool_shed/migrate_tools_to_repositories.py
@@ -10,7 +10,7 @@
0. This script must be run on a repo updated to changeset: 5621:4618be57481b
-1. Before running this script, make sure the following config setting is set in community_wsgi.ini
+1. Before running this script, make sure the following config setting is set in tool_shed_wsgi.ini
# Enable next-gen tool shed features
enable_next_gen_tool_shed = True
@@ -262,7 +262,7 @@
print " "
print "##########################################"
print "%s - Migrating current tool archives to new tool repositories" % now
- # community_wsgi.ini file
+ # tool_shed_wsgi.ini file
ini_file = sys.argv[1]
conf_parser = ConfigParser.ConfigParser( {'here':os.getcwd()} )
conf_parser.read( ini_file )
diff -r 6be1fed44bf65402f11d2bd4b576fa76941f1393 -r da3aece419ca2940c4a8a3a059e3bfe25a98098b test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -175,7 +175,7 @@
os.environ[ 'TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF' ] = shed_tool_data_table_conf_file
# ---- Build Tool Shed Application --------------------------------------------------
toolshedapp = None
- global_conf = { '__file__' : 'community_wsgi.ini.sample' }
+ global_conf = { '__file__' : 'tool_shed_wsgi.ini.sample' }
# if not toolshed_database_connection.startswith( 'sqlite://' ):
# kwargs[ 'database_engine_option_max_overflow' ] = '20'
if tool_dependency_dir is not None:
diff -r 6be1fed44bf65402f11d2bd4b576fa76941f1393 -r da3aece419ca2940c4a8a3a059e3bfe25a98098b tool_shed_wsgi.ini.sample
--- /dev/null
+++ b/tool_shed_wsgi.ini.sample
@@ -0,0 +1,98 @@
+# ---- HTTP Server ----------------------------------------------------------
+
+[server:main]
+
+use = egg:Paste#http
+port = 9009
+
+# The address on which to listen. By default, only listen to localhost (the tool shed will not
+# be accessible over the network). Use '0.0.0.0' to listen on all available network interfaces.
+#host = 0.0.0.0
+host = 127.0.0.1
+
+use_threadpool = true
+threadpool_workers = 10
+
+# ---- Galaxy Webapps Community Interface -------------------------------------------------
+
+[app:main]
+
+# Specifies the factory for the universe WSGI application
+paste.app_factory = galaxy.webapps.tool_shed.buildapp:app_factory
+log_level = DEBUG
+
+# Database connection
+database_file = database/community.sqlite
+# You may use a SQLAlchemy connection string to specify an external database instead
+#database_connection = postgres:///community_test?host=/var/run/postgresql
+
+# Where the hgweb.config file is stored. The default is the Galaxy installation directory.
+#hgweb_config_dir = None
+
+# Where dataset files are saved
+file_path = database/community_files
+# Temporary storage for additional datasets; this directory should be shared across the cluster
+new_file_path = database/tmp
+
+# Session support (beaker)
+use_beaker_session = True
+session_type = memory
+session_data_dir = %(here)s/database/beaker_sessions
+session_key = galaxysessions
+session_secret = changethisinproduction
+
+# -- Users and Security
+
+# Galaxy encodes various internal values when these values will be output in
+# some format (for example, in a URL or cookie). You should set a key to be
+# used by the algorithm that encodes and decodes these values. It can be any
+# string. If left unchanged, anyone could construct a cookie that would grant
+# them access to others' sessions.
+id_secret = changethisinproductiontoo
+
+# User authentication can be delegated to an upstream proxy server (usually
+# Apache). The upstream proxy should set a REMOTE_USER header in the request.
+# Enabling remote user disables regular logins. For more information, see:
+# http://wiki.g2.bx.psu.edu/Admin/Config/Apache%20Proxy
+#use_remote_user = False
+
+# Configuration for debugging middleware
+debug = true
+use_lint = false
+
+# NEVER enable this on a public site (even test or QA)
+# use_interactive = true
+
+# this should be a comma-separated list of valid Galaxy users
+#admin_users = user1@example.org,user2@example.org
+
+# Force everyone to log in (disable anonymous access)
+require_login = False
+
+# path to sendmail
+sendmail_path = /usr/sbin/sendmail
+
+# For use by email messages sent from the tool shed
+#smtp_server = smtp.your_tool_shed_server
+#email_from = your_tool_shed_email@server
+
+# The URL linked by the "Support" link in the "Help" menu.
+#support_url = http://wiki.g2.bx.psu.edu/Support
+
+# Write thread status periodically to 'heartbeat.log' (careful, uses disk space rapidly!)
+## use_heartbeat = True
+
+# Profiling middleware (cProfile based)
+## use_profile = True
+
+# Use the new iframe / javascript based layout
+use_new_layout = true
+
+# Serving static files (needed if running standalone)
+static_enabled = True
+static_cache_time = 360
+static_dir = %(here)s/static/
+static_images_dir = %(here)s/static/images
+static_favicon_dir = %(here)s/static/favicon.ico
+static_scripts_dir = %(here)s/static/scripts/
+static_style_dir = %(here)s/static/june_2007_style/blue
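Taken together with the run script earlier in this commit, a standalone bring-up copies this sample to tool_shed_wsgi.ini and serves it with paster. A hedged programmatic equivalent using paste.deploy's loaders (assuming Paste and PasteDeploy are available, as Galaxy's eggs provide):

    from paste.deploy import loadapp, loadserver

    # Build the WSGI app from [app:main] and the HTTP server from [server:main].
    app = loadapp( 'config:tool_shed_wsgi.ini', relative_to='.' )
    server = loadserver( 'config:tool_shed_wsgi.ini', relative_to='.' )
    server( app )  # listens on 127.0.0.1:9009 per the sample settings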
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Import clean up and documentation for compute tool.
by commits-noreply@bitbucket.org 18 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6be1fed44bf6/
changeset: 6be1fed44bf6
user: jgoecks
date: 2013-03-18 15:09:43
summary: Import clean up and documentation for compute tool.
affected #: 1 file
diff -r 0ddc03509b24d4d4e9024902546025df185c6a18 -r 6be1fed44bf65402f11d2bd4b576fa76941f1393 tools/stats/column_maker.py
--- a/tools/stats/column_maker.py
+++ b/tools/stats/column_maker.py
@@ -1,13 +1,13 @@
#!/usr/bin/env python
# This tool takes a tab-delimited textfile as input and creates another column in the file which is the result of
# a computation performed on every row in the original file. The tool will skip over invalid lines within the file,
-# informing the user about the number of lines skipped.
-import sys, re, os.path
-from galaxy import eggs
-from galaxy.tools import validation
-from galaxy.datatypes import metadata
+# informing the user about the number of lines skipped.
+
+import sys, re
+# These functions may be used in the compute expression:
from math import log,exp,sqrt,ceil,floor
+
assert sys.version_info[:2] >= ( 2, 4 )
def stop_err( msg ):
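The math names are imported wholesale because the tool evaluates a user-supplied expression against every row; a minimal sketch of that per-row pattern (the expression, column names, and file handling are illustrative, not the tool's actual parsing logic):

    import sys
    from math import log, exp, sqrt, ceil, floor

    expression = 'sqrt( c1 ) + c2'  # hypothetical user-supplied expression
    skipped = 0
    for line in open( 'input.tabular' ):
        fields = line.rstrip( '\r\n' ).split( '\t' )
        try:
            c1, c2 = float( fields[ 0 ] ), float( fields[ 1 ] )
            new_value = eval( expression )  # sqrt/log/etc. resolve via the imports
            print '\t'.join( fields + [ str( new_value ) ] )
        except Exception:
            skipped += 1  # invalid lines are skipped and counted
    if skipped:
        print >> sys.stderr, 'Skipped %d invalid lines' % skipped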
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Eliminate a duplicate method from shed_util_common and add missing documentation to several utility methods.
by commits-noreply@bitbucket.org 18 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0ddc03509b24/
changeset: 0ddc03509b24
user: greg
date: 2013-03-18 14:55:28
summary: Eliminate a duplicate method from shed_util_common and add missing documentation to several utility methods.
affected #: 6 files
diff -r 92275794f6efe2f2e17c2d3785061e88f2f23f84 -r 0ddc03509b24d4d4e9024902546025df185c6a18 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -184,7 +184,7 @@
params = util.Params( kwd )
repository_id = params.get( 'id', None )
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
url = suc.url_join( tool_shed_url,
'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
( web.url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision ) )
@@ -314,7 +314,7 @@
need it so that we can derive the tool shed from which it was installed.
"""
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
url = suc.url_join( tool_shed_url,
'repository/get_repository_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
( repository_name, repository_owner, changeset_revision ) )
@@ -337,7 +337,7 @@
it so that we can derive the tool shed from which it was installed.
"""
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
url = suc.url_join( tool_shed_url,
'repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
( repository_name, repository_owner, changeset_revision ) )
@@ -359,7 +359,7 @@
uninstalled tool shed repository.
"""
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
url = suc.url_join( tool_shed_url,
'repository/get_updated_repository_information?name=%s&owner=%s&changeset_revision=%s' % \
( repository_name, repository_owner, changeset_revision ) )
@@ -654,7 +654,7 @@
suc.update_tool_shed_repository_status( trans.app,
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, tool_shed_repository.tool_shed )
url = suc.url_join( tool_shed_url,
'/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
@@ -711,7 +711,7 @@
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
if not repository:
return trans.show_error_message( 'Invalid repository specified.' )
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
if repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING ]:
tool_shed_repository_ids = [ repository_id ]
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
@@ -1172,7 +1172,7 @@
clone_dir = os.path.join( tool_path, suc.generate_tool_shed_repository_install_dir( repository_clone_url,
tool_shed_repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, tool_shed_repository.tool_shed )
tool_section = None
tool_panel_section = kwd.get( 'tool_panel_section', '' )
new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
@@ -1317,7 +1317,7 @@
tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
metadata = tool_shed_repository.metadata
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, tool_shed_repository.tool_shed )
tool_path, relative_install_dir = tool_shed_repository.get_tool_relative_path( trans.app )
if latest_changeset_revision and latest_ctx_rev:
# There are updates available in the tool shed for the repository, so use the received dependency information which was retrieved from
@@ -1481,7 +1481,7 @@
def reset_repository_metadata( self, trans, id ):
"""Reset all metadata on a single installed tool shed repository."""
repository = suc.get_installed_tool_shed_repository( trans, id )
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
@@ -1560,7 +1560,7 @@
metadata for the repository's revision in the Galaxy database.
"""
repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
url = suc.url_join( tool_shed_url,
'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
( repository.name, repository.owner, repository.changeset_revision ) )
diff -r 92275794f6efe2f2e17c2d3785061e88f2f23f84 -r 0ddc03509b24d4d4e9024902546025df185c6a18 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -279,7 +279,7 @@
if self.__isinstalled( clone_dir ):
print "Skipping automatic install of repository '", name, "' because it has already been installed in location ", clone_dir
else:
- tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
+ tool_shed_url = suc.get_url_from_tool_shed( self.app, self.tool_shed )
repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
relative_install_dir = os.path.join( relative_clone_dir, name )
install_dir = os.path.join( clone_dir, name )
@@ -351,16 +351,6 @@
@property
def non_shed_tool_panel_configs( self ):
return common_util.get_non_shed_tool_panel_configs( self.app )
- def __get_url_from_tool_shed( self, tool_shed ):
- # The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like:
- # http://toolshed.g2.bx.psu.edu/
- for shed_name, shed_url in self.app.tool_shed_registry.tool_sheds.items():
- if shed_url.find( tool_shed ) >= 0:
- if shed_url.endswith( '/' ):
- shed_url = shed_url.rstrip( '/' )
- return shed_url
- # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
- return None
def __isinstalled( self, clone_dir ):
full_path = os.path.abspath( clone_dir )
if os.path.exists( full_path ):
diff -r 92275794f6efe2f2e17c2d3785061e88f2f23f84 -r 0ddc03509b24d4d4e9024902546025df185c6a18 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -77,7 +77,7 @@
def get_update_to_changeset_revision_and_ctx_rev( trans, repository ):
"""Return the changeset revision hash to which the repository can be updated."""
changeset_revision_dict = {}
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
url = suc.url_join( tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \
( repository.name, repository.owner, repository.installed_changeset_revision ) )
try:
diff -r 92275794f6efe2f2e17c2d3785061e88f2f23f84 -r 0ddc03509b24d4d4e9024902546025df185c6a18 lib/tool_shed/galaxy_install/update_manager.py
--- a/lib/tool_shed/galaxy_install/update_manager.py
+++ b/lib/tool_shed/galaxy_install/update_manager.py
@@ -34,7 +34,7 @@
self.sleeper.sleep( self.seconds_to_sleep )
log.info( 'Transfer job restarter shutting down...' )
def check_for_update( self, repository ):
- tool_shed_url = suc.get_url_from_repository_tool_shed( self.app, repository )
+ tool_shed_url = suc.get_url_from_tool_shed( self.app, repository.tool_shed )
url = '%s/repository/check_for_updates?name=%s&owner=%s&changeset_revision=%s&from_update_manager=True' % \
( tool_shed_url, repository.name, repository.owner, repository.changeset_revision )
try:
diff -r 92275794f6efe2f2e17c2d3785061e88f2f23f84 -r 0ddc03509b24d4d4e9024902546025df185c6a18 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -1432,7 +1432,7 @@
def reset_all_metadata_on_installed_repository( trans, id ):
"""Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
repository = suc.get_installed_tool_shed_repository( trans, id )
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
diff -r 92275794f6efe2f2e17c2d3785061e88f2f23f84 -r 0ddc03509b24d4d4e9024902546025df185c6a18 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -160,7 +160,7 @@
return False, error_message
def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
- # Persist the current in-memory list of config_elems to a file named by the value of config_filename.
+ """Persist the current in-memory list of config_elems to a file named by the value of config_filename."""
fd, filename = tempfile.mkstemp()
os.write( fd, '<?xml version="1.0"?>\n' )
os.write( fd, '<toolbox tool_path="%s">\n' % str( tool_path ) )
@@ -242,7 +242,7 @@
def generate_clone_url_for_installed_repository( app, repository ):
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
- tool_shed_url = get_url_from_repository_tool_shed( app, repository )
+ tool_shed_url = get_url_from_tool_shed( app, repository.tool_shed )
return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
@@ -469,7 +469,7 @@
return None
def get_installed_tool_shed_repository( trans, id ):
- """Get a repository on the Galaxy side from the database via id"""
+ """Get a tool shed repository record from the Galaxy database defined by the id."""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
def get_named_tmpfile_from_ctx( ctx, filename, dir ):
@@ -516,6 +516,11 @@
return None
def get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision ):
+ """
+ Return a tool shed repository database record defined by the combination of tool shed, repository name, repository owner and changeset_revision
+ or installed_changeset_revision. A new tool shed repository record will be created if one is not located.
+ """
+ # This method is used only in Galaxy, not the tool shed.
repository = get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision )
if not repository:
tool_shed_url = get_url_from_tool_shed( trans.app, tool_shed )
@@ -572,7 +577,7 @@
previous_changeset_revision = changeset_revision
def get_repo_info_tuple_contents( repo_info_tuple ):
- # Take care in handling the repo_info_tuple as it evolves over time as new tool shed features are introduced.
+ """Take care in handling the repo_info_tuple as it evolves over time as new tool shed features are introduced."""
if len( repo_info_tuple ) == 6:
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
repository_dependencies = None
@@ -613,6 +618,8 @@
return None
def get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ):
+ """Return a tool shed repository database record that is defined by either the current changeset revision or the installed_changeset_revision."""
+ # This method is used only in Galaxy, not the tool shed.
repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app=app,
tool_shed=tool_shed,
name=name,
@@ -627,6 +634,7 @@
return repository
def get_repository_file_contents( file_path ):
+ """Return the display-safe contents of a repository file."""
if checkers.is_gzip( file_path ):
safe_str = to_safe_string( '\ngzip compressed file\n' )
elif checkers.is_bz2( file_path ):
@@ -646,6 +654,7 @@
return safe_str
def get_repository_files( trans, folder_path ):
+ """Return the file hierarchy of a tool shed repository."""
contents = []
for item in os.listdir( folder_path ):
# Skip .hg directories
@@ -684,6 +693,7 @@
return None
def get_repository_owner( cleaned_repository_url ):
+ """Gvien a "cleaned" repository clone URL, return the owner of the repository."""
items = cleaned_repository_url.split( '/repos/' )
repo_path = items[ 1 ]
if repo_path.startswith( '/' ):
@@ -691,11 +701,13 @@
return repo_path.lstrip( '/' ).split( '/' )[ 0 ]
def get_repository_owner_from_clone_url( repository_clone_url ):
+ """Given a repository clone URL, return the owner of the repository."""
tmp_url = clean_repository_clone_url( repository_clone_url )
tool_shed = tmp_url.split( '/repos/' )[ 0 ].rstrip( '/' )
return get_repository_owner( tmp_url )
def get_repository_tools_tups( app, metadata_dict ):
+ """Return a list of tuples of the form (relative_path, guid, tool) for each tool defined in the received tool shed repository metadata."""
repository_tools_tups = []
index, shed_conf_dict = get_shed_tool_conf_dict( app, metadata_dict.get( 'shed_config_filename' ) )
if 'tools' in metadata_dict:
@@ -713,6 +725,7 @@
return repository_tools_tups
def get_reversed_changelog_changesets( repo ):
+ """Return a list of changesets in reverse order from that provided by the repository manifest."""
reversed_changelog = []
for changeset in repo.changelog:
reversed_changelog.insert( 0, changeset )
@@ -728,6 +741,7 @@
return "-1:%s" % changeset_revision
def get_rev_label_from_changeset_revision( repo, changeset_revision ):
+ """Given a changeset revision hash, return two strings, the changeset rev and the changeset revision hash."""
ctx = get_changectx_for_changeset( repo, changeset_revision )
if ctx:
rev = '%04d' % ctx.rev()
@@ -748,8 +762,10 @@
return index, shed_tool_conf_dict
def get_tool_panel_config_tool_path_install_dir( app, repository ):
- # Return shed-related tool panel config, the tool_path configured in it, and the relative path to the directory where the
- # repository is installed. This method assumes all repository tools are defined in a single shed-related tool panel config.
+ """
+ Return shed-related tool panel config, the tool_path configured in it, and the relative path to the directory where the repository is installed.
+ This method assumes all repository tools are defined in a single shed-related tool panel config.
+ """
tool_shed = clean_tool_shed_url( repository.tool_shed )
partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
# Get the relative tool installation paths from each of the shed tool configs.
@@ -782,11 +798,14 @@
return None
def get_tool_shed_repository_by_id( trans, repository_id ):
+ """Return a tool shed repository database record defined by the id."""
+ # This method is used only in Galaxy, not the tool shed.
return trans.sa_session.query( trans.model.ToolShedRepository ) \
.filter( trans.model.ToolShedRepository.table.c.id == trans.security.decode_id( repository_id ) ) \
.first()
def get_tool_shed_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
+ """Return a tool shed repository database record defined by the combination of a tool_shed, repository name, repository owner and current changeet_revision."""
# This method is used only in Galaxy, not the tool shed.
sa_session = app.model.context.current
if tool_shed.find( '//' ) > 0:
@@ -800,6 +819,7 @@
.first()
def get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision ):
+ """Return a tool shed repository database record defined by the combination of a tool_shed, repository name, repository owner and installed_changeet_revision."""
# This method is used only in Galaxy, not the tool shed.
sa_session = app.model.context.current
if tool_shed.find( '//' ) > 0:
@@ -816,23 +836,12 @@
tmp_url = clean_repository_clone_url( repository_clone_url )
return tmp_url.split( '/repos/' )[ 0 ].rstrip( '/' )
-def get_url_from_repository_tool_shed( app, repository ):
+def get_url_from_tool_shed( app, tool_shed ):
"""
- The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is
- something like: http://toolshed.g2.bx.psu.edu/.
+ The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like:
+ http://toolshed.g2.bx.psu.edu/
"""
for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
- if shed_url.find( repository.tool_shed ) >= 0:
- if shed_url.endswith( '/' ):
- shed_url = shed_url.rstrip( '/' )
- return shed_url
- # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
- return None
-
-def get_url_from_tool_shed( app, tool_shed ):
- # The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like:
- # http://toolshed.g2.bx.psu.edu/
- for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
if shed_url.find( tool_shed ) >= 0:
if shed_url.endswith( '/' ):
shed_url = shed_url.rstrip( '/' )
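After this consolidation, callers pass the bare shed host (typically repository.tool_shed) instead of a repository object; a short usage sketch (the registry entry shown is illustrative):

    # Assuming tool_sheds_conf.xml registers http://toolshed.g2.bx.psu.edu/
    # in app.tool_shed_registry:
    tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
    # -> 'http://toolshed.g2.bx.psu.edu' (trailing slash stripped), or None
    #    if the shed is no longer configured in tool_sheds_conf.xml.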
@@ -856,26 +865,28 @@
return None
def handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=False, admin_only=False ):
- # There are 2 complementary features that enable a tool shed user to receive email notification:
- # 1. Within User Preferences, they can elect to receive email when the first (or first valid)
- # change set is produced for a new repository.
- # 2. When viewing or managing a repository, they can check the box labeled "Receive email alerts"
- # which caused them to receive email alerts when updates to the repository occur. This same feature
- # is available on a per-repository basis on the repository grid within the tool shed.
- #
- # There are currently 4 scenarios for sending email notification when a change is made to a repository:
- # 1. An admin user elects to receive email when the first change set is produced for a new repository
- # from User Preferences. The change set does not have to include any valid content. This allows for
- # the capture of inappropriate content being uploaded to new repositories.
- # 2. A regular user elects to receive email when the first valid change set is produced for a new repository
- # from User Preferences. This differs from 1 above in that the user will not receive email until a
- # change set tha tincludes valid content is produced.
- # 3. An admin user checks the "Receive email alerts" check box on the manage repository page. Since the
- # user is an admin user, the email will include information about both HTML and image content that was
- # included in the change set.
- # 4. A regular user checks the "Receive email alerts" check box on the manage repository page. Since the
- # user is not an admin user, the email will not include any information about both HTML and image content
- # that was included in the change set.
+ """
+ There are 2 complementary features that enable a tool shed user to receive email notification:
+ 1. Within User Preferences, they can elect to receive email when the first (or first valid)
+ change set is produced for a new repository.
+ 2. When viewing or managing a repository, they can check the box labeled "Receive email alerts"
+       which causes them to receive email alerts when updates to the repository occur. This same feature
+ is available on a per-repository basis on the repository grid within the tool shed.
+
+ There are currently 4 scenarios for sending email notification when a change is made to a repository:
+ 1. An admin user elects to receive email when the first change set is produced for a new repository
+ from User Preferences. The change set does not have to include any valid content. This allows for
+ the capture of inappropriate content being uploaded to new repositories.
+ 2. A regular user elects to receive email when the first valid change set is produced for a new repository
+ from User Preferences. This differs from 1 above in that the user will not receive email until a
+       change set that includes valid content is produced.
+ 3. An admin user checks the "Receive email alerts" check box on the manage repository page. Since the
+ user is an admin user, the email will include information about both HTML and image content that was
+ included in the change set.
+ 4. A regular user checks the "Receive email alerts" check box on the manage repository page. Since the
+ user is not an admin user, the email will not include any information about both HTML and image content
+ that was included in the change set.
+ """
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( get_configured_ui(), repo_dir )
sharable_link = generate_sharable_link_for_repository_in_tool_shed( trans, repository, changeset_revision=None )
@@ -948,6 +959,7 @@
log.exception( "An error occurred sending a tool shed repository update alert by email." )
def open_repository_files_folder( trans, folder_path ):
+ """Return a list of dictionaries, each of which contains information for a file or directory contained within a directory in a repository file hierarchy."""
try:
files_list = get_repository_files( trans, folder_path )
except OSError, e:
@@ -957,19 +969,20 @@
folder_contents = []
for filename in files_list:
is_folder = False
- if filename and filename[-1] == os.sep:
+ if filename and filename[ -1 ] == os.sep:
is_folder = True
if filename:
full_path = os.path.join( folder_path, filename )
- node = { "title": filename,
- "isFolder": is_folder,
- "isLazy": is_folder,
- "tooltip": full_path,
- "key": full_path }
+ node = { "title" : filename,
+ "isFolder" : is_folder,
+ "isLazy" : is_folder,
+ "tooltip" : full_path,
+ "key" : full_path }
folder_contents.append( node )
return folder_contents
def remove_dir( dir ):
+ """Attempt to remove a directory from disk."""
if os.path.exists( dir ):
try:
shutil.rmtree( dir )
@@ -1029,9 +1042,11 @@
return reversed_changelog
def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ):
+ """Return a reversed list of changesets in the repository changelog up to and including the included_upper_bounds_changeset_revision."""
return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision )
def strip_path( fpath ):
+ """Attempt to strip the path from a file name."""
if not fpath:
return fpath
try:
@@ -1072,12 +1087,15 @@
return text
def tool_shed_from_repository_clone_url( repository_clone_url ):
+ """Given a repository clone URL, return the tool shed that contains the repository."""
return clean_repository_clone_url( repository_clone_url ).split( '/repos/' )[ 0 ].rstrip( '/' )
def tool_shed_is_this_tool_shed( toolshed_base_url ):
+ """Determine if a tool shed is the current tool shed."""
return toolshed_base_url.rstrip( '/' ) == str( url_for( '/', qualified=True ) ).rstrip( '/' )
def translate_string( raw_text, to_html=True ):
+ """Return a subset of a string (up to MAX_CONTENT_SIZE) translated to a safe string for display in a browser."""
if raw_text:
if len( raw_text ) <= MAX_CONTENT_SIZE:
translated_string = to_safe_string( raw_text, to_html=to_html )
@@ -1141,12 +1159,14 @@
commands.update( get_configured_ui(), repo, rev=ctx_rev )
def update_tool_shed_repository_status( app, tool_shed_repository, status ):
+ """Update the status of a tool shed repository in the process of being installed into Galaxy."""
sa_session = app.model.context.current
tool_shed_repository.status = status
sa_session.add( tool_shed_repository )
sa_session.flush()
def url_join( *args ):
+ """Return a valid URL produced by appending a base URL and a set of request parameters."""
parts = []
for arg in args:
parts.append( arg.strip( '/' ) )
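The hunk truncates url_join's body; given the part-stripping shown, the pieces are presumably rejoined with '/'. A sketch of the expected behavior (the final join is an assumption; values are illustrative):

    def url_join( *args ):
        parts = []
        for arg in args:
            parts.append( arg.strip( '/' ) )
        return '/'.join( parts )  # assumed completion of the truncated body

    print url_join( 'http://toolshed.g2.bx.psu.edu/', 'repos', 'owner', 'name' )
    # -> http://toolshed.g2.bx.psu.edu/repos/owner/name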
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.