commit/galaxy-central: 2 new changesets
by commits-noreply@bitbucket.org
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6dc37563b0b7/
Changeset: 6dc37563b0b7
User: dan
Date: 2013-04-30 23:19:11
Summary: Update GenomeSpace serverurl.properties file location. Should resolve network issues experienced with Jumbo Frames and the Broad.
Affected #: 3 files
diff -r 9a21a5156b2efd6b4a169a62bd663bf69fdadf79 -r 6dc37563b0b744d8b571b0075110da54fd120df4 tools/genomespace/genomespace_exporter.py
--- a/tools/genomespace/genomespace_exporter.py
+++ b/tools/genomespace/genomespace_exporter.py
@@ -11,7 +11,7 @@
log = logging.getLogger( "tools.genomespace.genomespace_exporter" )#( __name__ )
GENOMESPACE_API_VERSION_STRING = "v1.0"
-GENOMESPACE_SERVER_URL_PROPERTIES = "http://www.genomespace.org/sites/genomespacefiles/config/serverurl.proper..."
+GENOMESPACE_SERVER_URL_PROPERTIES = "https://dm.genomespace.org/config/%s/serverurl.properties" % ( GENOMESPACE_API_VERSION_STRING )
CHUNK_SIZE = 2**20 #1mb
diff -r 9a21a5156b2efd6b4a169a62bd663bf69fdadf79 -r 6dc37563b0b744d8b571b0075110da54fd120df4 tools/genomespace/genomespace_file_browser.py
--- a/tools/genomespace/genomespace_file_browser.py
+++ b/tools/genomespace/genomespace_file_browser.py
@@ -9,7 +9,7 @@
import simplejson
GENOMESPACE_API_VERSION_STRING = "v1.0"
-GENOMESPACE_SERVER_URL_PROPERTIES = "http://www.genomespace.org/sites/genomespacefiles/config/serverurl.proper..."
+GENOMESPACE_SERVER_URL_PROPERTIES = "https://dm.genomespace.org/config/%s/serverurl.properties" % ( GENOMESPACE_API_VERSION_STRING )
CHUNK_SIZE = 2**20 #1mb
diff -r 9a21a5156b2efd6b4a169a62bd663bf69fdadf79 -r 6dc37563b0b744d8b571b0075110da54fd120df4 tools/genomespace/genomespace_importer.py
--- a/tools/genomespace/genomespace_importer.py
+++ b/tools/genomespace/genomespace_importer.py
@@ -13,7 +13,7 @@
from galaxy.datatypes.registry import Registry
GENOMESPACE_API_VERSION_STRING = "v1.0"
-GENOMESPACE_SERVER_URL_PROPERTIES = "http://www.genomespace.org/sites/genomespacefiles/config/serverurl.proper..."
+GENOMESPACE_SERVER_URL_PROPERTIES = "https://dm.genomespace.org/config/%s/serverurl.properties" % ( GENOMESPACE_API_VERSION_STRING )
CHUNK_SIZE = 2**20 #1mb
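For context, a minimal sketch (not part of the commit) of what the new constant expands to and how the properties file might be fetched and parsed with the Python 2 standard library; the simple key=value parsing is an assumption about the serverurl.properties format:
import urllib2
GENOMESPACE_API_VERSION_STRING = "v1.0"
GENOMESPACE_SERVER_URL_PROPERTIES = "https://dm.genomespace.org/config/%s/serverurl.properties" % ( GENOMESPACE_API_VERSION_STRING )
# Expands to: https://dm.genomespace.org/config/v1.0/serverurl.properties
def load_server_url_properties( url=GENOMESPACE_SERVER_URL_PROPERTIES ):
    # Fetch the properties file and collect plain "key=value" lines into a dict.
    properties = {}
    for line in urllib2.urlopen( url ).read().splitlines():
        line = line.strip()
        if line and not line.startswith( '#' ) and '=' in line:
            key, value = line.split( '=', 1 )
            properties[ key.strip() ] = value.strip()
    return properties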
https://bitbucket.org/galaxy/galaxy-central/commits/6ab749e35631/
Changeset: 6ab749e35631
Branch: stable
User: dan
Date: 2013-04-30 23:19:11
Summary: Update GenomeSpace serverurl.properties file location. Should resolve network issues experienced with Jumbo Frames and the Broad.
Affected #: 3 files
diff -r b0ea9722b9dd8e0da63208b59be20b90eae0cea7 -r 6ab749e3563153f714fe0699435ae9215ebe4ba2 tools/genomespace/genomespace_exporter.py
--- a/tools/genomespace/genomespace_exporter.py
+++ b/tools/genomespace/genomespace_exporter.py
@@ -11,7 +11,7 @@
log = logging.getLogger( "tools.genomespace.genomespace_exporter" )#( __name__ )
GENOMESPACE_API_VERSION_STRING = "v1.0"
-GENOMESPACE_SERVER_URL_PROPERTIES = "http://www.genomespace.org/sites/genomespacefiles/config/serverurl.proper..."
+GENOMESPACE_SERVER_URL_PROPERTIES = "https://dm.genomespace.org/config/%s/serverurl.properties" % ( GENOMESPACE_API_VERSION_STRING )
CHUNK_SIZE = 2**20 #1mb
diff -r b0ea9722b9dd8e0da63208b59be20b90eae0cea7 -r 6ab749e3563153f714fe0699435ae9215ebe4ba2 tools/genomespace/genomespace_file_browser.py
--- a/tools/genomespace/genomespace_file_browser.py
+++ b/tools/genomespace/genomespace_file_browser.py
@@ -9,7 +9,7 @@
import simplejson
GENOMESPACE_API_VERSION_STRING = "v1.0"
-GENOMESPACE_SERVER_URL_PROPERTIES = "http://www.genomespace.org/sites/genomespacefiles/config/serverurl.proper..."
+GENOMESPACE_SERVER_URL_PROPERTIES = "https://dm.genomespace.org/config/%s/serverurl.properties" % ( GENOMESPACE_API_VERSION_STRING )
CHUNK_SIZE = 2**20 #1mb
diff -r b0ea9722b9dd8e0da63208b59be20b90eae0cea7 -r 6ab749e3563153f714fe0699435ae9215ebe4ba2 tools/genomespace/genomespace_importer.py
--- a/tools/genomespace/genomespace_importer.py
+++ b/tools/genomespace/genomespace_importer.py
@@ -13,7 +13,7 @@
from galaxy.datatypes.registry import Registry
GENOMESPACE_API_VERSION_STRING = "v1.0"
-GENOMESPACE_SERVER_URL_PROPERTIES = "http://www.genomespace.org/sites/genomespacefiles/config/serverurl.proper..."
+GENOMESPACE_SERVER_URL_PROPERTIES = "https://dm.genomespace.org/config/%s/serverurl.properties" % ( GENOMESPACE_API_VERSION_STRING )
CHUNK_SIZE = 2**20 #1mb
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Fix queries in the current user's lists of repositories that display the latest revision of each of their repositories with 1) missing tool test components, 2) failing tool tests, and 3) no failing tool tests. Add links for the same filtered lists for all users under the Review repositories with tools section in the tool shed menu.
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9a21a5156b2e/
Changeset: 9a21a5156b2e
User: greg
Date: 2013-04-30 22:40:06
Summary: Fix queries in the current user's lists of repositories that display the latest revision of each of their repositories with 1) missing tool test components, 2) failing tool tests, and 3) no failing tool tests. Add links for the same filtered lists for all users under the Review repositories with tools section in the tool shed menu.
Affected #: 5 files
diff -r f44f6beea3daf77bd48fdb8a657f7cb593b72b6f -r 9a21a5156b2efd6b4a169a62bd663bf69fdadf79 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -65,6 +65,9 @@
repositories_by_user_grid = repository_grids.RepositoriesByUserGrid()
repositories_i_own_grid = repository_grids.RepositoriesIOwnGrid()
repositories_in_category_grid = repository_grids.RepositoriesInCategoryGrid()
+ repositories_missing_tool_test_components_grid = repository_grids.RepositoriesMissingToolTestComponentsGrid()
+ repositories_with_failing_tool_tests_grid = repository_grids.RepositoriesWithFailingToolTestsGrid()
+ repositories_with_no_failing_tool_tests_grid = repository_grids.RepositoriesWithNoFailingToolTestsGrid()
repository_dependencies_grid = repository_grids.RepositoryDependenciesGrid()
repository_grid = repository_grids.RepositoryGrid()
# The repository_metadata_grid is not currently displayed, but is sub-classed by several grids.
@@ -121,6 +124,23 @@
return self.datatypes_grid( trans, **kwd )
@web.expose
+ def browse_deprecated_repositories_i_own( self, trans, **kwd ):
+ if 'operation' in kwd:
+ operation = kwd[ 'operation' ].lower()
+ if operation == "view_or_manage_repository":
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='view_or_manage_repository',
+ **kwd ) )
+ selected_changeset_revision, repository = self.__get_repository_from_refresh_on_change( trans, **kwd )
+ if repository:
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories',
+ operation='view_or_manage_repository',
+ id=trans.security.encode_id( repository.id ),
+ changeset_revision=selected_changeset_revision ) )
+ return self.deprecated_repositories_i_own_grid( trans, **kwd )
+
+ @web.expose
def browse_invalid_tools( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -188,6 +208,34 @@
return self.my_writable_repositories_grid( trans, **kwd )
@web.expose
+ def browse_my_writable_repositories_missing_tool_test_components( self, trans, **kwd ):
+ if 'operation' in kwd:
+ operation = kwd[ 'operation' ].lower()
+ if operation == "view_or_manage_repository":
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='view_or_manage_repository',
+ **kwd ) )
+ elif operation == "repositories_by_user":
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories_by_user',
+ **kwd ) )
+ elif operation in [ 'mark as deprecated', 'mark as not deprecated' ]:
+ kwd[ 'mark_deprecated' ] = operation == 'mark as deprecated'
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='deprecate',
+ **kwd ) )
+ if 'message' not in kwd:
+ message = 'This list contains repositories that match the following criteria:<br>'
+ message += '<ul>'
+ message += '<li>you are authorized to update them</li>'
+ message += '<li>the latest installable revision contains at least 1 tool with no defined tests <b>OR</b>:</li>'
+ message += '<li>the latest installable revision contains at least 1 tool with a test that requires a missing test data file</li>'
+ message += '</ul>'
+ kwd[ 'message' ] = message
+ kwd[ 'status' ] = 'warning'
+ return self.my_writable_repositories_missing_tool_test_components_grid( trans, **kwd )
+
+ @web.expose
def browse_my_writable_repositories_with_failing_tool_tests( self, trans, **kwd ):
if 'operation' in kwd:
operation = kwd[ 'operation' ].lower()
@@ -246,44 +294,6 @@
return self.my_writable_repositories_with_no_failing_tool_tests_grid( trans, **kwd )
@web.expose
- def browse_my_writable_repositories_missing_tool_test_components( self, trans, **kwd ):
- if 'operation' in kwd:
- operation = kwd[ 'operation' ].lower()
- if operation == "view_or_manage_repository":
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='view_or_manage_repository',
- **kwd ) )
- elif operation == "repositories_by_user":
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='browse_repositories_by_user',
- **kwd ) )
- elif operation in [ 'mark as deprecated', 'mark as not deprecated' ]:
- kwd[ 'mark_deprecated' ] = operation == 'mark as deprecated'
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='deprecate',
- **kwd ) )
- if 'message' not in kwd:
- message = 'This list contains repositories that match the following criteria:<br>'
- message += '<ul>'
- message += '<li>you are authorized to update them</li>'
- message += '<li>the latest installable revision contains at least 1 tool with no defined tests <b>OR</b>:</li>'
- message += '<li>the latest installable revision contains at least 1 tool with a test that requires a missing test data file</li>'
- message += '</ul>'
- kwd[ 'message' ] = message
- kwd[ 'status' ] = 'warning'
- return self.my_writable_repositories_missing_tool_test_components_grid( trans, **kwd )
-
- @web.expose
- def browse_deprecated_repositories_i_own( self, trans, **kwd ):
- if 'operation' in kwd:
- operation = kwd[ 'operation' ].lower()
- if operation == "view_or_manage_repository":
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='view_or_manage_repository',
- **kwd ) )
- return self.deprecated_repositories_i_own_grid( trans, **kwd )
-
- @web.expose
def browse_repositories( self, trans, **kwd ):
# We add params to the keyword dict in this method in order to rename the param with an "f-" prefix, simulating filtering by clicking a search
# link. We have to take this approach because the "-" character is illegal in HTTP requests.
@@ -427,6 +437,89 @@
return self.repositories_in_category_grid( trans, **kwd )
@web.expose
+ def browse_repositories_missing_tool_test_components( self, trans, **kwd ):
+ if 'operation' in kwd:
+ operation = kwd[ 'operation' ].lower()
+ if operation == "view_or_manage_repository":
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='view_or_manage_repository',
+ **kwd ) )
+ elif operation == "repositories_by_user":
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories_by_user',
+ **kwd ) )
+ elif operation in [ 'mark as deprecated', 'mark as not deprecated' ]:
+ kwd[ 'mark_deprecated' ] = operation == 'mark as deprecated'
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='deprecate',
+ **kwd ) )
+ if 'message' not in kwd:
+ message = 'This list contains repositories that match the following criteria:<br>'
+ message += '<ul>'
+ message += '<li>the latest installable revision contains at least 1 tool with no defined tests <b>OR</b>:</li>'
+ message += '<li>the latest installable revision contains at least 1 tool with a test that requires a missing test data file</li>'
+ message += '</ul>'
+ kwd[ 'message' ] = message
+ kwd[ 'status' ] = 'warning'
+ return self.repositories_missing_tool_test_components_grid( trans, **kwd )
+
+ @web.expose
+ def browse_repositories_with_failing_tool_tests( self, trans, **kwd ):
+ if 'operation' in kwd:
+ operation = kwd[ 'operation' ].lower()
+ if operation == "view_or_manage_repository":
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='view_or_manage_repository',
+ **kwd ) )
+ elif operation == "repositories_by_user":
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories_by_user',
+ **kwd ) )
+ elif operation in [ 'mark as deprecated', 'mark as not deprecated' ]:
+ kwd[ 'mark_deprecated' ] = operation == 'mark as deprecated'
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='deprecate',
+ **kwd ) )
+ if 'message' not in kwd:
+ message = 'This list contains repositories that match the following criteria:<br>'
+ message += '<ul>'
+ message += '<li>the latest installable revision contains at least 1 tool</li>'
+ message += '<li>the latest installable revision is not missing any tool test components</li>'
+ message += '<li>the latest installable revision has at least 1 tool test that fails</li>'
+ message += '</ul>'
+ kwd[ 'message' ] = message
+ kwd[ 'status' ] = 'warning'
+ return self.repositories_with_failing_tool_tests_grid( trans, **kwd )
+
+ @web.expose
+ def browse_repositories_with_no_failing_tool_tests( self, trans, **kwd ):
+ if 'operation' in kwd:
+ operation = kwd[ 'operation' ].lower()
+ if operation == "view_or_manage_repository":
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='view_or_manage_repository',
+ **kwd ) )
+ elif operation == "repositories_by_user":
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories_by_user',
+ **kwd ) )
+ elif operation in [ 'mark as deprecated', 'mark as not deprecated' ]:
+ kwd[ 'mark_deprecated' ] = operation == 'mark as deprecated'
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='deprecate',
+ **kwd ) )
+ if 'message' not in kwd:
+ message = 'This list contains repositories that match the following criteria:<br>'
+ message += '<ul>'
+ message += '<li>the latest installable revision contains at least 1 tool</li>'
+ message += '<li>the latest installable revision is not missing any tool test components</li>'
+ message += '<li>the latest installable revision has no tool tests that fail</li>'
+ message += '</ul>'
+ kwd[ 'message' ] = message
+ kwd[ 'status' ] = 'warning'
+ return self.repositories_with_no_failing_tool_tests_grid( trans, **kwd )
+
+ @web.expose
def browse_repository( self, trans, id, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
diff -r f44f6beea3daf77bd48fdb8a657f7cb593b72b6f -r 9a21a5156b2efd6b4a169a62bd663bf69fdadf79 lib/tool_shed/grids/repository_grids.py
--- a/lib/tool_shed/grids/repository_grids.py
+++ b/lib/tool_shed/grids/repository_grids.py
@@ -1,17 +1,24 @@
-import os, logging
+import logging
+import os
from galaxy.webapps.tool_shed import model
from galaxy.web.framework.helpers import grids
-from galaxy.model.orm import and_, or_
+from galaxy.model.orm import and_
+from galaxy.model.orm import or_
from galaxy.util import json
import tool_shed.util.shed_util_common as suc
import tool_shed.grids.util as grids_util
from tool_shed.util import metadata_util
from galaxy import eggs
+
eggs.require('markupsafe')
from markupsafe import escape as escape_html
+
eggs.require('mercurial')
-from mercurial import hg, ui, patch, commands
+from mercurial import commands
+from mercurial import hg
+from mercurial import patch
+from mercurial import ui
log = logging.getLogger( __name__ )
@@ -462,7 +469,7 @@
class MyWritableRepositoriesGrid( RepositoryGrid ):
# This grid filters out repositories that have been marked as either deprecated or deleted.
- title = 'Repositories that I can change'
+ title = 'Repositories I can change'
columns = [
RepositoryGrid.NameColumn( "Name",
key="name",
@@ -505,19 +512,19 @@
.filter( model.Repository.table.c.id < 0 )
-class MyWritableRepositoriesMissingToolTestComponentsGrid( MyWritableRepositoriesGrid ):
- title = "Repositories that I can change with missing tool test components"
+class RepositoriesMissingToolTestComponentsGrid( RepositoryGrid ):
+ title = "Repositories with missing tool test components"
columns = [
- RepositoriesIOwnGrid.NameColumn( "Name",
- key="name",
- link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
- attach_popup=True ),
+ RepositoryGrid.NameColumn( "Name",
+ key="name",
+ link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+ attach_popup=False ),
RepositoryGrid.LatestInstallableRevisionColumn( "Latest Installable Revision" ),
RepositoryGrid.UserColumn( "Owner",
+ key="User.username",
model_class=model.User,
link=( lambda item: dict( operation="repositories_by_user", id=item.id ) ),
- attach_popup=False,
- key="User.username" )
+ attach_popup=False )
]
columns.append( grids.MulticolFilterColumn( "Search repository name",
cols_to_filter=[ columns[0] ],
@@ -528,6 +535,29 @@
use_paging = False
def build_initial_query( self, trans, **kwd ):
+ # Filter by latest installable revisions that contain tools with missing tool test components.
+ revision_clause_list = []
+ for repository in trans.sa_session.query( model.Repository ):
+ changeset_revision = filter_by_latest_downloadable_changeset_revision_that_has_missing_tool_test_components( trans, repository )
+ if changeset_revision:
+ revision_clause_list.append( model.RepositoryMetadata.table.c.changeset_revision == changeset_revision )
+ if revision_clause_list:
+ return trans.sa_session.query( model.Repository ) \
+ .join( model.RepositoryMetadata ) \
+ .filter( or_( *revision_clause_list ) ) \
+ .join( model.User.table )
+ # Return an empty query.
+ return trans.sa_session.query( model.Repository ) \
+ .filter( model.Repository.table.c.id < 0 )
+
+
+class MyWritableRepositoriesMissingToolTestComponentsGrid( RepositoriesMissingToolTestComponentsGrid ):
+ title = "Repositories I can change with missing tool test components"
+ columns = [ col for col in RepositoriesMissingToolTestComponentsGrid.columns ]
+ operations = []
+ use_paging = False
+
+ def build_initial_query( self, trans, **kwd ):
# First get all repositories that the current user is authorized to update.
username = trans.user.username
user_clause_list = []
@@ -545,8 +575,8 @@
revision_clause_list = []
for repository in trans.sa_session.query( model.Repository ) \
.filter( or_( *user_clause_list ) ):
- changeset_revision = suc.filter_by_latest_downloadable_changeset_revision_that_has_missing_tool_test_components( trans, repository )
- if changeset_revision not in [ None, suc.INITIAL_CHANGELOG_HASH ]:
+ changeset_revision = filter_by_latest_downloadable_changeset_revision_that_has_missing_tool_test_components( trans, repository )
+ if changeset_revision:
revision_clause_list.append( model.RepositoryMetadata.table.c.changeset_revision == changeset_revision )
if revision_clause_list:
return trans.sa_session.query( model.Repository ) \
@@ -559,19 +589,19 @@
.filter( model.Repository.table.c.id < 0 )
-class MyWritableRepositoriesWithFailingToolTestsGrid( MyWritableRepositoriesMissingToolTestComponentsGrid ):
- title = "Repositories that I can change with failing tool tests"
+class RepositoriesWithFailingToolTestsGrid( RepositoryGrid ):
+ title = "Repositories with failing tool tests"
columns = [
- RepositoriesIOwnGrid.NameColumn( "Name",
- key="name",
- link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
- attach_popup=True ),
+ RepositoryGrid.NameColumn( "Name",
+ key="name",
+ link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+ attach_popup=False ),
RepositoryGrid.LatestInstallableRevisionColumn( "Latest Installable Revision" ),
RepositoryGrid.UserColumn( "Owner",
+ key="User.username",
model_class=model.User,
link=( lambda item: dict( operation="repositories_by_user", id=item.id ) ),
- attach_popup=False,
- key="User.username" )
+ attach_popup=False )
]
columns.append( grids.MulticolFilterColumn( "Search repository name",
cols_to_filter=[ columns[0] ],
@@ -582,6 +612,29 @@
use_paging = False
def build_initial_query( self, trans, **kwd ):
+ # Filter by latest installable revisions that contain tools with at least 1 failing tool test.
+ revision_clause_list = []
+ for repository in trans.sa_session.query( model.Repository ):
+ changeset_revision = filter_by_latest_downloadable_changeset_revision_that_has_failing_tool_tests( trans, repository )
+ if changeset_revision:
+ revision_clause_list.append( model.RepositoryMetadata.table.c.changeset_revision == changeset_revision )
+ if revision_clause_list:
+ return trans.sa_session.query( model.Repository ) \
+ .join( model.RepositoryMetadata ) \
+ .filter( or_( *revision_clause_list ) ) \
+ .join( model.User.table )
+ # Return an empty query.
+ return trans.sa_session.query( model.Repository ) \
+ .filter( model.Repository.table.c.id < 0 )
+
+
+class MyWritableRepositoriesWithFailingToolTestsGrid( RepositoriesWithFailingToolTestsGrid ):
+ title = "Repositories I can change with failing tool tests"
+ columns = [ col for col in RepositoriesWithFailingToolTestsGrid.columns ]
+ operations = []
+ use_paging = False
+
+ def build_initial_query( self, trans, **kwd ):
# First get all repositories that the current user is authorized to update.
username = trans.user.username
user_clause_list = []
@@ -595,38 +648,37 @@
user_clause_list.append( model.Repository.table.c.id == repository.id )
if user_clause_list:
# We have the list of repositories that the current user is authorized to update, so filter further by latest installable revisions that contain
- # tools with missing tool test components.
+ # tools with at least 1 failing tool test.
revision_clause_list = []
for repository in trans.sa_session.query( model.Repository ) \
.filter( or_( *user_clause_list ) ):
- changeset_revision = suc.filter_by_latest_downloadable_changeset_revision_that_has_missing_tool_test_components( trans, repository )
- if changeset_revision not in [ None, suc.INITIAL_CHANGELOG_HASH ]:
+ changeset_revision = filter_by_latest_downloadable_changeset_revision_that_has_failing_tool_tests( trans, repository )
+ if changeset_revision:
revision_clause_list.append( model.RepositoryMetadata.table.c.changeset_revision == changeset_revision )
if revision_clause_list:
return trans.sa_session.query( model.Repository ) \
.join( model.User.table ) \
.filter( or_( *user_clause_list ) ) \
.join( model.RepositoryMetadata ) \
- .filter( or_( *revision_clause_list ) ) \
- .filter( model.RepositoryMetadata.table.c.tools_functionally_correct == False )
+ .filter( or_( *revision_clause_list ) )
# Return an empty query.
return trans.sa_session.query( model.Repository ) \
.filter( model.Repository.table.c.id < 0 )
-class MyWritableRepositoriesWithNoFailingToolTestsGrid( MyWritableRepositoriesMissingToolTestComponentsGrid ):
- title = "Repositories that I can change with failing tool tests"
+class RepositoriesWithNoFailingToolTestsGrid( RepositoryGrid ):
+ title = "Repositories with no failing tool tests"
columns = [
- RepositoriesIOwnGrid.NameColumn( "Name",
- key="name",
- link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
- attach_popup=True ),
+ RepositoryGrid.NameColumn( "Name",
+ key="name",
+ link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+ attach_popup=False ),
RepositoryGrid.LatestInstallableRevisionColumn( "Latest Installable Revision" ),
RepositoryGrid.UserColumn( "Owner",
+ key="User.username",
model_class=model.User,
link=( lambda item: dict( operation="repositories_by_user", id=item.id ) ),
- attach_popup=False,
- key="User.username" )
+ attach_popup=False )
]
columns.append( grids.MulticolFilterColumn( "Search repository name",
cols_to_filter=[ columns[0] ],
@@ -637,6 +689,30 @@
use_paging = False
def build_initial_query( self, trans, **kwd ):
+ # We have the list of repositories that the current user is authorized to update, so filter further by latest installable revisions that contain
+ # tools with at least 1 failing tool test.
+ revision_clause_list = []
+ for repository in trans.sa_session.query( model.Repository ):
+ changeset_revision = filter_by_latest_downloadable_changeset_revision_that_has_no_failing_tool_tests( trans, repository )
+ if changeset_revision:
+ revision_clause_list.append( model.RepositoryMetadata.table.c.changeset_revision == changeset_revision )
+ if revision_clause_list:
+ return trans.sa_session.query( model.Repository ) \
+ .join( model.RepositoryMetadata ) \
+ .filter( or_( *revision_clause_list ) ) \
+ .join( model.User.table )
+ # Return an empty query.
+ return trans.sa_session.query( model.Repository ) \
+ .filter( model.Repository.table.c.id < 0 )
+
+
+class MyWritableRepositoriesWithNoFailingToolTestsGrid( RepositoriesWithNoFailingToolTestsGrid ):
+ title = "Repositories I can change with no failing tool tests"
+ columns = [ col for col in RepositoriesWithNoFailingToolTestsGrid.columns ]
+ operations = []
+ use_paging = False
+
+ def build_initial_query( self, trans, **kwd ):
# First get all repositories that the current user is authorized to update.
username = trans.user.username
user_clause_list = []
@@ -650,20 +726,19 @@
user_clause_list.append( model.Repository.table.c.id == repository.id )
if user_clause_list:
# We have the list of repositories that the current user is authorized to update, so filter further by latest installable revisions that contain
- # tools with missing tool test components.
+ # at least 1 tool, no missing tool test components, and no failing tool tests.
revision_clause_list = []
for repository in trans.sa_session.query( model.Repository ) \
.filter( or_( *user_clause_list ) ):
- changeset_revision = suc.filter_by_latest_downloadable_changeset_revision_that_has_missing_tool_test_components( trans, repository )
- if changeset_revision not in [ None, suc.INITIAL_CHANGELOG_HASH ]:
+ changeset_revision = filter_by_latest_downloadable_changeset_revision_that_has_no_failing_tool_tests( trans, repository )
+ if changeset_revision:
revision_clause_list.append( model.RepositoryMetadata.table.c.changeset_revision == changeset_revision )
if revision_clause_list:
return trans.sa_session.query( model.Repository ) \
.join( model.User.table ) \
.filter( or_( *user_clause_list ) ) \
.join( model.RepositoryMetadata ) \
- .filter( or_( *revision_clause_list ) ) \
- .filter( model.RepositoryMetadata.table.c.tools_functionally_correct == True )
+ .filter( or_( *revision_clause_list ) )
# Return an empty query.
return trans.sa_session.query( model.Repository ) \
.filter( model.Repository.table.c.id < 0 )
@@ -1259,3 +1334,62 @@
model.Repository.table.c.deleted == False,
model.Repository.table.c.deprecated == False ) ) \
.join( model.User.table )
+
+# ------ utility methods -------------------
+
+def filter_by_latest_downloadable_changeset_revision_that_has_failing_tool_tests( trans, repository ):
+ """
+ Inspect the latest installable changeset revision for the received repository to see if it includes at least 1 tool that has at least 1 failing test.
+ """
+ encoded_repository_id = trans.security.encode_id( repository.id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ tip_ctx = str( repo.changectx( repo.changelog.tip() ) )
+ repository_metadata = get_latest_installable_repository_metadata_if_it_includes_tools( trans, repository )
+ if repository_metadata and not repository_metadata.tools_functionally_correct:
+ return repository_metadata.changeset_revision
+ return None
+
+def filter_by_latest_downloadable_changeset_revision_that_has_missing_tool_test_components( trans, repository ):
+ """
+ Inspect the latest installable changeset revision for the received repository to see if it includes tools that are either missing functional tests
+ or functional test data. If the changeset revision includes tools, but is missing tool test components, return the changeset revision hash.
+ """
+ encoded_repository_id = trans.security.encode_id( repository.id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ tip_ctx = str( repo.changectx( repo.changelog.tip() ) )
+ repository_metadata = get_latest_installable_repository_metadata_if_it_includes_tools( trans, repository )
+ if repository_metadata and repository_metadata.missing_test_components:
+ return repository_metadata.changeset_revision
+ return None
+
+def filter_by_latest_downloadable_changeset_revision_that_has_no_failing_tool_tests( trans, repository ):
+ """
+ Inspect the latest installable changeset revision for the received repository to see if it includes tools with no failing tests.
+ """
+ encoded_repository_id = trans.security.encode_id( repository.id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ tip_ctx = str( repo.changectx( repo.changelog.tip() ) )
+ repository_metadata = get_latest_installable_repository_metadata_if_it_includes_tools( trans, repository )
+ if repository_metadata and not repository_metadata.missing_test_components and repository_metadata.tools_functionally_correct:
+ return repository_metadata.changeset_revision
+ return None
+
+def get_latest_installable_repository_metadata_if_it_includes_tools( trans, repository ):
+ """Return the latest installable repository_metadata record for the received repository if one exists."""
+ encoded_repository_id = trans.security.encode_id( repository.id )
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ tip_ctx = str( repo.changectx( repo.changelog.tip() ) )
+ repository_metadata = None
+ try:
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, tip_ctx )
+ if repository_metadata and repository_metadata.includes_tools and repository_metadata.downloadable:
+ return repository_metadata
+ return None
+ except:
+ latest_installable_revision = suc.get_previous_downloadable_changeset_revision( repository, repo, tip_ctx )
+ if latest_installable_revision == suc.INITIAL_CHANGELOG_HASH:
+ return None
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, latest_installable_revision )
+ if repository_metadata and repository_metadata.includes_tools and repository_metadata.downloadable:
+ return repository_metadata
+ return None
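To summarize the selection logic above, here is a hypothetical helper (not in the changeset) showing which of the three new grids a repository's latest installable metadata record would appear in, using the same flags the filter functions check:
def grids_for_latest_installable_revision( repository_metadata ):
    # repository_metadata is assumed to be the record returned by
    # get_latest_installable_repository_metadata_if_it_includes_tools(), i.e. it
    # already includes tools and is downloadable; None means the repository
    # appears in none of the three grids.
    grids = []
    if repository_metadata is None:
        return grids
    if repository_metadata.missing_test_components:
        grids.append( 'missing tool test components' )
    if not repository_metadata.tools_functionally_correct:
        grids.append( 'failing tool tests' )
    if not repository_metadata.missing_test_components and repository_metadata.tools_functionally_correct:
        grids.append( 'no failing tool tests' )
    return grids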
diff -r f44f6beea3daf77bd48fdb8a657f7cb593b72b6f -r 9a21a5156b2efd6b4a169a62bd663bf69fdadf79 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -255,29 +255,6 @@
sa_session.flush()
return tool_shed_repository
-def filter_by_latest_downloadable_changeset_revision_that_has_missing_tool_test_components( trans, repository ):
- """
- Inspect the latest installable changeset revision for the received repository to see if it includes tool that are either missing functional tests
- or functional test data. If the changset revision includes tools, but is missing tool test components, return the changeset revision hash. Otherwise
- return the INITIAL_CHANGELOG_HASH.
- """
- print_debug = repository.name == 'convert_chars'
- encoded_repository_id = trans.security.encode_id( repository.id )
- repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
- tip_ctx = str( repo.changectx( repo.changelog.tip() ) )
- repository_metadata = None
- try:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, tip_ctx )
- except:
- latest_installable_revision = get_previous_downloadable_changeset_revision( repository, repo, tip_ctx )
- if latest_installable_revision != INITIAL_CHANGELOG_HASH:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, latest_installable_revision )
- else:
- repository_metadata = None
- if repository_metadata and repository_metadata.downloadable and repository_metadata.missing_test_components:
- return repository_metadata.changeset_revision
- return INITIAL_CHANGELOG_HASH
-
def generate_clone_url_for_installed_repository( app, repository ):
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
tool_shed_url = get_url_from_tool_shed( app, repository.tool_shed )
diff -r f44f6beea3daf77bd48fdb8a657f7cb593b72b6f -r 9a21a5156b2efd6b4a169a62bd663bf69fdadf79 templates/webapps/tool_shed/admin/index.mako
--- a/templates/webapps/tool_shed/admin/index.mako
+++ b/templates/webapps/tool_shed/admin/index.mako
@@ -96,9 +96,18 @@
Reviewing Repositories With Tools
</div><div class="toolSectionBody">
- <div class="toolSectionBg">
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository_review', action='manage_repositories_with_invalid_tests' )}">Repositories missing tests or data</a>
+ <div class="toolSectionPad"></div>
+ <div class="toolSectionBody">
+ <div class="toolSectionBg">
+ <div class="toolTitle">
+ <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_missing_tool_test_components' )}">Latest revision missing tool tests</a>
+ </div>
+ <div class="toolTitle">
+ <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_with_failing_tool_tests' )}">Latest revision failing tool tests</a>
+ </div>
+ <div class="toolTitle">
+ <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_with_no_failing_tool_tests' )}">Latest revision all tool tests pass</a>
+ </div></div></div></div>
diff -r f44f6beea3daf77bd48fdb8a657f7cb593b72b6f -r 9a21a5156b2efd6b4a169a62bd663bf69fdadf79 templates/webapps/tool_shed/index.mako
--- a/templates/webapps/tool_shed/index.mako
+++ b/templates/webapps/tool_shed/index.mako
@@ -164,7 +164,13 @@
<div class="toolSectionBody"><div class="toolSectionBg"><div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository_review', action='manage_repositories_with_invalid_tests' )}">Repositories missing tests or data</a>
+ <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_missing_tool_test_components' )}">Latest revision missing tool tests</a>
+ </div>
+ <div class="toolTitle">
+ <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_with_failing_tool_tests' )}">Latest revision failing tool tests</a>
+ </div>
+ <div class="toolTitle">
+ <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_with_no_failing_tool_tests' )}">Latest revision all tool tests pass</a></div></div></div>
commit/galaxy-central: jgoecks: Clean up for copying datasets to current history.
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f44f6beea3da/
Changeset: f44f6beea3da
User: jgoecks
Date: 2013-04-30 20:36:20
Summary: Clean up for copying datasets to current history.
Affected #: 1 file
diff -r 4c7b5f4d569d478fd83fbfa3bf8d2f0187a30c1a -r f44f6beea3daf77bd48fdb8a657f7cb593b72b6f lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -546,8 +546,14 @@
hda_ids = [ trans.security.encode_id( hda.id ) for hda in hdas ]
trans.template_context[ 'seek_hda_ids' ] = hda_ids
elif operation == "copy to current history":
- # Copy a dataset to the current history.
+ #
+ # Copy datasets to the current history.
+ #
+
target_histories = [ trans.get_history() ]
+
+ # Reverse HDAs so that they appear in the history in the order they are provided.
+ hda_ids.reverse()
status, message = self._copy_datasets( trans, hda_ids, target_histories )
# Current history changed, refresh history frame.
commit/galaxy-central: jgoecks: Clean up for inheritance chain display.
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4c7b5f4d569d/
Changeset: 4c7b5f4d569d
User: jgoecks
Date: 2013-04-30 20:15:27
Summary: Clean up for inheritance chain display.
Affected #: 1 file
diff -r 3a6d4a323e3ae7764a6a3704cf35b9dcb9b315f2 -r 4c7b5f4d569d478fd83fbfa3bf8d2f0187a30c1a templates/show_params.mako
--- a/templates/show_params.mako
+++ b/templates/show_params.mako
@@ -153,7 +153,9 @@
<div class="inherit" style="background-color: #fff; font-weight:bold;">${hda.name | h}</div>
% for dep in inherit_chain:
- <div style="font-size: 36px; text-align: center;">↑</div>
- <div class="inherit">${dep[0].name | h}<br/>${dep[1]}</div>
+ <div style="font-size: 36px; text-align: center; position: relative; top: 3px">↑</div>
+ <div class="inherit">
+ '${dep[0].name | h}' in ${dep[1]}<br/>
+ </div>
% endfor
commit/galaxy-central: carlfeberhard: Datasets API: better error handling on api/datasets/display
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3a6d4a323e3a/
Changeset: 3a6d4a323e3a
User: carlfeberhard
Date: 2013-04-30 20:06:12
Summary: Datasets API: better error handling on api/datasets/display
Affected #: 1 file
diff -r 746d59a574bbccdf5cedfbc7bd25a50b37d6667f -r 3a6d4a323e3ae7764a6a3704cf35b9dcb9b315f2 lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -68,8 +68,9 @@
return rval
def _dataset_state( self, trans, dataset, **kwargs ):
- """ Returns state of dataset. """
-
+ """
+ Returns state of dataset.
+ """
msg = self.check_dataset_state( trans, dataset )
if not msg:
msg = dataset.conversion_messages.DATA
@@ -216,7 +217,7 @@
Displays history content (dataset).
"""
# Huge amount of code overlap with lib/galaxy/webapps/galaxy/api/history_content:show here.
- hda_dict = {}
+ rval = ''
try:
# for anon users:
#TODO: check login_required?
@@ -233,6 +234,13 @@
check_ownership=True, check_accessible=True, deleted=False )
hda = self.get_history_dataset_association( trans, history, history_content_id,
check_ownership=True, check_accessible=True )
- except:
- raise
- return hda.datatype.display_data(trans, hda, preview, filename, to_ext, chunk, **kwd)
+
+ rval = hda.datatype.display_data( trans, hda, preview, filename, to_ext, chunk, **kwd )
+
+ except Exception, exception:
+ log.error( "Error getting display data for dataset (%s) from history (%s): %s",
+ history_content_id, history_id, str( exception ), exc_info=True )
+ trans.response.status = 500
+ rval = ( "Could not get display data for dataset: " + str( exception ) )
+
+ return rval
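For reference, a rough client-side sketch (not from the commit) of calling the display endpoint and reacting to the new behaviour, where a failure now yields a 500 status plus a short message instead of an unhandled traceback. The base URL, the ids, and the use of the "key" query parameter are illustrative assumptions:
import urllib
import urllib2
def fetch_display_data( galaxy_url, api_key, history_id, content_id ):
    url = "%s/api/histories/%s/contents/%s/display?%s" % (
        galaxy_url.rstrip( '/' ), history_id, content_id, urllib.urlencode( dict( key=api_key ) ) )
    try:
        return urllib2.urlopen( url ).read()
    except urllib2.HTTPError, e:
        # With this changeset the server responds with status 500 and a
        # "Could not get display data for dataset: ..." message.
        print "display failed (%s): %s" % ( e.code, e.read() )
        return None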
commit/galaxy-central: carlfeberhard: Datasets API: import cleanup, misc. cleanup
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/746d59a574bb/
Changeset: 746d59a574bb
User: carlfeberhard
Date: 2013-04-30 19:56:11
Summary: Datasets API: import cleanup, misc. cleanup
Affected #: 1 file
diff -r 07f09e119bfcbe8e4cfda3400c8892f8fb3891d2 -r 746d59a574bbccdf5cedfbc7bd25a50b37d6667f lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -1,15 +1,17 @@
"""
API operations on the contents of a dataset.
"""
-import logging, os, string, shutil, urllib, re, socket
-from galaxy import util, datatypes, jobs, web, util
+from galaxy import web
from galaxy.visualization.data_providers.genome import FeatureLocationIndexDataProvider
-from galaxy.web.base.controller import BaseAPIController, UsesVisualizationMixin, UsesHistoryDatasetAssociationMixin, UsesHistoryMixin
+from galaxy.web.base.controller import BaseAPIController, UsesVisualizationMixin, UsesHistoryDatasetAssociationMixin
+from galaxy.web.base.controller import UsesHistoryMixin
from galaxy.web.framework.helpers import is_true
+import logging
log = logging.getLogger( __name__ )
-class DatasetsController( BaseAPIController, UsesVisualizationMixin, UsesHistoryMixin, UsesHistoryDatasetAssociationMixin ):
+class DatasetsController( BaseAPIController, UsesVisualizationMixin, UsesHistoryMixin,
+ UsesHistoryDatasetAssociationMixin ):
@web.expose_api
def index( self, trans, **kwd ):
@@ -17,7 +19,8 @@
GET /api/datasets
Lists datasets.
"""
- pass
+ trans.response.status = 501
+ return 'not implemented'
@web.expose_api
def show( self, trans, id, hda_ldda='hda', data_type=None, **kwd ):
@@ -25,7 +28,6 @@
GET /api/datasets/{encoded_dataset_id}
Displays information about and/or content of a dataset.
"""
-
# Get dataset.
try:
dataset = self.get_hda_or_ldda( trans, hda_ldda=hda_ldda, dataset_id=id )
@@ -98,7 +100,8 @@
# If there is a chrom, check for data on the chrom.
if chrom:
data_provider_registry = trans.app.data_provider_registry
- data_provider = trans.app.data_provider_registry.get_data_provider( trans, original_dataset=dataset, source='index' )
+ data_provider = trans.app.data_provider_registry.get_data_provider( trans,
+ original_dataset=dataset, source='index' )
if not data_provider.has_data( chrom ):
return dataset.conversion_messages.NO_DATA
@@ -118,13 +121,11 @@
return data_provider.get_data( query )
return []
-
def _data( self, trans, dataset, chrom, low, high, start_val=0, max_vals=None, **kwargs ):
"""
Provides a block of data from a dataset.
"""
-
# Parameter check.
if not chrom:
return dataset.conversion_messages.NO_DATA
@@ -151,7 +152,8 @@
summary = indexer.get_data( chrom, low, high, detail_cutoff=0, draw_cutoff=0, **kwargs )
if summary == "detail":
# Use maximum level of detail--2--to get summary data no matter the resolution.
- summary = indexer.get_data( chrom, low, high, resolution=kwargs[ 'resolution' ], level=2, detail_cutoff=0, draw_cutoff=0 )
+ summary = indexer.get_data( chrom, low, high, resolution=kwargs[ 'resolution' ],
+ level=2, detail_cutoff=0, draw_cutoff=0 )
return summary
if 'index' in data_sources and data_sources['index']['name'] == "summary_tree" and mode == "Auto":
@@ -207,7 +209,8 @@
return data
@web.expose_api_raw
- def display( self, trans, history_content_id, history_id, preview=False, filename=None, to_ext=None, chunk=None, **kwd ):
+ def display( self, trans, history_content_id, history_id,
+ preview=False, filename=None, to_ext=None, chunk=None, **kwd ):
"""
GET /api/histories/{encoded_history_id}/contents/{encoded_content_id}/display
Displays history content (dataset).
commit/galaxy-central: inithello: Update check strings in functional tests to reflect recent changes to the UI.
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/07f09e119bfc/
Changeset: 07f09e119bfc
User: inithello
Date: 2013-04-30 19:41:26
Summary: Update check strings in functional tests to reflect recent changes to the UI.
Affected #: 1 file
diff -r 6b996d66ce37e20de68d6f02f32355c026a5324b -r 07f09e119bfcbe8e4cfda3400c8892f8fb3891d2 test/tool_shed/functional/test_0000_basic_repository_features.py
--- a/test/tool_shed/functional/test_0000_basic_repository_features.py
+++ b/test/tool_shed/functional/test_0000_basic_repository_features.py
@@ -118,14 +118,17 @@
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
self.set_repository_deprecated( repository,
- strings_displayed=[ 'has been marked as deprecated', 'Mark as not deprecated' ] )
+ strings_displayed=[ 'has been marked as deprecated' ] )
+ strings_displayed = [ 'This repository has been marked as deprecated', 'Mark repository as not deprecated' ]
self.display_manage_repository_page( repository,
- strings_displayed=[ 'This repository has been marked as deprecated' ],
+ strings_displayed=strings_displayed,
strings_not_displayed=[ 'Upload files', 'Reset all repository metadata' ] )
self.browse_repository( repository, strings_not_displayed=[ 'Upload files' ] )
self.set_repository_deprecated( repository,
- strings_displayed=[ 'has been marked as not deprecated', 'Mark as deprecated' ],
+ strings_displayed=[ 'has been marked as not deprecated' ],
set_deprecated=False )
+ strings_displayed = [ 'Mark repository as deprecated', 'Upload files', 'Reset all repository metadata' ]
+ self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
def test_0045_display_repository_tip_file( self ):
'''Display the contents of filtering.xml in the repository tip revision'''
commit/galaxy-central: carlfeberhard: Browser tests: first draft at running a workflow from the API, remove screen-caps from tools, fix POST method in API interface
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6b996d66ce37/
Changeset: 6b996d66ce37
User: carlfeberhard
Date: 2013-04-30 17:05:18
Summary: Browser tests: first draft at running a workflow from the API, remove screen-caps from tools, fix POST method in API interface
Affected #: 3 files
diff -r a27856f42a616378bf5b152a06707d1d8c24e66d -r 6b996d66ce37e20de68d6f02f32355c026a5324b test/casperjs/api-workflow-tests.js
--- a/test/casperjs/api-workflow-tests.js
+++ b/test/casperjs/api-workflow-tests.js
@@ -43,57 +43,245 @@
return true;
}
-function compareObjs( obj1, where ){
- for( var key in where ){
- if( where.hasOwnProperty( key ) ){
- if( !obj1.hasOwnProperty( key ) ){ return false; }
- if( obj1[ key ] !== where[ key ] ){ return false; }
- }
+function countKeys( object ){
+ if( !utils.isObject( object ) ){ return 0; }
+ var count = 0;
+ for( var key in object ){
+ if( object.hasOwnProperty( key ) ){ count += 1; }
}
- return true;
-}
-
-function findObject( objectArray, where, start ){
- start = start || 0;
- for( var i=start; i<objectArray.length; i += 1 ){
- if( compareObjs( objectArray[i], where ) ){ return objectArray[i]; }
- }
- return null;
+ return count;
}
// =================================================================== TESTS
-var workflowSummaryKeys = [
+var workflowJSONFilepath = 'test-data/Bed_interval_lengths.ga',
+ workflowModelClass = 'StoredWorkflow',
+ workflowSummaryKeys = [
'id', 'model_class', 'name', 'published', 'tags', 'url'
],
workflowDetailKeys = workflowSummaryKeys.concat([
'inputs', 'steps'
- ]);
+ ]),
+ stepKeys = [
+ 'id', 'input_steps', 'tool_id', 'type'
+ ],
+ simpleBedFilepath = 'test-data/simple.bed',
+ uploadedFile = null,
+ workflowCreateKeys = [ 'history', 'outputs' ];
spaceghost.thenOpen( spaceghost.baseUrl ).then( function(){
+ // ------------------------------------------------------------------------------------------- UPLOAD
+ // upload first or we have no data to test
+ this.test.comment( 'upload should allow importing a new workflow given in JSON form' );
+ var workflowToUpload = this.loadJSONFile( workflowJSONFilepath );
+ //this.debug( this.jsonStr( workflowToUpload ) );
+ var returned = this.api.workflows.upload( workflowToUpload );
+
+ this.test.comment( 'upload should return a summary object of what we uploaded' );
+ //this.debug( this.jsonStr( returned ) );
+ this.test.assert( utils.isObject( returned ), "upload returned an object" );
+ this.test.assert( hasKeys( returned, workflowSummaryKeys ), "upload's return has the proper keys" );
+ this.test.assert( this.api.isEncodedId( returned.id ),
+ "id is of the proper form: " + returned.id );
+ this.test.assert( returned.model_class === workflowModelClass,
+ "has the proper model_class: " + returned.model_class );
+ this.test.assert( returned.name === workflowToUpload.name + ' ' + '(imported from API)',
+ "has the proper, modified name: " + returned.name );
+ this.test.assert( !returned.published,
+ "uploaded workflow defaults to un-published: " + returned.published );
+ this.test.assert( utils.isArray( returned.tags ) && returned.tags.length === 0,
+ "upload returned an empty tag array: " + this.jsonStr( returned.tags ) );
+ this.test.assert( returned.url === '/' + utils.format( this.api.workflows.urlTpls.show, returned.id ),
+ "url matches the show url: " + returned.url );
+
+
// ------------------------------------------------------------------------------------------- INDEX
this.test.comment( 'index should get a list of workflows' );
var workflowIndex = this.api.workflows.index();
this.debug( this.jsonStr( workflowIndex ) );
this.test.assert( utils.isArray( workflowIndex ), "index returned an array: length " + workflowIndex.length );
+ this.test.assert( workflowIndex.length >= 1, "index returned at least one job" );
- // need a way to import/create a workflow here for testing
- if( workflowIndex.length <= 0 ){
- log.warn( 'No workflows available' );
- return;
- }
- this.test.assert( workflowIndex.length >= 1, 'Has at least one workflow' );
+ this.test.comment( 'index should have returned an object matching the workflow uploaded' );
+ var firstWorkflow = workflowIndex[0];
+ this.test.assert( hasKeys( firstWorkflow, workflowSummaryKeys ), "index has the proper keys" );
+ this.test.assert( this.api.isEncodedId( firstWorkflow.id ),
+ "id is of the proper form: " + firstWorkflow.id );
+ this.test.assert( firstWorkflow.model_class === workflowModelClass,
+ "has the proper model_class: " + firstWorkflow.model_class );
+ this.test.assert( firstWorkflow.name === workflowToUpload.name + ' ' + '(imported from API)',
+ "has the proper, modified name: " + firstWorkflow.name );
+ this.test.assert( !firstWorkflow.published,
+ "workflow is un-published: " + firstWorkflow.published );
+ this.test.assert( utils.isArray( firstWorkflow.tags ) && firstWorkflow.tags.length === 0,
+ "tag array is empty: " + this.jsonStr( firstWorkflow.tags ) );
+ this.test.assert( firstWorkflow.url === '/' + utils.format( this.api.workflows.urlTpls.show, firstWorkflow.id ),
+ "url matches the show url: " + firstWorkflow.url );
+
// ------------------------------------------------------------------------------------------- SHOW
this.test.comment( 'show should get detailed data about the workflow with the given id' );
- var workflowShow = this.api.workflows.show( workflowIndex[0].id );
+ var workflowShow = this.api.workflows.show( firstWorkflow.id );
this.debug( this.jsonStr( workflowShow ) );
+ this.test.assert( utils.isObject( workflowShow ), "show returned an object" );
+ this.test.assert( hasKeys( workflowShow, workflowDetailKeys ), "show has the proper keys" );
+ this.test.assert( this.api.isEncodedId( workflowShow.id ),
+ "id is of the proper form: " + workflowShow.id );
+ this.test.assert( workflowShow.model_class === workflowModelClass,
+ "has the proper model_class: " + workflowShow.model_class );
+ this.test.assert( workflowShow.name === workflowToUpload.name + ' ' + '(imported from API)',
+ "has the proper, modified name: " + workflowShow.name );
+ this.test.assert( !workflowShow.published,
+ "workflow is un-published: " + workflowShow.published );
+ this.test.assert( utils.isArray( workflowShow.tags ) && workflowShow.tags.length === 0,
+ "tag array is empty: " + this.jsonStr( workflowShow.tags ) );
+ this.test.assert( workflowShow.url === '/' + utils.format( this.api.workflows.urlTpls.show, workflowShow.id ),
+ "url matches the show url: " + workflowShow.url );
+ this.test.comment( 'inputs from show should be an object (and, in this case, empty)' );
+ var inputs = workflowShow.inputs;
+ //this.debug( 'inputs:\n' + this.jsonStr( inputs ) );
+ this.test.assert( utils.isObject( workflowShow.inputs ), "inputs is an object" );
+ //for( var inputKey in inputs ){
+ // if( inputs.hasOwnProperty( inputKey ) ){
+ // }
+ //}
+ this.test.assert( countKeys( workflowShow.inputs ) === 0, "inputs is empty" );
+
+ this.test.comment( 'steps from show should be an object containing each tool defined as a step' );
+ var steps = workflowShow.steps;
+ //this.debug( 'steps:\n' + this.jsonStr( steps ) );
+ this.test.assert( utils.isObject( workflowShow.steps ), "steps is an object" );
+ //! ids for steps (and the keys used) are un-encoded (and in strings)
+ for( var stepKey in steps ){
+ if( steps.hasOwnProperty( stepKey ) ){
+ // any way to match this up with the workflowToUpload?
+
+ this.test.assert( utils.isString( stepKey ), "step key is a string: " + stepKey );
+ var step = steps[ stepKey ];
+ this.debug( 'step:\n' + this.jsonStr( step ) );
+ this.test.assert( hasKeys( step, stepKeys ), "step has the proper keys" );
+
+ this.test.assert( utils.isNumber( step.id ),
+ "step id is a number: " + step.id );
+ try {
+ this.test.assert( parseInt( stepKey, 10 ) === step.id,
+ "step id matches step key: " + step.id );
+ } catch( err ){
+ this.test.fail( 'couldnt parse stepKey: ' + stepKey + ',' + err );
+ }
+
+ this.test.assert( utils.isObject( step.input_steps ), "input_steps is an object" );
+ if( countKeys( step.input_steps ) !== 0 ){
+ this.test.assert( hasKeys( step.input_steps, [ 'input' ] ), "input_steps has the proper keys" );
+ }
+
+ this.test.assert( step.type === 'tool',
+ "step type is a tool: " + step.type );
+
+ // check for tools in this wf with the api
+ this.test.assert( utils.isString( step.tool_id ),
+ "step tool_id is a string: " + step.tool_id );
+ var tool_used = this.api.tools.show( step.tool_id );
+ //this.debug( this.jsonStr( tool_used ) )
+ this.test.assert( countKeys( step.input_steps ) !== 0, "found tool in api.tools for: " + step.tool_id );
+
+ // trace the path through input_steps, source_steps
+ }
+ }
+
+
+
+ // ------------------------------------------------------------------------------------------- MISC
+});
+
+// now run the uploaded workflow
+spaceghost.tools.uploadFile( simpleBedFilepath, function( uploadInfo ){
+ uploadedFile = uploadInfo;
+});
+spaceghost.then( function(){
+ var currentHistory = this.api.histories.index()[0],
+ firstWorkflow = this.api.workflows.show( this.api.workflows.index()[0].id );
+
+ //this.debug( this.jsonStr( uploadedFile ) );
+ var uploadedFileId = uploadedFile.hdaElement.attributes.id.split( '-' )[1];
+ this.debug( this.jsonStr( uploadedFileId ) );
+ this.debug( this.jsonStr( this.api.hdas.show( currentHistory.id, uploadedFileId ) ) );
+
+ //this.debug( this.jsonStr( firstWorkflow ) );
+ // find the input step by looking for a step where input_steps == {}
+ var input_step = null;
+ for( var stepKey in firstWorkflow.steps ){
+ if( firstWorkflow.steps.hasOwnProperty( stepKey ) ){
+ var step = firstWorkflow.steps[ stepKey ];
+ if( countKeys( step.input_steps ) === 0 ){
+ input_step = stepKey;
+ this.debug( 'input step: ' + this.jsonStr( step ) )
+ break;
+ }
+ }
+ }
// ------------------------------------------------------------------------------------------- CREATE
+ this.test.comment( 'create should allow running an existing workflow' );
+ // needs workflow_id, history, ds_map
+ var executionData = {
+ workflow_id : firstWorkflow.id,
+ history : 'hist_id=' + currentHistory.id,
+ ds_map : {}
+ };
+ executionData.ds_map[ input_step ] = {
+ src: 'hda',
+ id: uploadedFileId
+ };
+ var returned = this.api.workflows.create( executionData );
+ this.debug( this.jsonStr( returned ) );
+ this.test.assert( utils.isObject( returned ), "create returned an object" );
+ this.test.assert( hasKeys( returned, workflowCreateKeys ), "create returned the proper keys" );
+ this.test.assert( this.api.isEncodedId( returned.history ),
+ "history id is proper: " + returned.history );
+ this.test.assert( utils.isArray( returned.outputs ),
+ "create.outputs is an array: length " + returned.outputs.length );
+ this.test.assert( returned.outputs.length >= 1, "there is at least one output" );
+ for( var i=0; i<returned.outputs.length; i+=1 ){
+ this.test.assert( this.api.isEncodedId( returned.outputs[i] ),
+ "output id is proper: " + returned.outputs[i] );
+ }
- // ------------------------------------------------------------------------------------------- MISC
+ var counter = 0;
+ this.waitFor(
+ function checkHdas(){
+ if( counter % 4 !== 0 ){
+ counter += 1;
+ return false;
+ }
+ counter += 1;
+
+ var outputs = this.api.hdas.index( currentHistory.id, returned.outputs );
+ //this.debug( 'outputs:\n' + this.jsonStr( outputs ) );
+ for( var i=0; i<outputs.length; i+=1 ){
+ var output = outputs[i];
+ this.debug( utils.format( 'name: %s, state: %s', output.name, output.state ) );
+ if( output.state === 'queued' || output.state === 'running' ){
+ return false;
+ }
+ }
+ return true;
+ },
+ function allDone(){
+ this.debug( 'DONE' );
+ var outputs = this.api.hdas.index( currentHistory.id, returned.outputs );
+ this.debug( 'outputs:\n' + this.jsonStr( outputs ) );
+ },
+ function timeout(){
+ this.debug( 'timeout' );
+
+ },
+ 45 * 1000
+ );
});
// ===================================================================
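For readers following the new test, the payload handed to workflows.create above reduces to the shape below (a minimal sketch in plain JavaScript; the encoded ids and the '0' step key are hypothetical placeholders standing in for the values the test pulls from the API):

    // Hypothetical ids; in the test these come from api.histories.index(),
    // api.workflows.index()/show(), and the uploaded HDA's element id.
    var executionData = {
        workflow_id : 'ebfb8f50c6abde6d',          // encoded workflow id
        history     : 'hist_id=1cd8e2f6b131e891',  // encoded history id
        ds_map      : {}
    };
    // The input step key is the un-encoded, numeric-string step id whose
    // input_steps object is empty; map it onto the uploaded HDA.
    executionData.ds_map[ '0' ] = { src: 'hda', id: 'f2db41e1fa331b3e' };
    // POSTing this returns { history: <encoded id>, outputs: [ <encoded hda ids> ] },
    // which the test then polls via api.hdas.index() until no output is queued/running.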
diff -r a27856f42a616378bf5b152a06707d1d8c24e66d -r 6b996d66ce37e20de68d6f02f32355c026a5324b test/casperjs/modules/api.js
--- a/test/casperjs/modules/api.js
+++ b/test/casperjs/modules/api.js
@@ -58,7 +58,8 @@
// PUT data needs to be stringified in jq.ajax and the content changed
//TODO: server side handling could change this?
- if( ( options.type && options.type === 'PUT' ) && ( options.data ) ){
+ if( ( options.type && [ 'PUT', 'POST' ].indexOf( options.type ) !== -1 )
+ && ( options.data ) ){
options.contentType = 'application/json';
options.data = JSON.stringify( options.data );
}
@@ -384,39 +385,23 @@
});
};
-//WorkflowsAPI.prototype.create = function create( payload ){
-// this.api.spaceghost.info( 'workflows.create: ' + [ this.api.spaceghost.jsonStr( payload ) ] );
-//
-// // py.payload <-> ajax.data
-// payload = this.api.ensureObject( payload );
-// return this.api._ajax( utils.format( this.urlTpls.create ), {
-// type : 'POST',
-// data : payload
-// });
-//};
-//
-//WorkflowsAPI.prototype.update = function create( id, payload ){
-// this.api.spaceghost.info( 'workflows.update: ' + [ id, this.api.spaceghost.jsonStr( payload ) ] );
-//
-// // py.payload <-> ajax.data
-// historyId = this.api.ensureId( historyId );
-// id = this.api.ensureId( id );
-// payload = this.api.ensureObject( payload );
-// url = utils.format( this.urlTpls.update, id );
-//
-// return this.api._ajax( url, {
-// type : 'PUT',
-// data : payload
-// });
-//};
+WorkflowsAPI.prototype.create = function create( payload ){
+ this.api.spaceghost.info( 'workflows.create: ' + [ this.api.spaceghost.jsonStr( payload ) ] );
-WorkflowsAPI.prototype.upload = function upload( filepath ){
- this.api.spaceghost.info( 'workflows.show: ' + [ id ] );
- var data = {};
+ // py.payload <-> ajax.data
+ payload = this.api.ensureObject( payload );
+ return this.api._ajax( utils.format( this.urlTpls.create ), {
+ type : 'POST',
+ data : payload
+ });
+};
- id = ( id === 'most_recently_used' )?( id ):( this.api.ensureId( id ) );
- return this.api._ajax( utils.format( this.urlTpls.show, this.api.ensureId( id ) ), {
- data : data
+WorkflowsAPI.prototype.upload = function upload( workflowJSON ){
+ this.api.spaceghost.info( 'workflows.upload: ' + [ this.api.spaceghost.jsonStr( workflowJSON ) ] );
+
+ return this.api._ajax( utils.format( this.urlTpls.upload ), {
+ type : 'POST',
+ data : { 'workflow': this.api.ensureObject( workflowJSON ) }
});
};
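Taken together with the test file above, the two re-enabled helpers are used roughly like this inside a spaceghost.then() step (a sketch only; workflowJSON and executionData stand in for the objects built in the test, such as workflowToUpload and the ds_map payload):

    spaceghost.then( function(){
        // POST the workflow JSON object; _ajax now JSON-stringifies POST data
        // as well as PUT data, per the change at the top of this file.
        var uploaded = this.api.workflows.upload( workflowJSON );
        this.debug( this.jsonStr( uploaded ) );

        // POST workflow_id / history / ds_map to run the workflow.
        var returned = this.api.workflows.create( executionData );
        this.debug( this.jsonStr( returned ) );
    });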
diff -r a27856f42a616378bf5b152a06707d1d8c24e66d -r 6b996d66ce37e20de68d6f02f32355c026a5324b test/casperjs/modules/tools.js
--- a/test/casperjs/modules/tools.js
+++ b/test/casperjs/modules/tools.js
@@ -118,7 +118,6 @@
});
},
function timeoutWaitingForUploadRefreshes( urlsStillWaitingOn ){
- this.capture( 'upload-error.png' )
throw new this.GalaxyError( 'Upload Error: '
+ 'timeout waiting for upload "' + filepath + '" refreshes: ' + urlsStillWaitingOn );
},
@@ -178,7 +177,6 @@
// capture any other messages on the page
var otherInfo = spaceghost.elementInfoOrNull( this.data.selectors.messages.all ),
message = ( otherInfo && otherInfo.text )?( otherInfo.text ):( '' );
- this.capture( 'upload-error.png' )
throw new this.GalaxyError( 'Upload Error: no success message uploading "' + filepath + '": ' + message );
}
});
@@ -191,7 +189,6 @@
if( hdaElement === null ){
var hdaContainer = this.historypanel.data.selectors.hdaContainer;
this.warning( 'Upload Error: ' + hdaContainer + ':\n' + this.getHTML( hdaContainer ) );
- this.capture( 'upload-error.png' )
throw new this.GalaxyError( 'Upload Error: uploaded file HDA not found: ' + uploadInfo.filename );
}
this.debug( 'uploaded HDA element: ' + this.jsonStr( this.quickInfo( hdaElement ) ) );
@@ -206,7 +203,6 @@
}, function timeoutFn( newHdaInfo ){
this.warning( 'timeout waiting for upload:\n' + this.jsonStr( this.quickInfo( newHdaInfo ) ) );
- this.capture( 'upload-error.png' )
throw new spaceghost.GalaxyError( 'Upload Error: timeout waiting for ok state: '
+ '"' + uploadInfo.filepath + '" (waited ' + timeoutAfterMs + ' ms)' );
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Trackster: refactoring to use dataset attributes rather than .dataset_id and .hda_ldda
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a27856f42a61/
Changeset: a27856f42a61
User: jgoecks
Date: 2013-04-30 15:12:18
Summary: Trackster: refactoring to use dataset attributes rather than .dataset_id and .hda_ldda
Affected #: 1 file
diff -r f8d07c98812903dc98de2665874a2c31b65b84da -r a27856f42a616378bf5b152a06707d1d8c24e66d static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -1534,7 +1534,7 @@
if (this.overview_drawable) {
// If drawable to be set as overview is already in overview, do nothing.
// Otherwise, remove overview.
- if (this.overview_drawable.dataset_id === drawable.dataset_id) {
+ if (this.overview_drawable.dataset.id === drawable.dataset.id) {
return;
}
this.overview_viewport.find(".track").remove();
@@ -1712,7 +1712,7 @@
tool.run(
// URL params.
{
- target_dataset_id: this.track.dataset_id,
+ target_dataset_id: this.track.dataset.id,
action: 'rerun',
tool_id: tool.id
},
@@ -1741,7 +1741,7 @@
}),
url_params =
{
- target_dataset_id: this.track.dataset_id,
+ target_dataset_id: this.track.dataset.id,
action: 'rerun',
tool_id: this.id,
regions: [
@@ -2249,8 +2249,6 @@
// Attribute init.
//
this.dataset = new data.Dataset(obj_dict.dataset);
- this.dataset_id = this.dataset.get('id');
- this.hda_ldda = this.dataset.get('hda_ldda');
this.dataset_check_type = 'converted_datasets_state';
this.data_url_extra_params = {};
this.data_query_wait = ('data_query_wait' in obj_dict ? obj_dict.data_query_wait : DEFAULT_DATA_QUERY_WAIT);
@@ -2395,8 +2393,8 @@
window.location.href =
galaxy_paths.get('sweepster_url') + "?" +
$.param({
- dataset_id: track.dataset_id,
- hda_ldda: track.hda_ldda,
+ dataset_id: track.dataset.id,
+ hda_ldda: track.dataset.get('hda_ldda'),
regions: JSON.stringify(new Backbone.Collection(regions).toJSON())
});
},
@@ -2420,7 +2418,7 @@
],
can_draw: function() {
- if ( this.dataset_id && Drawable.prototype.can_draw.call(this) ) {
+ if ( this.dataset.id && Drawable.prototype.can_draw.call(this) ) {
return true;
}
return false;
@@ -2444,8 +2442,6 @@
set_dataset: function(dataset) {
this.dataset = dataset;
this.data_manager.set('dataset', dataset);
- this.dataset_id = dataset.get('id');
- this.hda_ldda = dataset.get('hda_ldda');
},
/**
@@ -2601,7 +2597,7 @@
// Tracks with no dataset id are handled differently.
// FIXME: is this really necessary?
//
- if (!track.dataset_id) {
+ if (!track.dataset.id) {
return;
}
@@ -2609,7 +2605,7 @@
// about track status.
var init_deferred = $.Deferred(),
params = {
- hda_ldda: track.hda_ldda,
+ hda_ldda: track.dataset.get('hda_ldda'),
data_type: this.dataset_check_type,
chrom: track.view.chrom,
retry: retry
@@ -2771,8 +2767,8 @@
return {
"track_type": this.get_type(),
"name": this.name,
- "hda_ldda": this.hda_ldda,
- "dataset_id": this.dataset_id,
+ "hda_ldda": this.dataset.get('hda_ldda'),
+ "dataset_id": this.dataset.id,
"prefs": this.prefs,
"mode": this.mode,
"filters": this.filters_manager.to_dict(),
@@ -3200,7 +3196,7 @@
// Reset data URL when dataset indexing has completed/when not pending.
var ss_deferred = new util.ServerStateDeferred({
url: self.dataset_state_url,
- url_params: {dataset_id : self.dataset_id, hda_ldda: self.hda_ldda},
+ url_params: {dataset_id : self.dataset.id, hda_ldda: self.dataset.get('hda_ldda')},
interval: self.data_query_wait,
// Set up deferred to check dataset state until it is not pending.
success_fn: function(result) { return result !== "pending"; }
@@ -3651,7 +3647,7 @@
*/
set_min_value: function(new_val) {
this.prefs.min_value = new_val;
- $('#linetrack_' + this.dataset_id + '_minval').text(this.prefs.min_value);
+ $('#linetrack_' + this.dataset.id + '_minval').text(this.prefs.min_value);
this.tile_cache.clear();
this.request_draw();
},
@@ -3661,7 +3657,7 @@
*/
set_max_value: function(new_val) {
this.prefs.max_value = new_val;
- $('#linetrack_' + this.dataset_id + '_maxval').text(this.prefs.max_value);
+ $('#linetrack_' + this.dataset.id + '_maxval').text(this.prefs.max_value);
this.tile_cache.clear();
this.request_draw();
},
@@ -3671,7 +3667,7 @@
track.vertical_range = undefined;
return $.getJSON( track.dataset.url(),
{ data_type: 'data', stats: true, chrom: track.view.chrom, low: 0,
- high: track.view.max_high, hda_ldda: track.hda_ldda }, function(result) {
+ high: track.view.max_high, hda_ldda: track.dataset.get('hda_ldda') }, function(result) {
track.container_div.addClass( "line-track" );
var data = result.data;
if ( isNaN(parseFloat(track.prefs.min_value)) || isNaN(parseFloat(track.prefs.max_value)) ) {
@@ -3689,8 +3685,8 @@
// FIXME: we should probably only save this when the user explicately sets it
// since we lose the ability to compute it on the fly (when changing
// chromosomes for example).
- $('#track_' + track.dataset_id + '_minval').val(track.prefs.min_value);
- $('#track_' + track.dataset_id + '_maxval').val(track.prefs.max_value);
+ $('#track_' + track.dataset.id + '_minval').val(track.prefs.min_value);
+ $('#track_' + track.dataset.id + '_maxval').val(track.prefs.max_value);
}
track.vertical_range = track.prefs.max_value - track.prefs.min_value;
track.total_frequency = data.total_frequency;
@@ -3710,7 +3706,7 @@
}
},
help_text: "Set min value"
- }).addClass('yaxislabel bottom').attr("id", 'linetrack_' + track.dataset_id + '_minval')
+ }).addClass('yaxislabel bottom').attr("id", 'linetrack_' + track.dataset.id + '_minval')
.prependTo(track.container_div),
max_label = $("<div/>").text(round(track.prefs.max_value, 3)).make_text_editable({
num_cols: 6,
@@ -3722,7 +3718,7 @@
}
},
help_text: "Set max value"
- }).addClass('yaxislabel top').attr("id", 'linetrack_' + track.dataset_id + '_maxval')
+ }).addClass('yaxislabel top').attr("id", 'linetrack_' + track.dataset.id + '_maxval')
.prependTo(track.container_div);
});
},
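In short, callers that previously read the cached track attributes now go through the track's Backbone Dataset model directly; a before/after sketch of the pattern (variable names are illustrative only):

    // Before this changeset: ids were copied onto the track at init time.
    var oldId      = track.dataset_id;
    var oldHdaLdda = track.hda_ldda;

    // After: read them from the Dataset model, so they stay in sync when
    // set_dataset() swaps the underlying dataset.
    var newId      = track.dataset.id;
    var newHdaLdda = track.dataset.get('hda_ldda');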
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Add more granular lists of writable repositories for the current user: repositories with missing tool test components, repositories with failing tool tests and repositories with no failing tool tests.
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f8d07c988129/
Changeset: f8d07c988129
User: greg
Date: 2013-04-29 23:09:44
Summary: Add more granular lists of writable repositories for the current user: repositories with missing tool test components, repositories with failing tool tests and repositories with no failing tool tests.
Affected #: 8 files
diff -r 8264c77e4f07c0dee932add796f5ee7ce02245b3 -r f8d07c98812903dc98de2665874a2c31b65b84da lib/galaxy/webapps/tool_shed/buildapp.py
--- a/lib/galaxy/webapps/tool_shed/buildapp.py
+++ b/lib/galaxy/webapps/tool_shed/buildapp.py
@@ -77,19 +77,19 @@
# Add the web API.
# A good resource for RESTful services - http://routes.readthedocs.org/en/latest/restful.html
webapp.add_api_controllers( 'galaxy.webapps.tool_shed.api', app )
webapp.mapper.resource( 'repository',
- 'repositories',
- controller='repositories',
- collection={ 'get_repository_revision_install_info' : 'GET',
- 'get_ordered_installable_revisions' : 'GET' },
- name_prefix='repository_',
- path_prefix='/api',
- parent_resources=dict( member_name='repository', collection_name='repositories' ) )
+ 'repositories',
+ controller='repositories',
+ collection={ 'get_repository_revision_install_info' : 'GET',
+ 'get_ordered_installable_revisions' : 'GET' },
+ name_prefix='repository_',
+ path_prefix='/api',
+ parent_resources=dict( member_name='repository', collection_name='repositories' ) )
webapp.mapper.resource( 'repository_revision',
- 'repository_revisions',
- controller='repository_revisions',
- name_prefix='repository_revision_',
- path_prefix='/api',
- parent_resources=dict( member_name='repository_revision', collection_name='repository_revisions' ) )
+ 'repository_revisions',
+ controller='repository_revisions',
+ name_prefix='repository_revision_',
+ path_prefix='/api',
+ parent_resources=dict( member_name='repository_revision', collection_name='repository_revisions' ) )
webapp.finalize_config()
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/