galaxy-commits
Threads by month
- ----- 2025 -----
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
March 2013
- 1 participant
- 183 discussions

commit/galaxy-central: dan: Move build_tool_version_select_field() to ToolBox, to follow move of get_tool_components() in a8caad109542661a7222c504a529ab5afbe303fb.
by commits-noreply@bitbucket.org 13 Mar '13
by commits-noreply@bitbucket.org 13 Mar '13
13 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5a17ab5f5466/
changeset: 5a17ab5f5466
user: dan
date: 2013-03-13 18:50:11
summary: Move build_tool_version_select_field() to ToolBox, to follow move of get_tool_components() in a8caad109542661a7222c504a529ab5afbe303fb.
affected #: 2 files
diff -r 8a3d271d79bb6736b88b76918e4820d771c12250 -r 5a17ab5f5466eaf61e80d6efda90c394c601f4ad lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -56,6 +56,7 @@
from galaxy.util.template import fill_template
from galaxy.visualization.genome.visual_analytics import TracksterConfig
from galaxy.web import url_for
+from galaxy.web.form_builder import SelectField
from tool_shed.util import shed_util_common
log = logging.getLogger( __name__ )
@@ -425,7 +426,23 @@
tool_version_select_field = self.build_tool_version_select_field( tools, tool.id, set_selected )
break
return tool_version_select_field, tools, tool
-
+
+ def build_tool_version_select_field( self, tools, tool_id, set_selected ):
+ """Build a SelectField whose options are the ids for the received list of tools."""
+ options = []
+ refresh_on_change_values = []
+ for tool in tools:
+ options.insert( 0, ( tool.version, tool.id ) )
+ refresh_on_change_values.append( tool.id )
+ select_field = SelectField( name='tool_id', refresh_on_change=True, refresh_on_change_values=refresh_on_change_values )
+ for option_tup in options:
+ selected = set_selected and option_tup[1] == tool_id
+ if selected:
+ select_field.add_option( 'version %s' % option_tup[0], option_tup[1], selected=True )
+ else:
+ select_field.add_option( 'version %s' % option_tup[0], option_tup[1] )
+ return select_field
+
def load_tool_tag_set( self, elem, panel_dict, integrated_panel_dict, tool_path, load_panel_dict, guid=None, index=None ):
try:
path = elem.get( "file" )
diff -r 8a3d271d79bb6736b88b76918e4820d771c12250 -r 5a17ab5f5466eaf61e80d6efda90c394c601f4ad lib/galaxy/webapps/galaxy/controllers/tool_runner.py
--- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
+++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
@@ -14,7 +14,6 @@
from galaxy.util.hash_util import is_hashable
from galaxy.web import error, url_for
from galaxy.web.base.controller import BaseUIController
-from galaxy.web.form_builder import SelectField
log = logging.getLogger( __name__ )
@@ -219,21 +218,7 @@
add_frame=add_frame,
tool_id_version_message=tool_id_version_message,
**vars )
- def build_tool_version_select_field( self, tools, tool_id, set_selected ):
- """Build a SelectField whose options are the ids for the received list of tools."""
- options = []
- refresh_on_change_values = []
- for tool in tools:
- options.insert( 0, ( tool.version, tool.id ) )
- refresh_on_change_values.append( tool.id )
- select_field = SelectField( name='tool_id', refresh_on_change=True, refresh_on_change_values=refresh_on_change_values )
- for option_tup in options:
- selected = set_selected and option_tup[1] == tool_id
- if selected:
- select_field.add_option( 'version %s' % option_tup[0], option_tup[1], selected=True )
- else:
- select_field.add_option( 'version %s' % option_tup[0], option_tup[1] )
- return select_field
+
@web.expose
def redirect( self, trans, redirect_url=None, **kwd ):
if not redirect_url:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dan: Handle uninstalling Data Manager from Tool Shed corner case, where two different instantiations of a Data Manager have been installed, e.g. due to a new installable changeset revision.
by commits-noreply@bitbucket.org 13 Mar '13
by commits-noreply@bitbucket.org 13 Mar '13
13 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8a3d271d79bb/
changeset: 8a3d271d79bb
user: dan
date: 2013-03-13 18:26:31
summary: Handle uninstalling Data Manager from Tool Shed corner case, where two different instantiations of a Data Manager have been installed, e.g. due to a new installable changeset revision.
affected #: 1 file
diff -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 -r 8a3d271d79bb6736b88b76918e4820d771c12250 lib/tool_shed/util/shed_util.py
--- a/lib/tool_shed/util/shed_util.py
+++ b/lib/tool_shed/util/shed_util.py
@@ -1574,15 +1574,33 @@
root = tree.getroot()
assert root.tag == 'data_managers', 'The file provided (%s) for removing data managers from is not a valid data manager xml file.' % ( shed_data_manager_conf_filename )
guids = [ data_manager_dict.get( 'guid' ) for data_manager_dict in metadata_dict.get( 'data_manager', {} ).get( 'data_managers', {} ).itervalues() if 'guid' in data_manager_dict ]
+ load_old_data_managers_by_guid = {}
data_manager_config_has_changes = False
config_elems = []
for elem in root:
- if elem.tag != 'data_manager' or elem.get( 'guid', None ) not in guids:
+ # Match Data Manager elements by guid and installed_changeset_revision
+ elem_matches_removed_data_manager = False
+ if elem.tag == 'data_manager':
+ guid = elem.get( 'guid', None )
+ if guid in guids:
+ tool_elem = elem.find( 'tool' )
+ if tool_elem is not None:
+ installed_changeset_revision_elem = tool_elem.find( 'installed_changeset_revision' )
+ if installed_changeset_revision_elem is not None:
+ if installed_changeset_revision_elem.text == repository.installed_changeset_revision:
+ elem_matches_removed_data_manager = True
+ else:
+ # This is a different version, which had been previously overridden
+ load_old_data_managers_by_guid[ guid ] = elem
+ if elem_matches_removed_data_manager:
+ data_manager_config_has_changes = True
+ else:
config_elems.append( elem )
- else:
- data_manager_config_has_changes = True
- #remove data managers from in memory
+ # Remove data managers from in memory
app.data_managers.remove_manager( guids )
+ # Load other versions of any now uninstalled data managers, if any
+ for elem in load_old_data_managers_by_guid.itervalues():
+ app.data_managers.load_manager_from_elem( elem )
# Persist the altered shed_data_manager_config file.
if data_manager_config_has_changes:
suc.data_manager_config_elems_to_xml_file( app, config_elems, shed_data_manager_conf_filename )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/34dfad3b8811/
changeset: 34dfad3b8811
user: inithello
date: 2013-03-13 17:45:47
summary: Shut down the update manager when the galaxy UniverseApplication's shutdown method is called.
affected #: 1 file
diff -r 1a68cc9626220afa542f0ac00951511e7eae56bf -r 34dfad3b8811968a7988de90817f499dc7ee860c lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -106,6 +106,8 @@
if self.config.get_bool( 'enable_tool_shed_check', False ):
from tool_shed.galaxy_install import update_manager
self.update_manager = update_manager.UpdateManager( self )
+ else:
+ self.update_manager = None
# Load proprietary datatype converters and display applications.
self.installed_repository_manager.load_proprietary_converters_and_display_applications()
# Load datatype display applications defined in local datatypes_conf.xml
@@ -166,6 +168,8 @@
self.object_store.shutdown()
if self.heartbeat:
self.heartbeat.shutdown()
+ if self.update_manager:
+ self.update_manager.shutdown()
try:
# If the datatypes registry was persisted, attempt to
# remove the temporary file in which it was written.
https://bitbucket.org/galaxy/galaxy-central/commits/9159442d180f/
changeset: 9159442d180f
user: inithello
date: 2013-03-13 17:48:55
summary: Add repository counts to install and test repository summary output. Add functional tests for repositories with datatype converters. Add functional tests for browsing Galaxy utilities. Add functional tests for the Galaxy update manager. Enhance functional tests to verify behavior when a repository is deleted. Change code style to be compliant with Galaxy team standards.
affected #: 13 files
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -754,15 +754,15 @@
if repositories_tested > 0:
if repositories_passed:
print '# ----------------------------------------------------------------------------------'
- print "# Repositories passed:"
+ print "# %d repositories passed:" % len( repositories_passed )
show_summary_output( repositories_passed )
if repositories_failed:
print '# ----------------------------------------------------------------------------------'
- print "# Repositories failed:"
+ print "# %d repositories failed:" % len( repositories_failed )
show_summary_output( repositories_failed )
if repositories_failed_install:
print '# ----------------------------------------------------------------------------------'
- print "# Repositories not installed correctly:"
+ print "# %d repositories not installed correctly:" % len( repositories_failed_install )
show_summary_output( repositories_failed_install )
print "####################################################################################"
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/tool_shed/base/test_db_util.py
--- a/test/tool_shed/base/test_db_util.py
+++ b/test/tool_shed/base/test_db_util.py
@@ -9,15 +9,19 @@
def delete_obj( obj ):
sa_session.delete( obj )
sa_session.flush()
+
def delete_user_roles( user ):
for ura in user.roles:
sa_session.delete( ura )
sa_session.flush()
+
def flush( obj ):
sa_session.add( obj )
sa_session.flush()
+
def get_all_repositories():
return sa_session.query( model.Repository ).all()
+
def get_all_installed_repositories( actually_installed=False ):
if actually_installed:
return ga_session.query( galaxy.model.ToolShedRepository ) \
@@ -27,38 +31,46 @@
.all()
else:
return ga_session.query( galaxy.model.ToolShedRepository ).all()
+
def get_category_by_name( name ):
return sa_session.query( model.Category ) \
.filter( model.Category.table.c.name == name ) \
.first()
+
def get_default_user_permissions_by_role( role ):
return sa_session.query( model.DefaultUserPermissions ) \
.filter( model.DefaultUserPermissions.table.c.role_id == role.id ) \
.all()
+
def get_default_user_permissions_by_user( user ):
return sa_session.query( model.DefaultUserPermissions ) \
.filter( model.DefaultUserPermissions.table.c.user_id==user.id ) \
.all()
+
def get_galaxy_repository_by_name_owner_changeset_revision( repository_name, owner, changeset_revision ):
return ga_session.query( galaxy.model.ToolShedRepository ) \
.filter( and_( galaxy.model.ToolShedRepository.table.c.name == repository_name,
galaxy.model.ToolShedRepository.table.c.owner == owner,
galaxy.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
.first()
+
def get_installed_repository_by_id( repository_id ):
return ga_session.query( galaxy.model.ToolShedRepository ) \
.filter( galaxy.model.ToolShedRepository.table.c.id == repository_id ) \
.first()
+
def get_installed_repository_by_name_owner( repository_name, owner ):
return ga_session.query( galaxy.model.ToolShedRepository ) \
.filter( and_( galaxy.model.ToolShedRepository.table.c.name == repository_name,
galaxy.model.ToolShedRepository.table.c.owner == owner ) ) \
.first()
+
def get_private_role( user ):
for role in user.all_roles():
if role.name == user.email and role.description == 'Private Role for %s' % user.email:
return role
raise AssertionError( "Private role not found for user '%s'" % user.email )
+
def get_repository_reviews( repository_id, reviewer_user_id=None, changeset_revision=None ):
if reviewer_user_id and changeset_revision:
reviews = sa_session.query( model.RepositoryReview ) \
@@ -79,6 +91,7 @@
model.RepositoryReview.table.c.deleted == False ) ) \
.all()
return reviews
+
def get_reviews_ordered_by_changeset_revision( repository_id, changelog_tuples, reviewer_user_id=None ):
reviews = get_repository_reviews( repository_id, reviewer_user_id=reviewer_user_id )
ordered_reviews = []
@@ -87,16 +100,25 @@
if str( review.changeset_revision ) == str( changeset_hash ):
ordered_reviews.append( review )
return ordered_reviews
+
def get_repository_by_id( repository_id ):
return sa_session.query( model.Repository ) \
.filter( model.Repository.table.c.id == repository_id ) \
.first()
+
def get_repository_downloadable_revisions( repository_id ):
revisions = sa_session.query( model.RepositoryMetadata ) \
.filter( and_( model.RepositoryMetadata.table.c.repository_id == repository_id,
model.RepositoryMetadata.table.c.downloadable == True ) ) \
.all()
return revisions
+def get_repository_metadata_for_changeset_revision( repository_id, changeset_revision ):
+ repository_metadata = sa_session.query( model.RepositoryMetadata ) \
+ .filter( and_( model.RepositoryMetadata.table.c.repository_id == repository_id,
+ model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
+ .first()
+ return repository_metadata
+
def get_repository_review_by_user_id_changeset_revision( user_id, repository_id, changeset_revision ):
review = sa_session.query( model.RepositoryReview ) \
.filter( and_( model.RepositoryReview.table.c.user_id == user_id,
@@ -104,35 +126,44 @@
model.RepositoryReview.table.c.changeset_revision == changeset_revision ) ) \
.first()
return review
+
def get_role_by_name( role_name ):
return sa_session.query( model.Role ) \
.filter( model.Role.table.c.name == role_name ) \
.first()
+
def get_user( email ):
return sa_session.query( model.User ) \
.filter( model.User.table.c.email==email ) \
.first()
+
def get_user_by_name( username ):
return sa_session.query( model.User ) \
.filter( model.User.table.c.username==username ) \
.first()
+
def mark_obj_deleted( obj ):
obj.deleted = True
sa_session.add( obj )
sa_session.flush()
+
def refresh( obj ):
sa_session.refresh( obj )
+
def ga_refresh( obj ):
ga_session.refresh( obj )
+
def get_galaxy_private_role( user ):
for role in user.all_roles():
if role.name == user.email and role.description == 'Private Role for %s' % user.email:
return role
raise AssertionError( "Private role not found for user '%s'" % user.email )
+
def get_galaxy_user( email ):
return ga_session.query( galaxy.model.User ) \
.filter( galaxy.model.User.table.c.email==email ) \
.first()
+
def get_repository_by_name_and_owner( name, owner_username ):
owner = get_user_by_name( owner_username )
repository = sa_session.query( model.Repository ) \
@@ -140,6 +171,7 @@
model.Repository.table.c.user_id == owner.id ) ) \
.first()
return repository
+
def get_repository_metadata_by_repository_id_changeset_revision( repository_id, changeset_revision ):
repository_metadata = sa_session.query( model.RepositoryMetadata ) \
.filter( and_( model.RepositoryMetadata.table.c.repository_id == repository_id,
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -11,7 +11,9 @@
log = logging.getLogger( __name__ )
+
class ShedTwillTestCase( TwillTestCase ):
+
def setUp( self ):
# Security helper
self.security = security.SecurityHelper( id_secret='changethisinproductiontoo' )
@@ -35,36 +37,65 @@
self.galaxy_tool_dependency_dir = os.environ.get( 'GALAXY_TEST_TOOL_DEPENDENCY_DIR' )
self.shed_tools_dict = {}
self.home()
+
def add_repository_review_component( self, **kwd ):
url = '/repository_review/create_component?operation=create'
self.visit_url( url )
self.submit_form( 1, 'create_component_button', **kwd )
+
def browse_category( self, category, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/browse_valid_categories?sort=name&operation=valid_repositories_by_category&id=%s' % \
self.security.encode_id( category.id )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def browse_component_review( self, review, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository_review/browse_review?id=%s' % self.security.encode_id( review.id )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
+ def browse_custom_datatypes( self, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/repository/browse_datatypes'
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+
def browse_repository( self, repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/browse_repository?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
+ def browse_repository_dependencies( self, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/repository/browse_repository_dependencies'
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+
def browse_tool_shed( self, url, strings_displayed=[], strings_not_displayed=[] ):
self.visit_galaxy_url( '/admin_toolshed/browse_tool_shed?tool_shed_url=%s' % url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
+ def browse_tool_dependencies( self, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/repository/browse_tool_dependencies'
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+
+ def browse_tools( self, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/repository/browse_tools'
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+
def check_count_of_metadata_revisions_associated_with_repository( self, repository, metadata_count ):
self.check_repository_changelog( repository )
self.check_string_count_in_page( 'Repository metadata is associated with this change set.', metadata_count )
+
def check_for_valid_tools( self, repository, strings_displayed=[], strings_not_displayed=[] ):
strings_displayed.append( 'Valid tools' )
self.display_manage_repository_page( repository, strings_displayed, strings_not_displayed )
+
def check_galaxy_repository_db_status( self, repository_name, owner, expected_status ):
installed_repository = test_db_util.get_installed_repository_by_name_owner( repository_name, owner )
assert installed_repository.status == expected_status, 'Status in database is %s, expected %s' % \
( installed_repository.status, expected_status )
+
def check_galaxy_repository_tool_panel_section( self, repository, expected_tool_panel_section ):
metadata = repository.metadata
assert 'tools' in metadata, 'Tools not found in metadata: %s' % metadata
@@ -83,6 +114,7 @@
tool_panel_section = tool_panel_section_metadata[ tool_guid ][ 0 ][ 'name' ]
assert tool_panel_section == expected_tool_panel_section, 'Expected tool panel section %s, found %s\nMetadata: %s\n' % \
( expected_tool_panel_section, tool_panel_section, metadata )
+
def check_installed_repository_tool_dependencies( self,
installed_repository,
strings_displayed=[],
@@ -100,15 +132,18 @@
url = '/admin_toolshed/manage_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def check_repository_changelog( self, repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/view_changelog?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def check_repository_dependency( self, repository, depends_on_repository, depends_on_changeset_revision=None, changeset_revision=None ):
strings_displayed = [ depends_on_repository.name, depends_on_repository.user.username ]
if depends_on_changeset_revision:
strings_displayed.append( depends_on_changeset_revision )
self.display_manage_repository_page( repository, changeset_revision=changeset_revision, strings_displayed=strings_displayed )
+
def check_repository_metadata( self, repository, tip_only=True ):
if tip_only:
assert self.tip_has_metadata( repository ) and len( self.get_repository_metadata_revisions( repository ) ) == 1, \
@@ -117,7 +152,8 @@
assert len( self.get_repository_metadata_revisions( repository ) ) > 0, \
'Repository tip is not a metadata revision: Repository tip - %s, metadata revisions - %s.' % \
( self.get_repository_tip( repository ), ', '.join( self.get_repository_metadata_revisions( repository ) ) )
- def check_repository_tools_for_changeset_revision( self, repository, changeset_revision ):
+
+ def check_repository_tools_for_changeset_revision( self, repository, changeset_revision, tool_metadata_strings_displayed=[], tool_page_strings_displayed=[] ):
'''
Loop through each tool dictionary in the repository metadata associated with the received changeset_revision.
For each of these, check for a tools attribute, and load the tool metadata page if it exists, then display that tool's page.
@@ -128,20 +164,17 @@
if 'tools' not in metadata:
raise AssertionError( 'No tools in %s revision %s.' % ( repository.name, changeset_revision ) )
for tool_dict in metadata[ 'tools' ]:
- metadata_strings_displayed = [ tool_dict[ 'guid' ],
- tool_dict[ 'version' ],
- tool_dict[ 'id' ],
- tool_dict[ 'name' ],
- tool_dict[ 'description' ],
- changeset_revision ]
+ tool_id = tool_dict[ 'id' ]
+ tool_xml = tool_dict[ 'tool_config' ]
url = '/repository/view_tool_metadata?repository_id=%s&changeset_revision=%s&tool_id=%s' % \
- ( self.security.encode_id( repository.id ), changeset_revision, tool_dict[ 'id' ] )
+ ( self.security.encode_id( repository.id ), changeset_revision, tool_id )
self.visit_url( url )
- self.check_for_strings( metadata_strings_displayed )
- self.load_display_tool_page( repository, tool_xml_path=tool_dict[ 'tool_config' ],
+ self.check_for_strings( tool_metadata_strings_displayed )
+ self.load_display_tool_page( repository, tool_xml_path=tool_xml,
changeset_revision=changeset_revision,
- strings_displayed=[ '%s (version %s)' % ( tool_dict[ 'name' ], tool_dict[ 'version' ] ) ],
+ strings_displayed=tool_page_strings_displayed,
strings_not_displayed=[] )
+
def check_repository_invalid_tools_for_changeset_revision( self, repository, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
'''Load the invalid tool page for each invalid tool associated with this changeset revision and verify the received error messages.'''
repository_metadata = self.get_repository_metadata_by_changeset_revision( repository, changeset_revision )
@@ -153,6 +186,7 @@
changeset_revision=changeset_revision,
strings_displayed=strings_displayed,
strings_not_displayed=strings_not_displayed )
+
def check_string_count_in_page( self, pattern, min_count, max_count=None ):
"""Checks the number of 'pattern' occurrences in the current browser page"""
page = self.last_page()
@@ -167,6 +201,7 @@
errmsg = "%i occurrences of '%s' found (min. %i, max. %i).\npage content written to '%s' " % \
( pattern_count, pattern, min_count, max_count, fname )
raise AssertionError( errmsg )
+
def create_category( self, **kwd ):
category = test_db_util.get_category_by_name( kwd[ 'name' ] )
if category is None:
@@ -174,6 +209,7 @@
self.submit_form( form_no=1, button="create_category_button", **kwd )
category = test_db_util.get_category_by_name( kwd[ 'name' ] )
return category
+
def create_checkbox_query_string( self, field_name, value ):
'''
From galaxy.web.form_builder.CheckboxField:
@@ -197,6 +233,7 @@
return '%s=%s&%s=%s' % ( field_name, field_value, field_name, field_value )
else:
return '%s=%s' % ( field_name, field_value )
+
def create_repository_complex_dependency( self, repository, xml_filename, depends_on={} ):
self.generate_repository_dependency_xml( depends_on[ 'repositories' ],
xml_filename,
@@ -207,6 +244,7 @@
'tool_dependencies.xml',
filepath=os.path.split( xml_filename )[0],
commit_message='Uploaded dependency on %s.' % ', '.join( repo.name for repo in depends_on[ 'repositories' ] ) )
+
def create_repository_dependency( self, repository=None, depends_on=[], filepath=None ):
dependency_description = '%s depends on %s.' % ( repository.name, ', '.join( repo.name for repo in depends_on ) )
self.generate_repository_dependency_xml( depends_on,
@@ -221,6 +259,7 @@
commit_message='Uploaded dependency on %s.' % ', '.join( repo.name for repo in depends_on ),
strings_displayed=[],
strings_not_displayed=[] )
+
def create_repository_review( self, repository, review_contents_dict, changeset_revision=None, copy_from=None):
strings_displayed = []
if not copy_from:
@@ -242,6 +281,7 @@
( self.get_repository_tip( repository ), self.security.encode_id( repository.id ), self.security.encode_id( review_id ) )
self.visit_url( url )
self.fill_review_form( review_contents_dict, strings_displayed, strings_not_displayed )
+
def create_user_in_galaxy( self, cntrller='user', email='test@bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
self.visit_galaxy_url( "/user/create?cntrller=%s&use_panels=False" % cntrller )
tc.fv( '1', 'email', email )
@@ -272,6 +312,7 @@
except:
pass
return previously_created, username_taken, invalid_username
+
def delete_files_from_repository( self, repository, filenames=[], strings_displayed=[ 'were deleted from the repository' ], strings_not_displayed=[] ):
files_to_delete = []
basepath = self.get_repo_path( repository )
@@ -287,24 +328,38 @@
tc.fv( "1", "selected_files_to_delete", ','.join( files_to_delete ) )
tc.submit( 'select_files_to_delete_button' )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
+ def delete_repository( self, repository ):
+ repository_id = self.security.encode_id( repository.id )
+ self.visit_url( '/admin/browse_repositories' )
+ url = '/admin/browse_repositories?operation=Delete&id=%s' % repository_id
+ strings_displayed = [ 'Deleted 1 repository', repository.name ]
+ strings_not_displayed = []
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+
def display_all_workflows( self, strings_displayed=[], strings_not_displayed=[] ):
url = '/workflow'
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def display_galaxy_browse_repositories_page( self, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin_toolshed/browse_repositories'
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def display_installed_repository_manage_page( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin_toolshed/manage_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
strings_displayed.append( installed_repository.installed_changeset_revision )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def display_installed_workflow_image( self, repository, workflow_name, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin_toolshed/generate_workflow_image?repository_id=%s&workflow_name=%s' % \
( self.security.encode_id( repository.id ), tool_shed_encode( workflow_name ) )
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def display_manage_repository_page( self, repository, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
base_url = '/repository/manage_repository?id=%s' % self.security.encode_id( repository.id )
if changeset_revision:
@@ -314,10 +369,12 @@
url = base_url
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def display_repository_clone_page( self, owner_name, repository_name, strings_displayed=[], strings_not_displayed=[] ):
url = '/repos/%s/%s' % ( owner_name, repository_name )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def display_repository_file_contents( self, repository, filename, filepath=None, strings_displayed=[], strings_not_displayed=[] ):
'''Find a file in the repository and display the contents.'''
basepath = self.get_repo_path( repository )
@@ -331,10 +388,12 @@
url = '/repository/get_file_contents?file_path=%s' % os.path.join( relative_path, filename )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def display_reviewed_repositories_owned_by_user( self, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository_review/reviewed_repositories_i_own'
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def edit_repository_categories( self, repository, categories_to_add=[], categories_to_remove=[], restore_original=True ):
url = '/repository/manage_repository?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
@@ -359,10 +418,12 @@
strings_not_displayed.append( "selected>%s" % category )
tc.submit( "manage_categories_button" )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def display_repository_reviews_by_user( self, user, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository_review/repository_reviews_by_user?id=%s' % self.security.encode_id( user.id )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def edit_repository_information( self, repository, **kwd ):
url = '/repository/manage_repository?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
@@ -381,6 +442,7 @@
strings_displayed.append( self.escape_html( original_information[ input_elem_name ] ) )
tc.submit( "edit_repository_button" )
self.check_for_strings( strings_displayed )
+
def escape_html( self, string, unescape=False ):
html_entities = [ ('&', 'X' ), ( "'", ''' ), ( '"', '"' ) ]
for character, replacement in html_entities:
@@ -389,6 +451,7 @@
else:
string = string.replace( character, replacement )
return string
+
def fill_review_form( self, review_contents_dict, strings_displayed=[], strings_not_displayed=[] ):
kwd = dict()
for label, contents in review_contents_dict.items():
@@ -404,6 +467,7 @@
self.submit_form( 1, 'Workflows__ESEP__review_button', **kwd )
strings_displayed.append( 'Reviews were saved' )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def galaxy_login( self, email='test(a)bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
previously_created, username_taken, invalid_username = \
self.create_user_in_galaxy( email=email, password=password, username=username, redirect=redirect )
@@ -413,11 +477,13 @@
tc.fv( '1', 'redirect', redirect )
tc.fv( '1', 'password', password )
tc.submit( 'login_button' )
+
def galaxy_logout( self ):
self.home()
self.visit_galaxy_url( "/user/logout" )
self.check_page_for_string( "You have been logged out" )
self.home()
+
def generate_invalid_dependency_xml( self, xml_filename, url, name, owner, changeset_revision, complex=True, package=None, version=None, description=None ):
file_path = os.path.split( xml_filename )[0]
dependency_entries = []
@@ -440,6 +506,7 @@
repository_dependency_xml = template_parser.safe_substitute( description=description, dependency_lines='\n'.join( dependency_entries ) )
# Save the generated xml to the specified location.
file( xml_filename, 'w' ).write( repository_dependency_xml )
+
def generate_repository_dependency_xml( self, repositories, xml_filename, dependency_description='', complex=False, package=None, version=None ):
file_path = os.path.split( xml_filename )[0]
if not os.path.exists( file_path ):
@@ -464,11 +531,13 @@
repository_dependency_xml = template_parser.safe_substitute( description=description, dependency_lines='\n'.join( dependency_entries ) )
# Save the generated xml to the specified location.
file( xml_filename, 'w' ).write( repository_dependency_xml )
+
def generate_temp_path( self, test_script_path, additional_paths=[] ):
temp_path = os.path.join( self.tool_shed_test_tmp_dir, test_script_path, os.sep.join( additional_paths ) )
if not os.path.exists( temp_path ):
os.makedirs( temp_path )
return temp_path
+
def get_datatypes_count( self ):
url = '/admin/view_datatypes_registry'
self.visit_galaxy_url( url )
@@ -477,11 +546,13 @@
if datatypes_count:
return datatypes_count.group( 1 )
return None
+
def get_filename( self, filename, filepath=None ):
if filepath is not None:
return os.path.abspath( os.path.join( filepath, filename ) )
else:
return os.path.abspath( os.path.join( self.file_dir, filename ) )
+
def get_last_reviewed_revision_by_user( self, user, repository ):
changelog_tuples = self.get_repository_changelog_tuples( repository )
reviews = test_db_util.get_reviews_ordered_by_changeset_revision( repository.id, changelog_tuples, reviewer_user_id = user.id )
@@ -490,6 +561,7 @@
else:
last_review = None
return last_review
+
def get_or_create_repository( self, owner=None, strings_displayed=[], strings_not_displayed=[], **kwd ):
repository = test_db_util.get_repository_by_name_and_owner( kwd[ 'name' ], owner )
if repository is None:
@@ -498,6 +570,7 @@
self.check_for_strings( strings_displayed, strings_not_displayed )
repository = test_db_util.get_repository_by_name_and_owner( kwd[ 'name' ], owner )
return repository
+
def get_repo_path( self, repository ):
# An entry in the hgweb.config file looks something like: repos/test/mira_assembler = database/community_files/000/repo_123
lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
@@ -505,6 +578,7 @@
return self.hgweb_config_manager.get_entry( lhs )
except:
raise Exception( "Entry for repository %s missing in hgweb config file %s." % ( lhs, self.hgweb_config_manager.hgweb_config ) )
+
def get_repository_changelog_tuples( self, repository ):
repo = hg.repository( ui.ui(), self.get_repo_path( repository ) )
changelog_tuples = []
@@ -512,12 +586,14 @@
ctx = repo.changectx( changeset )
changelog_tuples.append( ( ctx.rev(), repo.changectx( changeset ) ) )
return changelog_tuples
+
def get_repository_datatypes_count( self, repository ):
metadata = self.get_repository_metadata( repository )[0].metadata
if 'datatypes' not in metadata:
return 0
else:
return len( metadata[ 'datatypes' ] )
+
def get_repository_file_list( self, base_path, current_path=None ):
'''Recursively load repository folder contents and append them to a list. Similar to os.walk but via /repository/open_folder.'''
if current_path is None:
@@ -548,19 +624,20 @@
else:
returned_file_list.append( file_dict[ 'title' ] )
return returned_file_list
+
def get_repository_metadata( self, repository ):
return [ metadata_revision for metadata_revision in repository.metadata_revisions ]
+
def get_repository_metadata_by_changeset_revision( self, repository, changeset_revision ):
- found = None
- for metadata_revision in repository.metadata_revisions:
- if metadata_revision.changeset_revision == changeset_revision:
- found = metadata_revision
- return found
+ return test_db_util.get_repository_metadata_for_changeset_revision( repository.id, changeset_revision )
+
def get_repository_metadata_revisions( self, repository ):
return [ str( repository_metadata.changeset_revision ) for repository_metadata in repository.metadata_revisions ]
+
def get_repository_tip( self, repository ):
repo = hg.repository( ui.ui(), self.get_repo_path( repository ) )
return str( repo.changectx( repo.changelog.tip() ) )
+
def get_tools_from_repository_metadata( self, repository, include_invalid=False ):
'''Get a list of valid and (optionally) invalid tool dicts from the repository metadata.'''
valid_tools = []
@@ -571,6 +648,7 @@
if include_invalid and 'invalid_tools' in repository_metadata.metadata:
invalid_tools.append( dict( tools=repository_metadata.metadata[ 'invalid_tools' ], changeset_revision=repository_metadata.changeset_revision ) )
return valid_tools, invalid_tools
+
def grant_role_to_user( self, user, role ):
strings_displayed = [ self.security.encode_id( role.id ), role.name ]
strings_not_displayed = []
@@ -587,6 +665,7 @@
self.visit_url( url )
strings_displayed = [ "Role '%s' has been updated" % role.name ]
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def grant_write_access( self,
repository,
usernames=[],
@@ -601,6 +680,7 @@
tc.fv( "user_access", "allow_push", '+%s' % username )
tc.submit( 'user_access_button' )
self.check_for_strings( post_submit_strings_displayed, post_submit_strings_not_displayed )
+
def import_workflow( self, repository, workflow_name, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin_toolshed/import_workflow?repository_id=%s&workflow_name=%s' % \
( self.security.encode_id( repository.id ), tool_shed_encode( workflow_name ) )
@@ -608,6 +688,7 @@
if workflow_name not in strings_displayed:
strings_displayed.append( workflow_name )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def initiate_installation_process( self,
install_tool_dependencies=False,
install_repository_dependencies=True,
@@ -633,6 +714,7 @@
( ','.join( util.listify( repository_ids ) ), encoded_kwd, reinstalling )
self.visit_galaxy_url( url )
return util.listify( repository_ids )
+
def install_repositories_from_search_results( self, repositories, install_tool_dependencies=False,
strings_displayed=[], strings_not_displayed=[], **kwd ):
'''
@@ -658,6 +740,7 @@
self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
repository_ids = self.initiate_installation_process()
self.wait_for_repository_installation( repository_ids )
+
def install_repository( self, name, owner, category_name, install_tool_dependencies=False,
install_repository_dependencies=True, changeset_revision=None,
strings_displayed=[], strings_not_displayed=[], preview_strings_displayed=[],
@@ -702,6 +785,7 @@
self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
repository_ids = self.initiate_installation_process( new_tool_panel_section=new_tool_panel_section )
self.wait_for_repository_installation( repository_ids )
+
def load_citable_url( self,
username,
repository_name,
@@ -733,20 +817,24 @@
url = '/repository/browse_repositories?user_id=%s&operation=repositories_by_user' % encoded_user_id
self.visit_url( url )
self.check_for_strings( strings_displayed_in_iframe, strings_not_displayed_in_iframe )
+
def load_display_tool_page( self, repository, tool_xml_path, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/display_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
( self.security.encode_id( repository.id ), tool_xml_path, changeset_revision )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def load_galaxy_tool_migrations_page( self, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin/review_tool_migration_stages'
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/load_invalid_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
( self.security.encode_id( repository.id ), tool_xml, changeset_revision )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def load_workflow_image_in_tool_shed( self, repository, workflow_name, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
if not changeset_revision:
changeset_revision = self.get_repository_tip( repository )
@@ -757,10 +845,12 @@
( self.security.encode_id( metadata.id ), tool_shed_encode( workflow_name ) )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def manage_review_components( self, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository_review/manage_components'
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def preview_repository_in_tool_shed( self, name, owner, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
repository = test_db_util.get_repository_by_name_and_owner( name, owner )
if not changeset_revision:
@@ -768,6 +858,7 @@
self.visit_url( '/repository/preview_tools_in_changeset?repository_id=%s&changeset_revision=%s' % \
( self.security.encode_id( repository.id ), changeset_revision ) )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def preview_workflow_in_tool_shed( self, repository_name, owner, workflow_name, strings_displayed=[], strings_not_displayed=[] ):
repository = test_db_util.get_repository_by_name_and_owner( repository_name, owner )
metadata = self.get_repository_metadata( repository )
@@ -775,11 +866,13 @@
( tool_shed_encode( workflow_name ), self.security.encode_id( metadata[0].id ) )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def reactivate_repository( self, installed_repository ):
url = '/admin_toolshed/browse_repositories?operation=activate+or+reinstall&id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
strings_displayed = [ installed_repository.name, 'repository has been activated' ]
self.check_for_strings( strings_displayed, [] )
+
def reinstall_repository( self,
installed_repository,
install_repository_dependencies=True,
@@ -807,26 +900,32 @@
new_tool_panel_section )
# Finally, wait until all repositories are in a final state (either Error or Installed) before returning.
self.wait_for_repository_installation( repository_ids )
+
def repository_is_new( self, repository ):
repo = hg.repository( ui.ui(), self.get_repo_path( repository ) )
tip_ctx = repo.changectx( repo.changelog.tip() )
return tip_ctx.rev() < 0
+
def reset_installed_repository_metadata( self, repository ):
url = '/admin_toolshed/reset_repository_metadata?id=%s' % self.security.encode_id( repository.id )
self.visit_galaxy_url( url )
self.check_for_strings( [ 'Metadata has been reset' ] )
+
def reset_metadata_on_selected_repositories( self, repository_ids ):
self.visit_url( '/admin/reset_metadata_on_selected_repositories_in_tool_shed' )
kwd = dict( repository_ids=repository_ids )
self.submit_form( form_no=1, button="reset_metadata_on_selected_repositories_button", **kwd )
+
def reset_metadata_on_selected_installed_repositories( self, repository_ids ):
self.visit_galaxy_url( '/admin_toolshed/reset_metadata_on_selected_installed_repositories' )
kwd = dict( repository_ids=repository_ids )
self.submit_form( form_no=1, button="reset_metadata_on_selected_repositories_button", **kwd )
+
def reset_repository_metadata( self, repository ):
url = '/repository/reset_all_metadata?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
self.check_for_strings( [ 'All repository metadata has been reset.' ] )
+
def review_repository( self, repository, review_contents_dict, user=None, changeset_revision=None ):
strings_displayed = []
strings_not_displayed = []
@@ -838,10 +937,12 @@
url = '/repository_review/edit_review?id=%s' % self.security.encode_id( review.id )
self.visit_url( url )
self.fill_review_form( review_contents_dict, strings_displayed, strings_not_displayed )
+
def revoke_write_access( self, repository, username ):
url = '/repository/manage_repository?user_access_button=Remove&id=%s&remove_auth=%s' % \
( self.security.encode_id( repository.id ), username )
self.visit_url( url )
+
def search_for_valid_tools( self, search_fields={}, exact_matches=False, strings_displayed=[], strings_not_displayed=[], from_galaxy=False ):
if from_galaxy:
galaxy_url = '?galaxy_url=%s' % self.galaxy_url
@@ -854,6 +955,7 @@
tc.fv( "1", field_name, search_string )
tc.submit()
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def send_message_to_repository_owner( self,
repository,
message,
@@ -867,18 +969,30 @@
tc.fv( 1, 'message', message )
tc.submit()
self.check_for_strings( post_submit_strings_displayed, post_submit_strings_not_displayed )
+
def set_repository_deprecated( self, repository, set_deprecated=True, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/deprecate?id=%s&mark_deprecated=%s' % ( self.security.encode_id( repository.id ), set_deprecated )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def set_repository_malicious( self, repository, set_malicious=True, strings_displayed=[], strings_not_displayed=[] ):
self.display_manage_repository_page( repository )
tc.fv( "malicious", "malicious", set_malicious )
tc.submit( "malicious_button" )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def tip_has_metadata( self, repository ):
tip = self.get_repository_tip( repository )
return test_db_util.get_repository_metadata_by_repository_id_changeset_revision( repository.id, tip )
+
+ def undelete_repository( self, repository ):
+ repository_id = self.security.encode_id( repository.id )
+ url = '/admin/browse_repositories?operation=Undelete&id=%s' % repository_id
+ strings_displayed = [ 'Undeleted 1 repository', repository.name ]
+ strings_not_displayed = []
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+
def uninstall_repository( self, installed_repository, remove_from_disk=True ):
url = '/admin_toolshed/deactivate_or_uninstall_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
@@ -893,10 +1007,12 @@
else:
strings_displayed.append( 'has been deactivated' )
self.check_for_strings( strings_displayed, strings_not_displayed=[] )
+
def update_installed_repository( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin_toolshed/check_for_updates?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+
def upload_file( self,
repository,
filename,
@@ -942,18 +1058,26 @@
# else:
# time.sleep( 1 )
# continue
+
def verify_installed_repositories( self, installed_repositories=[], uninstalled_repositories=[] ):
for repository_name, repository_owner in installed_repositories:
galaxy_repository = test_db_util.get_installed_repository_by_name_owner( repository_name, repository_owner )
if galaxy_repository:
assert galaxy_repository.status == 'Installed', \
'Repository %s should be installed, but is %s' % ( repository_name, galaxy_repository.status )
+
def verify_installed_repository_metadata_unchanged( self, name, owner ):
installed_repository = test_db_util.get_installed_repository_by_name_owner( name, owner )
metadata = installed_repository.metadata
self.reset_installed_repository_metadata( installed_repository )
new_metadata = installed_repository.metadata
assert metadata == new_metadata, 'Metadata for installed repository %s differs after metadata reset.' % name
+
+ def verify_installed_repository_no_tool_panel_section( self, repository ):
+ '''Verify that there is no 'tool_panel_section' entry in the repository metadata.'''
+ metadata = repository.metadata
+ assert 'tool_panel_section' not in metadata, 'Tool panel section incorrectly found in metadata: %s' % metadata
+
def verify_installed_repository_data_table_entries( self, required_data_table_entries ):
# The value of the received required_data_table_entries will be something like: [ 'sam_fa_indexes' ]
data_tables = util.parse_xml( self.shed_tool_data_table_conf )
@@ -1006,6 +1130,7 @@
# We better have an entry like: <table comment_char="#" name="sam_fa_indexes"> in our parsed data_tables
# or we know that the repository was not correctly installed!
assert found, 'No entry for %s in %s.' % ( required_data_table_entry, self.shed_tool_data_table_conf )
+
def verify_repository_reviews( self, repository, reviewer=None, strings_displayed=[], strings_not_displayed=[] ):
changeset_revision = self.get_repository_tip( repository )
# Verify that the currently logged in user has a repository review for the specified repository, reviewer, and changeset revision.
@@ -1017,6 +1142,7 @@
# Load the review and check for the components passed in strings_displayed.
review = test_db_util.get_repository_review_by_user_id_changeset_revision( reviewer.id, repository.id, changeset_revision )
self.browse_component_review( review, strings_displayed=strings_displayed )
+
def verify_tool_metadata_for_installed_repository( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
repository_id = self.security.encode_id( installed_repository.id )
for tool in installed_repository.metadata[ 'tools' ]:
@@ -1025,6 +1151,7 @@
url = '/admin_toolshed/view_tool_metadata?repository_id=%s&tool_id=%s' % ( repository_id, urllib.quote_plus( tool[ 'id' ] ) )
self.visit_galaxy_url( url )
self.check_for_strings( strings, strings_not_displayed )
+
def verify_unchanged_repository_metadata( self, repository ):
old_metadata = dict()
new_metadata = dict()
@@ -1036,14 +1163,17 @@
# Python's dict comparison recursively compares sorted key => value pairs and returns true if any key or value differs,
# or if the number of keys differs.
assert old_metadata == new_metadata, 'Metadata changed after reset on repository %s.' % repository.name
+
def view_installed_workflow( self, repository, workflow_name, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin_toolshed/view_workflow?repository_id=%s&workflow_name=%s' % \
( self.security.encode_id( repository.id ), tool_shed_encode( workflow_name ) )
self.visit_galaxy_url( url )
self.check_for_strings( strings, strings_not_displayed )
+
def visit_galaxy_url( self, url ):
url = '%s%s' % ( self.galaxy_url, url )
self.visit_url( url )
+
def wait_for_repository_installation( self, repository_ids ):
final_states = [ galaxy_model.ToolShedRepository.installation_status.ERROR,
galaxy_model.ToolShedRepository.installation_status.INSTALLED ]
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/tool_shed/functional/test_0000_basic_repository_features.py
--- a/test/tool_shed/functional/test_0000_basic_repository_features.py
+++ b/test/tool_shed/functional/test_0000_basic_repository_features.py
@@ -5,8 +5,10 @@
repository_description = "Galaxy's filtering tool for test 0000"
repository_long_description = "Long description of Galaxy's filtering tool for test 0000"
+
class TestBasicRepositoryFeatures( ShedTwillTestCase ):
'''Test core repository features.'''
+
def test_0000_initiate_users( self ):
"""Create necessary user accounts and login as an admin user."""
self.logout()
@@ -24,10 +26,12 @@
admin_user = test_db_util.get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
admin_user_private_role = test_db_util.get_private_role( admin_user )
+
def test_0005_create_categories( self ):
"""Create categories for this test suite"""
self.create_category( name='Test 0000 Basic Repository Features 1', description='Test 0000 Basic Repository Features 1' )
self.create_category( name='Test 0000 Basic Repository Features 2', description='Test 0000 Basic Repository Features 2' )
+
def test_0010_create_repository( self ):
"""Create the filtering repository"""
self.logout()
@@ -41,6 +45,7 @@
owner=common.test_user_1_name,
category_id=self.security.encode_id( category.id ),
strings_displayed=strings_displayed )
+
def test_0015_edit_repository( self ):
"""Edit the repository name, description, and long description"""
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
@@ -48,17 +53,20 @@
new_description = "Edited filtering tool"
new_long_description = "Edited long description"
self.edit_repository_information( repository, repo_name=new_name, description=new_description, long_description=new_long_description )
+
def test_0020_change_repository_category( self ):
"""Change the categories associated with the filtering repository"""
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.edit_repository_categories( repository,
categories_to_add=[ "Test 0000 Basic Repository Features 2" ],
categories_to_remove=[ "Test 0000 Basic Repository Features 1" ] )
+
def test_0025_grant_write_access( self ):
'''Grant write access to another user'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.grant_write_access( repository, usernames=[ common.test_user_2_name ] )
self.revoke_write_access( repository, common.test_user_2_name )
+
def test_0030_upload_filtering_1_1_0( self ):
"""Upload filtering_1.1.0.tar to the repository"""
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
@@ -71,6 +79,7 @@
commit_message="Uploaded filtering 1.1.0",
strings_displayed=[],
strings_not_displayed=[] )
+
def test_0035_verify_repository( self ):
'''Display basic repository pages'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
@@ -78,12 +87,23 @@
self.check_for_valid_tools( repository, strings_displayed=[ 'Filter1' ] )
self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=1 )
tip = self.get_repository_tip( repository )
- self.check_repository_tools_for_changeset_revision( repository, tip )
+ tool_guid = '%s/repos/user1/filtering_0000/Filter1/1.1.0' % self.url.replace( 'http://', '' ).rstrip( '/' )
+ tool_metadata_strings_displayed = [ tool_guid,
+ '1.1.0', # The tool version.
+ 'Filter1', # The tool ID.
+ 'Filter', # The tool name.
+ 'data on any column using simple expressions' ] # The tool description.
+ tool_page_strings_displayed = [ 'Filter (version 1.1.0)' ]
+ self.check_repository_tools_for_changeset_revision( repository,
+ tip,
+ tool_metadata_strings_displayed=tool_metadata_strings_displayed,
+ tool_page_strings_displayed=tool_page_strings_displayed )
self.check_repository_metadata( repository, tip_only=False )
self.browse_repository( repository, strings_displayed=[ 'Browse %s revision' % repository.name, '(repository tip)' ] )
self.display_repository_clone_page( common.test_user_1_name,
repository_name,
strings_displayed=[ 'Uploaded filtering 1.1.0', latest_changeset_revision ] )
+
def test_0040_alter_repository_states( self ):
'''Test toggling the malicious and deprecated repository flags.'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
@@ -106,6 +126,7 @@
self.set_repository_deprecated( repository,
strings_displayed=[ 'has been marked as not deprecated', 'Mark as deprecated' ],
set_deprecated=False )
+
def test_0045_display_repository_tip_file( self ):
'''Display the contents of filtering.xml in the repository tip revision'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
@@ -114,6 +135,7 @@
filepath=None,
strings_displayed=[ '1.1.0' ],
strings_not_displayed=[] )
+
def test_0050_upload_filtering_txt_file( self ):
'''Upload filtering.txt file associated with tool version 1.1.0.'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
@@ -127,6 +149,7 @@
strings_displayed=[],
strings_not_displayed=[] )
self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
+
def test_0055_upload_filtering_test_data( self ):
'''Upload filtering test data.'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
@@ -145,6 +168,7 @@
strings_displayed=[],
strings_not_displayed=[] )
self.check_repository_metadata( repository, tip_only=True )
+
def test_0060_upload_filtering_2_2_0( self ):
'''Upload filtering version 2.2.0'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
@@ -157,6 +181,7 @@
commit_message="Uploaded filtering 2.2.0",
strings_displayed=[],
strings_not_displayed=[] )
+
def test_0065_verify_filtering_repository( self ):
'''Verify the new tool versions and repository metadata.'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
@@ -166,8 +191,19 @@
strings_displayed = [ 'Select a revision' ]
self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=2 )
- self.check_repository_tools_for_changeset_revision( repository, tip )
+ tool_guid = '%s/repos/user1/filtering_0000/Filter1/2.2.0' % self.url.replace( 'http://', '' ).rstrip( '/' )
+ tool_metadata_strings_displayed = [ tool_guid,
+ '2.2.0', # The tool version.
+ 'Filter1', # The tool ID.
+ 'Filter', # The tool name.
+ 'data on any column using simple expressions' ] # The tool description.
+ tool_page_strings_displayed = [ 'Filter (version 2.2.0)' ]
+ self.check_repository_tools_for_changeset_revision( repository,
+ tip,
+ tool_metadata_strings_displayed=tool_metadata_strings_displayed,
+ tool_page_strings_displayed=tool_page_strings_displayed )
self.check_repository_metadata( repository, tip_only=False )
+
def test_0070_upload_readme_txt_file( self ):
'''Upload readme.txt file associated with tool version 2.2.0.'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
@@ -186,22 +222,26 @@
self.display_manage_repository_page( repository,
strings_displayed=[ 'Readme file for filtering 1.1.0',
'This is a readme file.' ] )
+
def test_0075_delete_readme_txt_file( self ):
'''Delete the readme.txt file.'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.delete_files_from_repository( repository, filenames=[ 'readme.txt' ] )
self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=2 )
self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
+
def test_0080_search_for_valid_filter_tool( self ):
'''Search for the filtering tool by tool ID, name, and version.'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
tip_changeset = self.get_repository_tip( repository )
search_fields = dict( tool_id='Filter1', tool_name='filter', tool_version='2.2.0' )
self.search_for_valid_tools( search_fields=search_fields, strings_displayed=[ tip_changeset ], strings_not_displayed=[] )
+
def test_0085_verify_repository_metadata( self ):
'''Verify that resetting the metadata does not change it.'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.verify_unchanged_repository_metadata( repository )
+
def test_0090_verify_reserved_repository_name_handling( self ):
'''Check that reserved repository names are handled correctly.'''
category = test_db_util.get_category_by_name( 'Test 0000 Basic Repository Features 1' )
@@ -212,6 +252,7 @@
owner=common.test_user_1_name,
category_id=self.security.encode_id( category.id ),
strings_displayed=[ error_message ] )
+
def test_0100_verify_reserved_username_handling( self ):
'''Check that reserved usernames are handled correctly.'''
self.logout()
@@ -220,6 +261,7 @@
assert test_user_1 is None, 'Creating user with public name "repos" succeeded.'
error_message = 'The term <b>repos</b> is a reserved word in the tool shed, so it cannot be used as a public user name.'
self.check_for_strings( strings_displayed=[ error_message ] )
+
def test_0105_contact_repository_owner( self ):
'''Fill out and submit the form to contact the owner of a repository.'''
'''
@@ -239,3 +281,28 @@
strings_displayed=strings_displayed,
post_submit_strings_displayed=post_submit_strings_displayed )
+ def test_0110_delete_filtering_repository( self ):
+ '''Delete the filtering_0000 repository and verify that it no longer has any downloadable revisions.'''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ self.delete_repository( repository )
+ # Explicitly reload all metadata revisions from the database, to ensure that we have the current status of the downloadable flag.
+ for metadata_revision in repository.metadata_revisions:
+ test_db_util.refresh( metadata_revision )
+ # Marking a repository as deleted should result in no metadata revisions being downloadable.
+ assert True not in [ metadata.downloadable for metadata in repository.metadata_revisions ]
+
+ def test_0115_undelete_filtering_repository( self ):
+ '''Undelete the filtering_0000 repository and verify that it now has two downloadable revisions.'''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ self.undelete_repository( repository )
+ # Explicitly reload all metadata revisions from the database, to ensure that we have the current status of the downloadable flag.
+ for metadata_revision in repository.metadata_revisions:
+ test_db_util.refresh( metadata_revision )
+ # Marking a repository as undeleted should result in all previously downloadable metadata revisions being downloadable again.
+ # In this case, there should be two downloadable revisions, one for filtering 1.1.0 and one for filtering 2.2.0.
+ assert True in [ metadata.downloadable for metadata in repository.metadata_revisions ]
+ assert len( repository.downloadable_revisions ) == 2
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
@@ -5,8 +5,20 @@
repository_description = "Galaxy's freebayes tool"
repository_long_description = "Long description of Galaxy's freebayes tool"
+'''
+1. Create repository freebayes_0020 and upload only the tool XML.
+2. Upload the tool_data_table_conf.xml.sample file.
+3. Upload sam_fa_indices.loc.sample.
+4. Upload a tool_dependencies.xml file that should not parse correctly.
+5. Upload a tool_dependencies.xml file that specifies a version that does not match the tool's requirements.
+6. Upload a valid tool_dependencies.xml file.
+7. Check for the appropriate strings on the manage repository page.
+'''
+
+
class TestFreebayesRepository( ShedTwillTestCase ):
'''Testing freebayes with tool data table entries, .loc files, and tool dependencies.'''
+
def test_0000_create_or_login_admin_user( self ):
"""Create necessary user accounts and login as an admin user."""
self.logout()
@@ -19,11 +31,18 @@
admin_user = test_db_util.get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
admin_user_private_role = test_db_util.get_private_role( admin_user )
+
def test_0005_create_category( self ):
"""Create a category for this test suite"""
self.create_category( name='Test 0010 Repository With Tool Dependencies', description='Tests for a repository with tool dependencies.' )
+
def test_0010_create_freebayes_repository_and_upload_tool_xml( self ):
- '''Create freebayes repository and upload freebayes.xml without tool_data_table_conf.xml.sample. This should result in an error message and invalid tool.'''
+ '''Create freebayes repository and upload only freebayes.xml.'''
+ '''
+ We are at step 1 - Create repository freebayes_0020 and upload only the tool XML.
+ Uploading only the tool XML file should result in an invalid tool and an error message on
+ upload, as well as on the manage repository page.
+ '''
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
category = test_db_util.get_category_by_name( 'Test 0010 Repository With Tool Dependencies' )
@@ -44,11 +63,15 @@
strings_not_displayed=[] )
self.display_manage_repository_page( repository, strings_displayed=[ 'Invalid tools' ], strings_not_displayed=[ 'Valid tools' ] )
tip = self.get_repository_tip( repository )
- self.check_repository_invalid_tools_for_changeset_revision( repository,
- tip,
- strings_displayed=[ 'requires an entry', 'tool_data_table_conf.xml' ] )
+ strings_displayed = [ 'requires an entry', 'tool_data_table_conf.xml' ]
+ self.check_repository_invalid_tools_for_changeset_revision( repository, tip, strings_displayed=strings_displayed )
+
def test_0015_upload_missing_tool_data_table_conf_file( self ):
'''Upload the missing tool_data_table_conf.xml.sample file to the repository.'''
+ '''
+ We are at step 2 - Upload the tool_data_table_conf.xml.sample file.
+ Uploading the tool_data_table_conf.xml.sample alone should not make the tool valid, but the error message should change.
+ '''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
filename='freebayes/tool_data_table_conf.xml.sample',
@@ -61,11 +84,15 @@
strings_not_displayed=[] )
self.display_manage_repository_page( repository, strings_displayed=[ 'Invalid tools' ], strings_not_displayed=[ 'Valid tools' ] )
tip = self.get_repository_tip( repository )
- self.check_repository_invalid_tools_for_changeset_revision( repository,
- tip,
- strings_displayed=[ 'refers to a file', 'sam_fa_indices.loc' ] )
+ strings_displayed = [ 'refers to a file', 'sam_fa_indices.loc' ]
+ self.check_repository_invalid_tools_for_changeset_revision( repository, tip, strings_displayed=strings_displayed )
+
def test_0020_upload_missing_sample_loc_file( self ):
'''Upload the missing sam_fa_indices.loc.sample file to the repository.'''
+ '''
+ We are at step 3 - Upload the sam_fa_indices.loc.sample file.
+ Uploading the missing sam_fa_indices.loc.sample file should resolve the previous error message that referred to the missing sam_fa_indices.loc file.
+ '''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
filename='freebayes/sam_fa_indices.loc.sample',
@@ -76,8 +103,13 @@
commit_message='Uploaded tool data table .loc file.',
strings_displayed=[],
strings_not_displayed=[] )
+
def test_0025_upload_malformed_tool_dependency_xml( self ):
'''Upload tool_dependencies.xml with bad characters in the readme tag.'''
+ '''
+ We are at step 4 - Upload a tool_dependencies.xml file that should not parse correctly.
+ Upload a tool_dependencies.xml file that contains <> in the text of the readme tag. This should show an error message about malformed xml.
+ '''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
filename=os.path.join( 'freebayes', 'malformed_tool_dependencies', 'tool_dependencies.xml' ),
@@ -88,8 +120,13 @@
commit_message='Uploaded malformed tool dependency XML.',
strings_displayed=[ 'Exception attempting to parse tool_dependencies.xml', 'not well-formed' ],
strings_not_displayed=[] )
+
def test_0030_upload_invalid_tool_dependency_xml( self ):
'''Upload tool_dependencies.xml defining version 0.9.5 of the freebayes package.'''
+ '''
+ We are at step 5 - Upload a tool_dependencies.xml file that specifies a version that does not match the tool's requirements.
+ This should result in a message about the tool dependency configuration not matching the tool's requirements.
+ '''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
filename=os.path.join( 'freebayes', 'invalid_tool_dependencies', 'tool_dependencies.xml' ),
@@ -100,8 +137,13 @@
commit_message='Uploaded invalid tool dependency XML.',
strings_displayed=[ 'The settings for <b>name</b>, <b>version</b> and <b>type</b> from a contained tool configuration' ],
strings_not_displayed=[] )
+
def test_0035_upload_valid_tool_dependency_xml( self ):
'''Upload tool_dependencies.xml defining version 0.9.4_9696d0ce8a962f7bb61c4791be5ce44312b81cf8 of the freebayes package.'''
+ '''
+ We are at step 6 - Upload a valid tool_dependencies.xml file.
+ At this stage, there should be no errors on the upload page, as every missing or invalid file has been corrected.
+ '''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
filename=os.path.join( 'freebayes', 'tool_dependencies.xml' ),
@@ -112,9 +154,14 @@
commit_message='Uploaded valid tool dependency XML.',
strings_displayed=[],
strings_not_displayed=[] )
+
def test_0040_verify_tool_dependencies( self ):
'''Verify that the uploaded tool_dependencies.xml specifies the correct package versions.'''
+ '''
+ We are at step 7 - Check for the appropriate strings on the manage repository page.
+ Verify that the manage repository page now displays the valid tool dependencies, and that there are no invalid tools shown on the manage page.
+ '''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
- self.display_manage_repository_page( repository,
- strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Valid tools', 'package' ],
- strings_not_displayed=[ 'Invalid tools' ] )
+ strings_displayed = [ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Valid tools', 'package' ]
+ strings_not_displayed = [ 'Invalid tools' ]
+ self.display_manage_repository_page( repository, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/tool_shed/functional/test_0070_invalid_tool.py
--- a/test/tool_shed/functional/test_0070_invalid_tool.py
+++ b/test/tool_shed/functional/test_0070_invalid_tool.py
@@ -7,8 +7,10 @@
category_name = 'Test 0070 Invalid Tool Revisions'
category_description = 'Tests for a repository with invalid tool revisions.'
+
class TestBismarkRepository( ShedTwillTestCase ):
'''Testing bismark with valid and invalid tool entries.'''
+
def test_0000_create_or_login_admin_user( self ):
"""Create necessary user accounts and login as an admin user."""
self.logout()
@@ -21,6 +23,7 @@
admin_user = test_db_util.get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
admin_user_private_role = test_db_util.get_private_role( admin_user )
+
def test_0005_create_category_and_repository( self ):
"""Create a category for this test suite, then create and populate a bismark repository. It should contain at least one each valid and invalid tool."""
category = self.create_category( name=category_name, description=category_description )
@@ -54,5 +57,15 @@
strings_not_displayed=[] )
valid_revision = self.get_repository_tip( repository )
test_db_util.refresh( repository )
- self.check_repository_tools_for_changeset_revision( repository, valid_revision )
+ tool_guid = '%s/repos/user1/bismark_0070/bismark_methylation_extractor/0.7.7.3' % self.url.replace( 'http://', '' ).rstrip( '/' )
+ tool_metadata_strings_displayed = [ tool_guid,
+ '0.7.7.3', # The tool version.
+ 'bismark_methylation_extractor', # The tool ID.
+ 'Bismark', # The tool name.
+ 'methylation extractor' ] # The tool description.
+ tool_page_strings_displayed = [ 'Bismark (version 0.7.7.3)' ]
+ self.check_repository_tools_for_changeset_revision( repository,
+ valid_revision,
+ tool_metadata_strings_displayed=tool_metadata_strings_displayed,
+ tool_page_strings_displayed=tool_page_strings_displayed )
self.check_repository_invalid_tools_for_changeset_revision( repository, invalid_revision )
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/tool_shed/functional/test_0130_datatype_converters.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0130_datatype_converters.py
@@ -0,0 +1,83 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+repository_name = 'bed_to_gff_0130'
+repository_description = "Converter: BED to GFF"
+repository_long_description = "Convert bed to gff"
+
+category_name = 'Test 0130 Datatype Converters'
+category_description = 'Test 0130 Datatype Converters'
+
+'''
+1) Create and populate the bed_to_gff_converter repository
+2) Visit the manage repository page and make sure there is the appropriate valid tool and datatype
+3) Visit the view tool metadata page and make sure that "Display in tool panel" is False
+'''
+
+
+class TestDatatypeConverters( ShedTwillTestCase ):
+ '''Test features related to datatype converters.'''
+
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+
+ def test_0005_create_bed_to_gff_repository( self ):
+ '''Create and populate bed_to_gff_0130.'''
+ '''
+ We are at step 1 - Create and populate the bed_to_gff_0130 repository.
+ Create the bed_to_gff_0130 repository and populate it with the files needed for this test.
+ '''
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ # Create a repository named bed_to_gff_0130 owned by user1.
+ repository = self.get_or_create_repository( name=repository_name,
+ description=repository_description,
+ long_description=repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ # Upload bed_to_gff_converter.tar to the repository.
+ self.upload_file( repository,
+ filename='bed_to_gff_converter/bed_to_gff_converter.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=False,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded bed_to_gff_converter.tar.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+
+ def test_0010_verify_tool_and_datatype( self ):
+ '''Verify that a valid tool and datatype are contained within the repository.'''
+ '''
+ We are at step 2 - Visit the manage repository page and make sure there is the appropriate valid tool and datatype.
+ There should be a 'Convert BED to GFF' tool and a 'galaxy.datatypes.interval:Bed' datatype with extension 'bed'
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ strings_displayed = [ 'Convert BED to GFF', 'galaxy.datatypes.interval:Bed', 'bed', 'Valid tools', 'Datatypes' ]
+ strings_not_displayed = [ 'Invalid tools' ]
+ self.display_manage_repository_page( repository, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+
+ def test_0015_verify_tool_panel_display( self ):
+ '''Verify that the tool is configured not to be displayed in the tool panel.'''
+ '''
+ We are at step 3 - Visit the view tool metadata page and make sure that "Display in tool panel" is False.
+ Datatype converters that are associated with a datatype should have display in tool panel = False in the tool metadata.
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ metadata = self.get_repository_metadata_by_changeset_revision( repository, self.get_repository_tip( repository ) )
+ tool_metadata_strings_displayed = '<label>Display in tool panel:</label>\n False'
+ self.check_repository_tools_for_changeset_revision( repository,
+ self.get_repository_tip( repository ),
+ tool_metadata_strings_displayed=tool_metadata_strings_displayed )
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/tool_shed/functional/test_0430_browse_utilities.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0430_browse_utilities.py
@@ -0,0 +1,184 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+import logging
+log = logging.getLogger(__name__)
+
+datatypes_repository_name = 'emboss_datatypes_0430'
+datatypes_repository_description = 'Galaxy applicable datatypes for EMBOSS for test 0430'
+datatypes_repository_long_description = 'Long description of Galaxy applicable datatypes for EMBOSS for test 0430'
+
+emboss_repository_name = 'emboss_0430'
+emboss_repository_description = 'EMBOSS tools for test 0430'
+emboss_repository_long_description = 'Long description of EMBOSS tools for test 0430'
+
+freebayes_repository_name = 'freebayes_0430'
+freebayes_repository_description = 'Freebayes tool for test 0430'
+freebayes_repository_long_description = 'Long description of Freebayes tool for test 0430'
+
+
+'''
+1. Create and populate repositories.
+2. Browse Custom Datatypes.
+3. Browse Tools.
+4. Browse Repository Dependencies.
+5. Browse Tool Dependencies.
+'''
+
+
+class TestToolShedBrowseUtilities( ShedTwillTestCase ):
+ '''Test browsing for Galaxy utilities.'''
+
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ """
+ Create all the user accounts that are needed for this test script to run independently of other tests.
+ Previously created accounts will not be re-created.
+ """
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+
+ def test_0005_create_datatypes_repository( self ):
+ """Create and populate the emboss_datatypes_0430 repository"""
+ """
+ We are at step 1.
+ Create and populate the repository that will contain one or more datatypes.
+ """
+ category = self.create_category( name='Test 0430 Galaxy Utilities',
+ description='Description of Test 0430 Galaxy Utilities category' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ 'Repository %s' % "'%s'" % datatypes_repository_name,
+ 'Repository %s has been created' % "'%s'" % datatypes_repository_name ]
+ repository = self.get_or_create_repository( name=datatypes_repository_name,
+ description=datatypes_repository_description,
+ long_description=datatypes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( repository,
+ filename='emboss/datatypes/datatypes_conf.xml',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded datatypes_conf.xml.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+
+ def test_0010_create_emboss_repository( self ):
+ """Create and populate the emboss_0430 repository"""
+ """
+ We are at step 1.
+ Create the emboss_0430 repository, and populate it with tools.
+ """
+ category = self.create_category( name='Test 0430 Galaxy Utilities',
+ description='Description of Test 0430 Galaxy Utilities category' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ 'Repository %s' % "'%s'" % emboss_repository_name,
+ 'Repository %s has been created' % "'%s'" % emboss_repository_name ]
+ emboss_repository = self.get_or_create_repository( name=emboss_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( emboss_repository,
+ filename='emboss/emboss.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded emboss.tar.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+
+ def test_0015_create_dependency_on_datatypes_repository( self ):
+ '''Create a dependency definition file that specifies emboss_datatypes_0430 and upload it to emboss_0430.'''
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ dependency_xml_path = self.generate_temp_path( 'test_0430', additional_paths=[ 'dependencies' ] )
+ self.create_repository_dependency( emboss_repository, depends_on=[ datatypes_repository ], filepath=dependency_xml_path )
+ self.check_repository_dependency( emboss_repository, datatypes_repository )
+
+ def test_0020_create_tool_dependency_repository( self ):
+ """Create and populate the freebayes_0430 repository"""
+ """
+ We are at step 1.
+ Create and populate the repository that will have a tool dependency defined.
+ """
+ category = self.create_category( name='Test 0430 Galaxy Utilities',
+ description='Description of Test 0430 Galaxy Utilities category' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ 'Repository %s' % "'%s'" % freebayes_repository_name,
+ 'Repository %s has been created' % "'%s'" % freebayes_repository_name ]
+ repository = self.get_or_create_repository( name=freebayes_repository_name,
+ description=freebayes_repository_description,
+ long_description=freebayes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( repository,
+ filename='freebayes/freebayes.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded freebayes.tar.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+
+ def test_0025_browse_custom_datatypes( self ):
+ '''Load the page to browse custom datatypes.'''
+ '''
+ We are at step 2.
+ Verify that the uploaded emboss datatypes repository has been added to the custom datatypes page.
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ changeset_revision = self.get_repository_tip( repository )
+ strings_displayed = [ 'equicktandem', 'est2genome', 'supermatcher', 'galaxy.datatypes.data:Text', changeset_revision, 'user1', 'emboss_datatypes_0430' ]
+ self.browse_custom_datatypes( strings_displayed=strings_displayed )
+
+ def test_0030_browse_tools( self ):
+ '''Load the page to browse tools.'''
+ '''
+ We are at step 3.
+ Verify the existence of emboss tools in the browse tools page.
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ changeset_revision = self.get_repository_tip( repository )
+ strings_displayed = [ 'EMBOSS', 'antigenic1', '5.0.0', changeset_revision, 'user1', 'emboss_0430' ]
+ self.browse_tools( strings_displayed=strings_displayed )
+
+ def test_0035_browse_repository_dependencies( self ):
+ '''Browse repository dependencies and look for a dependency on emboss_datatypes_0430.'''
+ '''
+ We are at step 4.
+ Verify that the browse repository dependencies page shows emboss_datatypes_0430 as a dependency of emboss_0430.
+ '''
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ datatypes_changeset_revision = self.get_repository_tip( datatypes_repository )
+ emboss_changeset_revision = self.get_repository_tip( emboss_repository )
+ strings_displayed = [ datatypes_changeset_revision, emboss_changeset_revision, 'emboss_datatypes_0430', 'user1', 'emboss_0430' ]
+ self.browse_repository_dependencies( strings_displayed=strings_displayed )
+
+ def test_0040_browse_tool_dependencies( self ):
+ '''Browse tool dependencies and look for the right versions of freebayes and samtools.'''
+ '''
+ We are at step 5.
+ Verify that the browse tool dependencies page shows the correct dependencies defined for freebayes_0430.
+ '''
+ freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ freebayes_changeset_revision = self.get_repository_tip( freebayes_repository )
+ strings_displayed = [ freebayes_changeset_revision, 'freebayes_0430', 'user1', '0.9.4_9696d0ce8a96', 'freebayes', 'samtools', '0.1.18' ]
+ self.browse_tool_dependencies( strings_displayed=strings_displayed )
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/tool_shed/functional/test_1000_install_basic_repository.py
--- a/test/tool_shed/functional/test_1000_install_basic_repository.py
+++ b/test/tool_shed/functional/test_1000_install_basic_repository.py
@@ -1,8 +1,10 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
import tool_shed.base.test_db_util as test_db_util
+
class BasicToolShedFeatures( ShedTwillTestCase ):
'''Test installing a basic repository.'''
+
def test_0000_initiate_users( self ):
"""Create necessary user accounts."""
self.logout()
@@ -20,6 +22,7 @@
galaxy_admin_user = test_db_util.get_galaxy_user( common.admin_email )
assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
galaxy_admin_user_private_role = test_db_util.get_galaxy_private_role( galaxy_admin_user )
+
def test_0005_ensure_repositories_and_categories_exist( self ):
'''Create the 0000 category and upload the filtering repository to it, if necessary.'''
category = self.create_category( name='Test 0000 Basic Repository Features 1', description='Test 0000 Basic Repository Features 1' )
@@ -68,6 +71,7 @@
commit_message='Uploaded readme for 2.2.0',
strings_displayed=[],
strings_not_displayed=[] )
+
def test_0010_browse_tool_sheds( self ):
"""Browse the available tool sheds in this Galaxy instance."""
self.galaxy_logout()
@@ -75,13 +79,16 @@
self.visit_galaxy_url( '/admin_toolshed/browse_tool_sheds' )
self.check_page_for_string( 'Embedded tool shed for functional tests' )
self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0000 Basic Repository Features 1', 'Test 0000 Basic Repository Features 2' ] )
+
def test_0015_browse_test_0000_category( self ):
'''Browse the category created in test 0000. It should contain the filtering_0000 repository also created in that test.'''
category = test_db_util.get_category_by_name( 'Test 0000 Basic Repository Features 1' )
self.browse_category( category, strings_displayed=[ 'filtering_0000' ] )
+
def test_0020_preview_filtering_repository( self ):
'''Load the preview page for the filtering_0000 repository in the tool shed.'''
self.preview_repository_in_tool_shed( 'filtering_0000', common.test_user_1_name, strings_displayed=[ 'filtering_0000', 'Valid tools' ] )
+
def test_0025_install_filtering_repository( self ):
self.install_repository( 'filtering_0000',
common.test_user_1_name,
@@ -97,6 +104,24 @@
strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
self.verify_tool_metadata_for_installed_repository( installed_repository )
- def test_0030_verify_installed_repository_metadata( self ):
+
+ def test_0030_install_filtering_repository_again( self ):
+ '''Attempt to install the already installed filtering repository, and check for the resulting error message.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
+ post_submit_strings_displayed = [ installed_repository.name,
+ installed_repository.owner,
+ installed_repository.installed_changeset_revision,
+ 'was previously installed',
+ 'to manage the repository' ]
+ self.install_repository( 'filtering_0000',
+ common.test_user_1_name,
+ 'Test 0000 Basic Repository Features 1',
+ post_submit_strings_displayed=post_submit_strings_displayed )
+ strings_displayed = [ 'filtering_0000',
+ 'user1',
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+
+ def test_0035_verify_installed_repository_metadata( self ):
'''Verify that resetting the metadata on an installed repository does not change the metadata.'''
self.verify_installed_repository_metadata_unchanged( 'filtering_0000', common.test_user_1_name )
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/tool_shed/functional/test_1130_datatype_converters.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1130_datatype_converters.py
@@ -0,0 +1,94 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+repository_name = 'bed_to_gff_0130'
+repository_description = "Converter: BED to GFF"
+repository_long_description = "Convert bed to gff"
+
+category_name = 'Test 0130 Datatype Converters'
+category_description = 'Test 0130 Datatype Converters'
+
+'''
+1) Install the bed_to_gff_converter repository.
+2) Make sure the page section to select a tool panel section is NOT displayed since the tool will not be displayed in the Galaxy tool panel.
+3) Make sure the bed_to_gff_converter tool is not displayed in the tool panel.
+'''
+
+
+class TestDatatypeConverters( ShedTwillTestCase ):
+ '''Test features related to datatype converters.'''
+
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ galaxy_admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ galaxy_admin_user_private_role = test_db_util.get_galaxy_private_role( galaxy_admin_user )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+
+ def test_0005_create_bed_to_gff_repository( self ):
+ '''Create and populate bed_to_gff_0130.'''
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ # Create a repository named bed_to_gff_0130 owned by user1.
+ repository = self.get_or_create_repository( name=repository_name,
+ description=repository_description,
+ long_description=repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ # Upload bed_to_gff_converter.tar to the repository, if the repository is new.
+ self.upload_file( repository,
+ filename='bed_to_gff_converter/bed_to_gff_converter.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=False,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded bed_to_gff_converter.tar.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+
+ def test_0010_install_datatype_converter_to_galaxy( self ):
+ '''Install bed_to_gff_converter_0130 into the running Galaxy instance.'''
+ '''
+ We are at step 1 - Install the bed_to_gff_converter repository.
+ Install bed_to_gff_converter_0130, checking that the option to select the tool panel section is *not* displayed.
+ '''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ preview_strings_displayed = [ repository.name, self.get_repository_tip( repository ) ]
+ strings_displayed = [ 'Choose the configuration file' ]
+ strings_not_displayed = [ 'tool panel section' ]
+ self.install_repository( repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_tool_dependencies=False,
+ preview_strings_displayed=preview_strings_displayed,
+ strings_displayed=strings_displayed,
+ strings_not_displayed=strings_not_displayed,
+ post_submit_strings_displayed=[ repository.name, 'new' ],
+ includes_tools_for_display_in_tool_panel=False )
+
+ def test_0015_uninstall_and_verify_tool_panel_section( self ):
+ '''Uninstall bed_to_gff_converter_0130 and verify that the saved tool_panel_section is None.'''
+ '''
+ We are at step 3 - Make sure the bed_to_gff_converter tool is not displayed in the tool panel.
+ The previous tool panel section for a tool is only recorded in the metadata when a repository is uninstalled,
+ so we have to uninstall it first, then verify that it was not assigned a tool panel section.
+ '''
+ repository = test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name )
+ self.uninstall_repository( repository, remove_from_disk=True )
+ self.verify_installed_repository_no_tool_panel_section( repository )
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/tool_shed/functional/test_1410_update_manager.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1410_update_manager.py
@@ -0,0 +1,125 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+import logging, time
+log = logging.getLogger(__name__)
+
+repository_name = 'filtering_1410'
+repository_description = "Galaxy's filtering tool"
+repository_long_description = "Long description of Galaxy's filtering repository"
+
+category_name = 'Test 1410 - Galaxy Update Manager'
+category_description = 'Functional test suite to test the update manager.'
+
+'''
+1. Create and populate the filtering_1410 repository.
+2. Install filtering_1410 to Galaxy.
+3. Upload a readme file.
+4. Verify that the browse page now shows an update available.
+'''
+
+
+class TestUpdateManager( ShedTwillTestCase ):
+ '''Test the Galaxy update manager.'''
+
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ """
+ Create all the user accounts that are needed for this test script to run independently of other tests.
+ Previously created accounts will not be re-created.
+ """
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ galaxy_admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ galaxy_admin_user_private_role = test_db_util.get_galaxy_private_role( galaxy_admin_user )
+
+ def test_0005_create_filtering_repository( self ):
+ '''Create and populate the filtering_1410 repository.'''
+ '''
+ We are at step 1 - Create and populate the filtering_1410 repository.
+ Create filtering_1410 and upload the tool tarball to it.
+ '''
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=repository_name,
+ description=repository_description,
+ long_description=repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ) )
+ self.upload_file( repository,
+ filename='filtering/filtering_1.1.0.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=True,
+ commit_message="Uploaded filtering 1.1.0",
+ strings_displayed=[],
+ strings_not_displayed=[] )
+
+ def test_0010_install_filtering_repository( self ):
+ '''Install the filtering_1410 repository.'''
+ '''
+ We are at step 2 - Install filtering_1410 to Galaxy.
+ Install the filtering repository to Galaxy.
+ '''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.install_repository( 'filtering_1410',
+ common.test_user_1_name,
+ category_name,
+ new_tool_panel_section='test_1410' )
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'filtering_1410', common.test_user_1_name )
+ strings_displayed = [ 'filtering_1410',
+ "Galaxy's filtering tool",
+ 'user1',
+ self.url.replace( 'http://', '' ),
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
+
+ def test_0015_upload_readme_file( self ):
+ '''Upload readme.txt to filtering_1410.'''
+ '''
+ We are at step 3 - Upload a readme file.
+ Upload readme.txt. This will have the effect of making the installed changeset revision not be the most recent downloadable revision,
+ but without generating a second downloadable revision. Then sleep for 3 seconds to make sure the update manager picks up the new
+ revision.
+ '''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ filename='readme.txt',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message="Uploaded readme.txt",
+ strings_displayed=[],
+ strings_not_displayed=[] )
+
+ def test_0020_check_for_displayed_update( self ):
+ '''Browse installed repositories and verify update.'''
+ '''
+ We are at step 4 - Verify that the browse page now shows an update available.
+ The browse page should now show filtering_1410 as installed, but with a yellow box indicating that there is an update available.
+ '''
+ # Wait 3 seconds, just to be sure we're past hours_between_check.
+ time.sleep( 3 )
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.display_galaxy_browse_repositories_page( strings_displayed=[ 'state-color-running' ] )
+
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -278,8 +278,10 @@
database_connection = galaxy_database_connection,
database_engine_option_pool_size = '10',
datatype_converters_config_file = "datatype_converters_conf.xml.sample",
+ enable_tool_shed_check = True,
file_path = galaxy_file_path,
global_conf = global_conf,
+ hours_between_check = 0.001,
id_secret = 'changethisinproductiontoo',
job_queue_workers = 5,
log_destination = "stdout",
diff -r 34dfad3b8811968a7988de90817f499dc7ee860c -r 9159442d180f9f7baf282b93ad70ce6ddbecec32 test/tool_shed/test_data/bed_to_gff_converter/bed_to_gff_converter.tar
Binary file test/tool_shed/test_data/bed_to_gff_converter/bed_to_gff_converter.tar has changed
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dan: Allow reloading Data Managers of the same ID.
by commits-noreply@bitbucket.org 13 Mar '13
by commits-noreply@bitbucket.org 13 Mar '13
13 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1a68cc962622/
changeset: 1a68cc962622
user: dan
date: 2013-03-13 16:55:17
summary: Allow reloading Data Managers of the same ID.
affected #: 2 files
diff -r 5b8d2cb81bd023c8d62d8038c37d696b38aa91a7 -r 1a68cc9626220afa542f0ac00951511e7eae56bf lib/galaxy/tools/data_manager/manager.py
--- a/lib/galaxy/tools/data_manager/manager.py
+++ b/lib/galaxy/tools/data_manager/manager.py
@@ -25,8 +25,8 @@
self.filename = xml_filename or self.app.config.data_manager_config_file
self.load_from_xml( self.filename )
if self.app.config.shed_data_manager_config_file:
- self.load_from_xml( self.app.config.shed_data_manager_config_file, store_tool_path=False )
- def load_from_xml( self, xml_filename, store_tool_path=True ):
+ self.load_from_xml( self.app.config.shed_data_manager_config_file, store_tool_path=False, replace_existing=True )
+ def load_from_xml( self, xml_filename, store_tool_path=True, replace_existing=False ):
try:
tree = util.parse_xml( xml_filename )
except Exception, e:
@@ -44,19 +44,24 @@
tool_path = '.'
self.tool_path = tool_path
for data_manager_elem in root.findall( 'data_manager' ):
- self.load_manager_from_elem( data_manager_elem )
- def load_manager_from_elem( self, data_manager_elem, tool_path=None, add_manager=True ):
+ self.load_manager_from_elem( data_manager_elem, replace_existing=replace_existing )
+ def load_manager_from_elem( self, data_manager_elem, tool_path=None, add_manager=True, replace_existing=False ):
try:
data_manager = DataManager( self, data_manager_elem, tool_path=tool_path )
except Exception, e:
log.error( "Error loading data_manager '%s':\n%s" % ( e, util.xml_to_string( data_manager_elem ) ) )
return None
if add_manager:
- self.add_manager( data_manager )
+ self.add_manager( data_manager, replace_existing=replace_existing )
log.debug( 'Loaded Data Manager: %s' % ( data_manager.id ) )
return data_manager
- def add_manager( self, data_manager ):
- assert data_manager.id not in self.data_managers, "A data manager has been defined twice: %s" % ( data_manager.id )
+ def add_manager( self, data_manager, replace_existing=False ):
+ if not replace_existing:
+ assert data_manager.id not in self.data_managers, "A data manager has been defined twice: %s" % ( data_manager.id )
+ elif data_manager.id in self.data_managers:
+ # Data Manager already exists, remove first one and replace with new one
+ log.warning( "A data manager has been defined twice and will be replaced with the last loaded version: %s" % ( data_manager.id ) )
+ self.remove_manager( data_manager.id )
self.data_managers[ data_manager.id ] = data_manager
for data_table_name in data_manager.data_tables.keys():
if data_table_name not in self.managed_data_tables:
diff -r 5b8d2cb81bd023c8d62d8038c37d696b38aa91a7 -r 1a68cc9626220afa542f0ac00951511e7eae56bf lib/tool_shed/util/shed_util.py
--- a/lib/tool_shed/util/shed_util.py
+++ b/lib/tool_shed/util/shed_util.py
@@ -1261,7 +1261,7 @@
tool_elem = suc.generate_tool_elem( repository.tool_shed, repository.name, repository.installed_changeset_revision,
repository.owner, tool_config_filename, tool, None )
elem.insert( 0, tool_elem )
- data_manager = app.data_managers.load_manager_from_elem( elem, tool_path=shed_config_dict.get( 'tool_path', '' ) )
+ data_manager = app.data_managers.load_manager_from_elem( elem, tool_path=shed_config_dict.get( 'tool_path', '' ), replace_existing=True )
if data_manager:
rval.append( data_manager )
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dan: Change community to tool_shed in manage_db.sh notes.
by commits-noreply@bitbucket.org 13 Mar '13
by commits-noreply@bitbucket.org 13 Mar '13
13 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5b8d2cb81bd0/
changeset: 5b8d2cb81bd0
user: dan
date: 2013-03-13 16:15:07
summary: Change community to tool_shed in manage_db.sh notes.
affected #: 1 file
diff -r 07262aa197881ac0d69be70987f8fb89e0564570 -r 5b8d2cb81bd023c8d62d8038c37d696b38aa91a7 manage_db.sh
--- a/manage_db.sh
+++ b/manage_db.sh
@@ -2,7 +2,7 @@
#######
# NOTE: To downgrade to a specific version, use something like:
-# sh manage_db.sh downgrade --version=3 <community if using that webapp - galaxy is the default>
+# sh manage_db.sh downgrade --version=3 <tool_shed if using that webapp - galaxy is the default>
#######
cd `dirname $0`
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: carlfeberhard: browser tests: remove error handlers inside tryStepsCatch; add HDA state tests; begin moving selectors and text to central locations
by commits-noreply@bitbucket.org 12 Mar '13
by commits-noreply@bitbucket.org 12 Mar '13
12 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/07262aa19788/
changeset: 07262aa19788
user: carlfeberhard
date: 2013-03-12 22:32:41
summary: browser tests: remove error handlers inside tryStepsCatch; add HDA state tests; begin moving selectors and text to central locations
affected #: 6 files
diff -r c03c46ced8cc0ac2f36d92ac093684e6cf828609 -r 07262aa197881ac0d69be70987f8fb89e0564570 test/casperjs/anon-history-tests.js
--- a/test/casperjs/anon-history-tests.js
+++ b/test/casperjs/anon-history-tests.js
@@ -39,23 +39,22 @@
spaceghost.info( 'Will use fixtureData.testUser: ' + email );
}
-var galaxyCookieName = 'galaxysession',
-
- nameSelector = 'div#history-name',
- unnamedName = 'Unnamed history',
- subtitleSelector = 'div#history-subtitle-area',
- initialSizeStr = '0 bytes',
- tagIconSelector = '#history-tag.icon-button',
- annoIconSelector = '#history-annotate.icon-button',
- //emptyMsgSelector = '#emptyHistoryMessage';
- emptyMsgSelector = '.infomessagesmall',
- emptyMsgStr = "Your history is empty. Click 'Get Data' on the left pane to start",
-
- tooltipSelector = '.bs-tooltip',
- anonNameTooltip = 'You must be logged in to edit your history name',
+var tooltipSelector = '.bs-tooltip',
editableTextClass = 'editable-text',
- editableTextInputSelector = 'input#renaming-active';
+ editableTextInputSelector = 'input#renaming-active',
+
+ galaxyCookieName = 'galaxysession';
+
+ unnamedName = spaceghost.historypanel.data.text.history.newName,
+ nameSelector = spaceghost.historypanel.data.selectors.history.name,
+ subtitleSelector = spaceghost.historypanel.data.selectors.history.subtitle,
+ initialSizeStr = spaceghost.historypanel.data.text.history.newSize,
+ tagIconSelector = spaceghost.historypanel.data.selectors.history.tagIcon,
+ annoIconSelector = spaceghost.historypanel.data.selectors.history.annoIcon,
+ emptyMsgSelector = spaceghost.historypanel.data.selectors.history.emptyMsg,
+ emptyMsgStr = spaceghost.historypanel.data.text.history.emptyMsg,
+ anonNameTooltip = spaceghost.historypanel.data.text.anonymous.tooltips.name;
var historyFrameInfo = {},
testUploadInfo = {};
@@ -111,8 +110,8 @@
this.test.assertDoesntExist( tagIconSelector, 'Tag icon button not found' );
this.test.assertDoesntExist( annoIconSelector, 'Annotation icon button not found' );
+ this.test.assertExists( emptyMsgSelector, emptyMsgSelector + ' exists' );
this.test.comment( "A message about the current history being empty should be displayed" );
- this.test.assertExists( emptyMsgSelector, emptyMsgSelector + ' exists' );
this.test.assertVisible( emptyMsgSelector, 'Empty history message is visible' );
this.test.assertSelectorHasText( emptyMsgSelector, emptyMsgStr,
'Message contains "' + emptyMsgStr + '"' );
diff -r c03c46ced8cc0ac2f36d92ac093684e6cf828609 -r 07262aa197881ac0d69be70987f8fb89e0564570 test/casperjs/casperjs_runner.py
--- a/test/casperjs/casperjs_runner.py
+++ b/test/casperjs/casperjs_runner.py
@@ -344,6 +344,15 @@
self.run_js_script( 'anon-history-tests.js' )
+class Test_04_HDAs( CasperJSTestCase ):
+ """(Minimal) casperjs tests for tools.
+ """
+ def test_00_HDA_states( self ):
+ """Test structure rendering of HDAs in all the possible HDA states
+ """
+ self.run_js_script( 'hda-state-tests.js' )
+
+
# ==================================================================== MAIN
if __name__ == '__main__':
diff -r c03c46ced8cc0ac2f36d92ac093684e6cf828609 -r 07262aa197881ac0d69be70987f8fb89e0564570 test/casperjs/hda-state-tests.js
--- /dev/null
+++ b/test/casperjs/hda-state-tests.js
@@ -0,0 +1,140 @@
+// have to handle errors here - or phantom/casper won't bail but _HANG_
+try {
+ var utils = require( 'utils' ),
+ xpath = require( 'casper' ).selectXPath,
+ format = utils.format,
+
+ //...if there's a better way - please let me know, universe
+ scriptDir = require( 'system' ).args[3]
+ // remove the script filename
+ .replace( /[\w|\.|\-|_]*$/, '' )
+ // if given rel. path, prepend the curr dir
+ .replace( /^(?!\/)/, './' ),
+ spaceghost = require( scriptDir + 'spaceghost' ).create({
+ // script options here (can be overridden by CLI)
+ //verbose: true,
+ //logLevel: debug,
+ scriptDir: scriptDir
+ });
+
+ spaceghost.start();
+
+} catch( error ){
+ console.debug( error );
+ phantom.exit( 1 );
+}
+
+
+// ===================================================================
+/* TODO:
+ currently going to fake states via JS
+ - better if we can capture actual hdas in these states
+ - easier said than done - API?
+*/
+// =================================================================== globals and helpers
+var email = spaceghost.user.getRandomEmail(),
+ password = '123456';
+if( spaceghost.fixtureData.testUser ){
+ email = spaceghost.fixtureData.testUser.email;
+ password = spaceghost.fixtureData.testUser.password;
+ spaceghost.info( 'Will use fixtureData.testUser: ' + email );
+}
+
+var newHistoryName = "Test History",
+ historyFrameInfo = {},
+ filepathToUpload = '../../test-data/1.txt',
+ possibleHDAStates = [],
+ testUploadInfo = {};
+
+// ------------------------------------------------------------------- set up
+// start a new user
+spaceghost.user.loginOrRegisterUser( email, password );
+// ??: why is a reload needed here? If we don't, loggedInAs === '' ...
+spaceghost.thenOpen( spaceghost.baseUrl, function(){
+ var loggedInAs = spaceghost.user.loggedInAs();
+ this.test.assert( loggedInAs === email, 'loggedInAs() matches email: "' + loggedInAs + '"' );
+});
+
+// grab the history frame bounds for mouse later tests
+spaceghost.then( function(){
+ historyFrameInfo = this.getElementInfo( 'iframe[name="galaxy_history"]' );
+ //this.debug( 'historyFrameInfo:' + this.jsonStr( historyFrameInfo ) );
+});
+
+// upload a file
+spaceghost.then( function upload(){
+ spaceghost.tools.uploadFile( filepathToUpload, function uploadCallback( _uploadInfo ){
+ testUploadInfo = _uploadInfo;
+ this.info( 'testUploadInfo:' + this.jsonStr( testUploadInfo ) );
+ });
+});
+
+spaceghost.then( function getHDAStates(){
+ this.withFrame( this.selectors.frames.history, function(){
+ var model = this.evaluate( function(){
+ return Galaxy.currHistoryPanel.model.hdas.at( 0 ).attributes;
+ });
+ this.info( 'model:' + this.jsonStr( model ) );
+ });
+});
+
+spaceghost.then( function checkNewState(){
+ this.test.comment( 'HDAs in the "new" state should be well formed' );
+
+ this.withFrame( this.selectors.frames.history, function(){
+ // set state directly through model
+ //TODO: not ideal
+ this.evaluate( function(){
+ return Galaxy.currHistoryPanel.model.hdas.at( 0 ).set( 'state', 'new' );
+ });
+ // wait for re-render
+ this.wait( 500, function(){
+ var uploadSelector = '#' + testUploadInfo.hdaElement.attributes.id;
+ this.test.assertVisible( uploadSelector, 'HDA is visible' );
+
+ // should have proper title and hid
+ var titleSelector = uploadSelector + ' .historyItemTitle';
+ this.test.assertVisible( titleSelector, 'HDA title is visible' );
+ this.test.assertSelectorHasText( titleSelector, testUploadInfo.name,
+ 'HDA has proper title' );
+ this.test.assertSelectorHasText( titleSelector, testUploadInfo.hid,
+ 'HDA has proper hid' );
+
+ // should have the new state class
+ var newStateClass = 'historyItem-new',
+ uploadElement = this.getElementInfo( uploadSelector );
+ this.test.assert( uploadElement.attributes['class'].indexOf( newStateClass ) !== -1,
+ 'HDA has new state class' );
+
+ // since we're using css there's no great way to test this
+ //var stateIconSelector = uploadSelector + ' .state-icon';
+ //this.test.assertVisible( stateIconSelector, 'HDA has proper hid' );
+
+ // should NOT have any of the three, main buttons
+ var buttonSelector = uploadSelector + ' .historyItemButtons a';
+ this.test.assertDoesntExist( buttonSelector, 'No display, edit, or delete buttons' );
+
+ // expand and check the body
+ this.click( titleSelector );
+ this.wait( 500, function(){
+ var bodySelector = uploadSelector + ' .historyItemBody';
+ this.test.assertVisible( bodySelector, 'HDA body is visible (after expanding)' );
+
+ var expectedBodyText = 'This is a new dataset';
+ this.test.assertSelectorHasText( bodySelector, expectedBodyText,
+ 'HDA body has text: ' + expectedBodyText );
+
+ // restore to collapsed
+ this.click( titleSelector );
+ });
+ });
+ });
+});
+
+// =================================================================== TESTS
+
+
+// ===================================================================
+spaceghost.run( function(){
+ this.test.done();
+});
diff -r c03c46ced8cc0ac2f36d92ac093684e6cf828609 -r 07262aa197881ac0d69be70987f8fb89e0564570 test/casperjs/history-panel-tests.js
--- a/test/casperjs/history-panel-tests.js
+++ b/test/casperjs/history-panel-tests.js
@@ -24,7 +24,6 @@
phantom.exit( 1 );
}
-
// ===================================================================
/* TODO:
possibly break this file up
@@ -38,41 +37,36 @@
spaceghost.info( 'Will use fixtureData.testUser: ' + email );
}
-var nameSelector = 'div#history-name',
- unnamedName = 'Unnamed history',
- subtitleSelector = 'div#history-subtitle-area',
- initialSizeStr = '0 bytes',
- tagIconSelector = '#history-tag.icon-button',
- annoIconSelector = '#history-annotate.icon-button',
- emptyMsgSelector = '.infomessagesmall',
- emptyMsgStr = "Your history is empty. Click 'Get Data' on the left pane to start",
+// selectors and labels
+var nameSelector = spaceghost.historypanel.data.selectors.history.name,
+ subtitleSelector = spaceghost.historypanel.data.selectors.history.subtitle,
+ unnamedName = spaceghost.historypanel.data.text.history.newName,
+ initialSizeStr = spaceghost.historypanel.data.text.history.newSize,
+ tagIconSelector = spaceghost.historypanel.data.selectors.history.tagIcon,
+ annoIconSelector = spaceghost.historypanel.data.selectors.history.annoIcon,
+ emptyMsgSelector = spaceghost.historypanel.data.selectors.history.emptyMsg,
+ emptyMsgStr = spaceghost.historypanel.data.text.history.emptyMsg,
+ wrapperOkClassName = spaceghost.historypanel.data.selectors.hda.wrapper.stateClasses.ok,
+ tagAreaSelector = spaceghost.historypanel.data.selectors.history.tagArea,
+ annoAreaSelector = spaceghost.historypanel.data.selectors.history.annoArea,
+ nameTooltip = spaceghost.historypanel.data.text.history.tooltips.name,
tooltipSelector = '.bs-tooltip',
- nameTooltip = 'Click to rename history',
editableTextClass = 'editable-text',
editableTextInputSelector = 'input#renaming-active',
- wrapperOkClassName = 'historyItem-ok',
-
- tagAreaSelector = '#history-tag-area',
- annoAreaSelector = '#history-annotation-area',
refreshButtonSelector = 'a#history-refresh-button',
refreshButtonIconSelector = 'span.fa-icon-refresh',
refreshButtonHref = '/history',
- //historyOptionsButtonSelector = '#history-options-button',
- //historyOptionsButtonIconSelector = 'span.fa-icon-cog',
includeDeletedOptionsLabel = spaceghost.historyoptions.data.labels.options.includeDeleted;
-function historyOptionXpathByLabel( label ){
- return xpath( '//ul[@id="history-options-button-menu"]/li/a[text()[contains(.,"' + label + '")]]' );
-}
-
+// local
var newHistoryName = "Test History",
filepathToUpload = '../../test-data/1.txt',
historyFrameInfo = {},
- testUploadInfo = {};
+ uploadInfo = {};
// =================================================================== TESTS
@@ -184,7 +178,7 @@
// ------------------------------------------------------------------- check structure of NON empty history
// upload file: 1.txt
spaceghost.then( function upload(){
- this.test.comment( 'should be able to upload files' );
+ this.test.comment( 'uploaded file should appear in history' );
spaceghost.tools.uploadFile( filepathToUpload, function uploadCallback( _uploadInfo ){
this.debug( 'uploaded HDA info: ' + this.jsonStr( _uploadInfo ) );
var hasHda = _uploadInfo.hdaElement,
@@ -224,27 +218,21 @@
// ------------------------------------------------------------------- tags
// keeping this light here - better for it's own test file
+//TODO: check tooltips
spaceghost.then( function openTags(){
this.test.comment( 'tag area should open when the history panel tag icon is clicked' );
this.withFrame( this.selectors.frames.history, function(){
+ this.capture( 'tag-area.png' );
this.mouseEvent( 'click', tagIconSelector );
this.wait( 1000, function(){
this.test.assertVisible( tagAreaSelector, 'Tag area is now displayed' );
});
});
});
-spaceghost.then( function closeTags(){
- this.test.comment( 'tag area should close when the history panel tag icon is clicked again' );
- this.withFrame( this.selectors.frames.history, function(){
- this.mouseEvent( 'click', tagIconSelector );
- this.wait( 1000, function(){
- this.test.assertNotVisible( tagAreaSelector, 'Tag area is now hidden' );
- });
- });
-});
// ------------------------------------------------------------------- annotation
// keeping this light here - better for it's own test file
+//TODO: check tooltips
spaceghost.then( function openAnnotation(){
this.test.comment( 'annotation area should open when the history panel annotation icon is clicked' );
this.withFrame( this.selectors.frames.history, function(){
@@ -346,7 +334,8 @@
this.withFrame( this.selectors.frames.history, function(){
this.waitForSelector( nameSelector, function(){
//TODO: to conv. fn
- this.click( '#' + uploadInfo.hdaElement.attributes.id + ' .historyItemUndelete' );
+ this.click( '#' + uploadInfo.hdaElement.attributes.id
+ + ' ' + this.historypanel.data.selectors.history.undeleteLink );
});
});
});
diff -r c03c46ced8cc0ac2f36d92ac093684e6cf828609 -r 07262aa197881ac0d69be70987f8fb89e0564570 test/casperjs/modules/historypanel.js
--- a/test/casperjs/modules/historypanel.js
+++ b/test/casperjs/modules/historypanel.js
@@ -31,7 +31,6 @@
*/
// =================================================================== INTERNAL
-
// =================================================================== API (external)
/** Find the casper element info of the hda wrapper given the hda title and hid.
* NOTE: if more than one is found, will return the first found.
@@ -46,6 +45,7 @@
wrapperInfo = null;
//NOTE: depends on jquery
+ //TODO: move to xpath
wrapperInfo = spaceghost.evaluate( function( titleContains ){
// find the title, then the wrapper (2 containers up)
var $title = $( '.historyItemTitle:contains(' + titleContains + ')' );
@@ -199,3 +199,54 @@
});
return spaceghost;
};
+
+
+// =================================================================== SELECTORS
+//TODO: data is not a very good name
+HistoryPanel.prototype.data = {
+ selectors : {
+ history : {
+ name : 'div#history-name',
+ subtitle : 'div#history-subtitle-area',
+ tagIcon : '#history-tag.icon-button',
+ tagArea : '#history-tag-area',
+ annoIcon : '#history-annotate.icon-button',
+ annoArea : '#history-annotation-area',
+ emptyMsg : '.infomessagesmall',
+ undeleteLink : '.historyItemUndelete'
+ },
+ hda : {
+ wrapper : {
+ stateClasses : {
+ prefix : 'historyItem-',
+ ok : 'historyItem-ok'
+ }
+ }
+ }
+ },
+ labels : {
+ history : {
+ },
+ hda : {
+ }
+ },
+ text : {
+ anonymous : {
+ tooltips : {
+ name : 'You must be logged in to edit your history name'
+ }
+ },
+ history : {
+ tooltips : {
+ name : 'Click to rename history',
+ tagIcon : 'Edit history tags',
+ annoIcon : 'Edit history annotation'
+ },
+ newName : 'Unnamed history',
+ newSize : '0 bytes',
+ emptyMsg : "Your history is empty. Click 'Get Data' on the left pane to start"
+ },
+ hda : {
+ }
+ }
+};
diff -r c03c46ced8cc0ac2f36d92ac093684e6cf828609 -r 07262aa197881ac0d69be70987f8fb89e0564570 test/casperjs/spaceghost.js
--- a/test/casperjs/spaceghost.js
+++ b/test/casperjs/spaceghost.js
@@ -120,7 +120,7 @@
// save errors for later output (needs to go before process CLI)
this.errors = [];
- this.on( 'error', function( msg, backtrace ){
+ this.on( 'error', function pushErrorToStack( msg, backtrace ){
//this.debug( 'adding error to stack: ' + msg + ', trace:' + JSON.stringify( backtrace, null, 2 ) );
this.errors.push({ msg: msg, backtrace: backtrace });
});
@@ -187,6 +187,8 @@
//screenOnError : { defaultsTo: false, flag: 'error-screen', help: 'capture a screenshot on a page error' },
//textOnError : { defaultsTo: false, flag: 'error-text', help: 'output page text on a page error' },
//htmlOnError : { defaultsTo: false, flag: 'error-html', help: 'output page html on a page error' }
+ //htmlOnFail : { defaultsTo: false, flag: 'fail-html', help: 'output page html on a test failure' },
+ //screenOnFail : { defaultsTo: false, flag: 'fail-screen', help: 'capture a screenshot on a test failure' }
};
// --url parameter required (the url of the server to test with)
@@ -196,17 +198,16 @@
}
this.baseUrl = this.cli.get( 'url' );
+ //TODO: move these handlers into _setUpEventHandlers
// --return-json: supress all output except for JSON logs, test results, and errors at finish
// this switch allows a testing suite to send JSON data back via stdout (w/o logs, echos interferring)
this.options.returnJsonOnly = CLI_OPTIONS.returnJsonOnly.defaultsTo;
if( this.cli.has( CLI_OPTIONS.returnJsonOnly.flag ) ){
this.options.returnJsonOnly = true;
- //this._suppressOutput();
this._redirectOutputToStderr();
-
// output json on fail-first error
- this.on( 'error', function( msg, backtrace ){
+ this.on( 'error', function outputJSONOnError( msg, backtrace ){
//console.debug( 'return-json caught error' );
if( spaceghost.options.exitOnError ){
this.outputStateAsJson();
@@ -216,6 +217,7 @@
// non-error finshes/json-output are handled in run() for now
}
+ //TODO: remove boilerplate
// --error-on-alert=false: don't throw an error if the page calls alert (default: true)
this.options.raisePageError = CLI_OPTIONS.raisePageError.defaultsTo;
if( this.cli.has( CLI_OPTIONS.raisePageError.flag ) ){
@@ -244,10 +246,13 @@
this.on( 'page.error', this._saveHtmlOnErrorHandler );
}
- // --error-screen: print the casper.debugPage (the page's text) output on an error
+ // --error-screen: capture the casper browser screen on an error
if( this.cli.has( 'error-screen' ) ){
this.on( 'page.error', this._saveScreenOnErrorHandler );
}
+
+ // --fail-html: print the casper.debugHTML (the page's html) output on an test failure
+ // --fail-screen: print the casper browser screen output on an test failure
*/
// get any fixture data passed in as JSON in args
@@ -457,16 +462,18 @@
//TODO: * @param {Boolean} removeOtherListeners option to remove other listeners while this fires
// create three steps: 1) set up new error handler, 2) try the fn, 3) check for errors and rem. handler
var originalExitOnError,
+ originalErrorHandlers = [],
errorMsg = '', errorTrace = [],
recordError = function( msg, trace ){
errorMsg = msg; errorTrace = trace;
};
- // dont bail on the error (but preserve option), install hndlr to simply record msg, trace
- //NOTE: haven't had to remove other listeners yet
+ // dont bail on the error (but preserve option), uninstall other handlers,
+ // and install hndlr to simply record msg, trace
this.then( function(){
originalExitOnError = this.options.exitOnError;
this.options.exitOnError = false;
+ originalErrorHandlers = this.popAllListeners( 'error' );
this.on( 'error', recordError );
});
@@ -478,12 +485,24 @@
if( errorMsg ){
catchFn.call( this, errorMsg, errorTrace );
}
- // remove that listener either way and restore the bail option
+ // remove that listener either way, restore original handlers, and restore the bail option
this.removeListener( 'error', recordError );
+ this.addListeners( 'error', originalErrorHandlers );
this.options.exitOnError = originalExitOnError;
});
};
+/** Override capture to save to environ: GALAXY_TEST_SAVE (or passed in from CLI)
+ * @param {String} filename the image filename
+ */
+SpaceGhost.prototype.capture = function capture( filename, clipRect_or_selector ){
+ //TODO: override with saved output dir
+ if( clipRect_or_selector && ( !utils.isClipRect( clipRect_or_selector ) ) ){
+ this.debug( "USING CAPTURE SELECTOR" );
+ return this.captureSelector( filename, clipRect_or_selector );
+ }
+ return Casper.prototype.capture.apply( this, arguments );
+};
// =================================================================== TESTING
//TODO: form fill doesn't work as casperjs would want it - often a button -> controller url
@@ -600,6 +619,25 @@
// =================================================================== GALAXY CONVENIENCE
// =================================================================== MISCELLANEOUS
+/** Pop all handlers for eventName from casper and return them in order.
+ * @param {String} eventName the name of the event from which to remove handlers
+ */
+SpaceGhost.prototype.popAllListeners = function popAllListeners( eventName ){
+ var returnedListeners = this.listeners( eventName );
+ this.removeAllListeners( eventName );
+ return returnedListeners;
+};
+
+/** Add the given list of handler functions to the listener for eventName in order.
+ * @param {String} eventName the name of the event to which to add handlers
+ * @param {Array} handlerArray an array of event handler functions to add
+ */
+SpaceGhost.prototype.addListeners = function addListeners( eventName, handlerArray ){
+ for( var i=0; i<handlerArray.length; i++ ){
+ this.addListener( eventName, handlerArray[i] );
+ }
+};
+
/** Send message to stderr
*/
SpaceGhost.prototype.stderr = function( msg ){
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: inithello: Fix error message about missing app.config.biostar_url in the tool shed.
by commits-noreply@bitbucket.org 12 Mar '13
by commits-noreply@bitbucket.org 12 Mar '13
12 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c03c46ced8cc/
changeset: c03c46ced8cc
user: inithello
date: 2013-03-12 21:50:41
summary: Fix error message about missing app.config.biostar_url in the tool shed.
affected #: 1 file
diff -r bb15d2df55dd838914c1bc0c762dbdd039b7d8ae -r c03c46ced8cc0ac2f36d92ac093684e6cf828609 lib/galaxy/webapps/tool_shed/config.py
--- a/lib/galaxy/webapps/tool_shed/config.py
+++ b/lib/galaxy/webapps/tool_shed/config.py
@@ -82,6 +82,7 @@
self.support_url = kwargs.get( 'support_url', 'http://wiki.g2.bx.psu.edu/Support' )
self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
self.blog_url = kwargs.get( 'blog_url', None )
+ self.biostar_url = kwargs.get( 'biostar_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
self.log_events = False
self.cloud_controller_instance = False
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: greg: Handle exceptions when attempting to parse certain xml definition files contained in tool shed repositories.
by commits-noreply@bitbucket.org 12 Mar '13
by commits-noreply@bitbucket.org 12 Mar '13
12 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/bb15d2df55dd/
changeset: bb15d2df55dd
user: greg
date: 2013-03-12 21:36:44
summary: Handle exceptions when attempting to parse certain xml definition files contained in tool shed repositories.
affected #: 3 files
diff -r 1344cb5a487538671a652398ff254572b76e7719 -r bb15d2df55dd838914c1bc0c762dbdd039b7d8ae lib/tool_shed/galaxy_install/__init__.py
--- a/lib/tool_shed/galaxy_install/__init__.py
+++ b/lib/tool_shed/galaxy_install/__init__.py
@@ -1,7 +1,7 @@
"""
Classes encapsulating the management of repositories installed from Galaxy tool sheds.
"""
-import os
+import os, logging
import tool_shed.util.shed_util
import tool_shed.util.shed_util_common
from galaxy.model.orm import and_
@@ -12,6 +12,8 @@
pkg_resources.require( 'elementtree' )
from elementtree import ElementTree, ElementInclude
+log = logging.getLogger( __name__ )
+
class InstalledRepositoryManager( object ):
def __init__( self, app ):
self.app = app
@@ -23,7 +25,11 @@
self.installed_repository_dicts = []
def get_repository_install_dir( self, tool_shed_repository ):
for tool_config in self.tool_configs:
- tree = ElementTree.parse( tool_config )
+ try:
+ tree = ElementTree.parse( tool_config )
+ except Exception, e:
+ log.debug( "Exception attempting to parse %s: %s" % ( str( tool_config ), str( e ) ) )
+ return None
root = tree.getroot()
ElementInclude.include( root )
tool_path = root.get( 'tool_path', None )
diff -r 1344cb5a487538671a652398ff254572b76e7719 -r bb15d2df55dd838914c1bc0c762dbdd039b7d8ae lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -15,6 +15,7 @@
def clean_tool_shed_url( base_url ):
protocol, base = base_url.split( '://' )
return base.rstrip( '/' )
+
def create_or_update_tool_dependency( app, tool_shed_repository, name, version, type, status, set_status=True ):
# Called from Galaxy (never the tool shed) when a new repository is being installed or when an uninstalled repository is being reinstalled.
sa_session = app.model.context.current
@@ -32,6 +33,7 @@
sa_session.add( tool_dependency )
sa_session.flush()
return tool_dependency
+
def create_temporary_tool_dependencies_config( tool_shed_url, name, owner, changeset_revision ):
"""Make a call to the tool shed to get the required repository's tool_dependencies.xml file."""
url = url_join( tool_shed_url,
@@ -54,6 +56,7 @@
message += "%s of installed repository %s owned by %s." % ( str( changeset_revision ), str( name ), str( owner ) )
raise Exception( message )
return None
+
def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ):
"""Return the absolute path to a specified disk file contained in a repository."""
stripped_file_name = strip_path( file_name )
@@ -64,6 +67,7 @@
if name == stripped_file_name:
return os.path.abspath( os.path.join( root, name ) )
return file_path
+
def get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app, tool_shed_url, name, owner, changeset_revision ):
sa_session = app.model.context.current
tool_shed = clean_tool_shed_url( tool_shed_url )
@@ -90,6 +94,7 @@
if tool_shed_repository:
return tool_shed_repository
return None
+
def get_tool_dependency_by_name_type_repository( app, repository, name, type ):
sa_session = app.model.context.current
return sa_session.query( app.model.ToolDependency ) \
@@ -97,6 +102,7 @@
app.model.ToolDependency.table.c.name == name,
app.model.ToolDependency.table.c.type == type ) ) \
.first()
+
def get_tool_dependency_by_name_version_type_repository( app, repository, name, version, type ):
sa_session = app.model.context.current
return sa_session.query( app.model.ToolDependency ) \
@@ -105,6 +111,7 @@
app.model.ToolDependency.table.c.version == version,
app.model.ToolDependency.table.c.type == type ) ) \
.first()
+
def get_tool_dependency_install_dir( app, repository_name, repository_owner, repository_changeset_revision, tool_dependency_type, tool_dependency_name,
tool_dependency_version ):
if tool_dependency_type == 'package':
@@ -121,8 +128,10 @@
repository_owner,
repository_name,
repository_changeset_revision ) )
+
def get_tool_shed_repository_install_dir( app, tool_shed_repository ):
return os.path.abspath( tool_shed_repository.repo_files_directory( app ) )
+
def get_updated_changeset_revisions_from_tool_shed( tool_shed_url, name, owner, changeset_revision ):
"""Get all appropriate newer changeset revisions for the repository defined by the received tool_shed_url / name / owner combination."""
url = url_join( tool_shed_url,
@@ -131,6 +140,7 @@
text = response.read()
response.close()
return text
+
def handle_set_environment_entry_for_package( app, install_dir, tool_shed_repository, package_name, package_version, elem ):
action_dict = {}
actions = []
@@ -168,6 +178,7 @@
actions.append( ( action_type, action_dict ) )
return tool_dependency, actions
return None, actions
+
def install_and_build_package_via_fabric( app, tool_dependency, actions_dict ):
sa_session = app.model.context.current
try:
@@ -182,6 +193,7 @@
tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
sa_session.add( tool_dependency )
sa_session.flush()
+
def install_package( app, elem, tool_shed_repository, tool_dependencies=None ):
# The value of tool_dependencies is a partial or full list of ToolDependency records associated with the tool_shed_repository.
sa_session = app.model.context.current
@@ -317,6 +329,7 @@
sa_session.add( tool_dependency )
sa_session.flush()
return tool_dependency
+
def install_via_fabric( app, tool_dependency, actions_elem, install_dir, package_name=None, proprietary_fabfile_path=None, **kwd ):
"""Parse a tool_dependency.xml file's <actions> tag set to gather information for the installation via fabric."""
sa_session = app.model.context.current
@@ -389,6 +402,7 @@
raise Exception( 'Tool dependency installation using proprietary fabric scripts is not yet supported.' )
else:
install_and_build_package_via_fabric( app, tool_dependency, actions_dict )
+
def listify( item ):
"""
Make a single item a single item list, or return a list if passed a
@@ -402,6 +416,7 @@
return item.split( ',' )
else:
return [ item ]
+
def populate_actions_dict( app, dependent_install_dir, required_install_dir, tool_shed_repository, package_name, package_version, tool_dependencies_config ):
"""
Populate an actions dictionary that can be sent to fabric_util.install_and_build_package. This method handles the scenario where a tool_dependencies.xml
@@ -420,24 +435,26 @@
action_dict = {}
if tool_dependencies_config:
required_td_tree = parse_xml( tool_dependencies_config )
- required_td_root = required_td_tree.getroot()
- for required_td_elem in required_td_root:
- # Find the appropriate package name and version.
- if required_td_elem.tag == 'package':
- # <package name="bwa" version="0.5.9">
- required_td_package_name = required_td_elem.get( 'name', None )
- required_td_package_version = required_td_elem.get( 'version', None )
- if required_td_package_name==package_name and required_td_package_version==package_version:
- tool_dependency, actions = handle_set_environment_entry_for_package( app=app,
- install_dir=required_install_dir,
- tool_shed_repository=tool_shed_repository,
- package_name=package_name,
- package_version=package_version,
- elem=required_td_elem )
- if actions:
- actions_dict[ 'actions' ] = actions
- break
+ if required_td_tree:
+ required_td_root = required_td_tree.getroot()
+ for required_td_elem in required_td_root:
+ # Find the appropriate package name and version.
+ if required_td_elem.tag == 'package':
+ # <package name="bwa" version="0.5.9">
+ required_td_package_name = required_td_elem.get( 'name', None )
+ required_td_package_version = required_td_elem.get( 'version', None )
+ if required_td_package_name==package_name and required_td_package_version==package_version:
+ tool_dependency, actions = handle_set_environment_entry_for_package( app=app,
+ install_dir=required_install_dir,
+ tool_shed_repository=tool_shed_repository,
+ package_name=package_name,
+ package_version=package_version,
+ elem=required_td_elem )
+ if actions:
+ actions_dict[ 'actions' ] = actions
+ break
return tool_dependency, actions_dict
+
def run_proprietary_fabric_method( app, elem, proprietary_fabfile_path, install_dir, package_name=None, **kwd ):
"""
TODO: Handle this using the fabric api.
@@ -474,6 +491,7 @@
if returncode:
return message
handle_environment_settings( app, tool_dependency, install_dir, cmd )
+
def run_subprocess( app, cmd ):
env = os.environ
PYTHONPATH = env.get( 'PYTHONPATH', '' )
@@ -496,6 +514,7 @@
except:
pass
return returncode, message
+
def set_environment( app, elem, tool_shed_repository ):
"""
Create a ToolDependency to set an environment variable. This is different from the process used to set an environment variable that is associated
@@ -548,6 +567,7 @@
sa_session.add( tool_dependency )
sa_session.flush()
print 'Environment variable ', env_var_name, 'set in', install_dir
+
def strip_path( fpath ):
if not fpath:
return fpath
@@ -556,12 +576,18 @@
except:
file_name = fpath
return file_name
+
def parse_xml( file_name ):
"""Returns a parsed xml tree."""
- tree = ElementTree.parse( file_name )
+ try:
+ tree = ElementTree.parse( file_name )
+ except Exception, e:
+ print "Exception attempting to parse ", file_name, ": ", str( e )
+ return None
root = tree.getroot()
ElementInclude.include( root )
return tree
+
def url_join( *args ):
parts = []
for arg in args:
diff -r 1344cb5a487538671a652398ff254572b76e7719 -r bb15d2df55dd838914c1bc0c762dbdd039b7d8ae lib/tool_shed/util/shed_util.py
--- a/lib/tool_shed/util/shed_util.py
+++ b/lib/tool_shed/util/shed_util.py
@@ -1054,7 +1054,11 @@
"""
installed_tool_dependencies = []
# Parse the tool_dependencies.xml config.
- tree = ElementTree.parse( tool_dependencies_config )
+ try:
+ tree = ElementTree.parse( tool_dependencies_config )
+ except Exception, e:
+ log.debug( "Exception attempting to parse %s: %s" % ( str( tool_dependencies_config ), str( e ) ) )
+ return installed_tool_dependencies
root = tree.getroot()
ElementInclude.include( root )
fabric_version_checked = False
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4e08a83d7f9b/
changeset: 4e08a83d7f9b
user: inithello
date: 2013-03-12 21:23:31
summary: Explicitly set flags in repository_metadata records when a new tip has been uploaded that does not generate a new downloadable changeset revision.
affected #: 1 file
diff -r 9510a9b449c400ca441c8e1fccd66e6a71f22624 -r 4e08a83d7f9bacd20b956dc3e97810df6c19c730 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -3837,6 +3837,30 @@
repository_metadata.changeset_revision = repository.tip( trans.app )
repository_metadata.metadata = metadata_dict
repository_metadata.downloadable = downloadable
+ if 'datatypes' in metadata_dict:
+ repository_metadata.includes_datatypes = True
+ else:
+ repository_metadata.includes_datatypes = False
+ if 'repository_dependencies' in metadata_dict:
+ repository_metadata.has_repository_dependencies = True
+ else:
+ repository_metadata.has_repository_dependencies = False
+ if 'tool_dependencies' in metadata_dict:
+ repository_metadata.includes_tool_dependencies = True
+ else:
+ repository_metadata.includes_tool_dependencies = False
+ if 'tools' in metadata_dict:
+ repository_metadata.includes_tools = True
+ else:
+ repository_metadata.includes_tools = False
+ if 'workflows' in metadata_dict:
+ repository_metadata.includes_workflows = True
+ else:
+ repository_metadata.includes_workflows = False
+ repository_metadata.do_not_test = False
+ repository_metadata.time_last_tested = None
+ repository_metadata.tools_functionally_correct = False
+ repository_metadata.tool_test_errors = None
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
else:
https://bitbucket.org/galaxy/galaxy-central/commits/1344cb5a4875/
changeset: 1344cb5a4875
user: inithello
date: 2013-03-12 21:23:48
summary: Fix for setting hours_between_check.
affected #: 1 file
diff -r 4e08a83d7f9bacd20b956dc3e97810df6c19c730 -r 1344cb5a487538671a652398ff254572b76e7719 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -70,7 +70,7 @@
self.enable_tool_shed_check = string_as_bool( kwargs.get( 'enable_tool_shed_check', False ) )
try:
self.hours_between_check = kwargs.get( 'hours_between_check', 12 )
- if isinstance( hours_between_check, float ):
+ if isinstance( self.hours_between_check, float ):
# Float values are supported for functional tests.
if self.hours_between_check < 0.001 or self.hours_between_check > 24.0:
self.hours_between_check = 12.0
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: carlfeberhard: history panel: remove display, edit, and delete buttons from hdas in the new state; pack scripts
by commits-noreply@bitbucket.org 12 Mar '13
by commits-noreply@bitbucket.org 12 Mar '13
12 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9510a9b449c4/
changeset: 9510a9b449c4
user: carlfeberhard
date: 2013-03-12 21:14:37
summary: history panel: remove display, edit, and delete buttons from hdas in the new state; pack scripts
affected #: 4 files
diff -r e0e98d8937f4c01d65208dc65af9fe33bae62e0d -r 9510a9b449c400ca441c8e1fccd66e6a71f22624 static/scripts/mvc/dataset/hda-base.js
--- a/static/scripts/mvc/dataset/hda-base.js
+++ b/static/scripts/mvc/dataset/hda-base.js
@@ -201,6 +201,7 @@
// don't show display if not viewable or not accessible
// (do show if in error, running)
if( ( this.model.get( 'state' ) === HistoryDatasetAssociation.STATES.NOT_VIEWABLE )
+ || ( this.model.get( 'state' ) === HistoryDatasetAssociation.STATES.NEW )
|| ( !this.model.get( 'accessible' ) ) ){
this.displayButton = null;
return null;
diff -r e0e98d8937f4c01d65208dc65af9fe33bae62e0d -r 9510a9b449c400ca441c8e1fccd66e6a71f22624 static/scripts/mvc/dataset/hda-edit.js
--- a/static/scripts/mvc/dataset/hda-edit.js
+++ b/static/scripts/mvc/dataset/hda-edit.js
@@ -99,8 +99,8 @@
// don't show edit while uploading, in-accessible
// DO show if in error (ala previous history panel)
//TODO??: not viewable/accessible are essentially the same (not viewable set from accessible)
- if( ( this.model.get( 'state' ) === HistoryDatasetAssociation.STATES.UPLOAD )
- //|| ( this.model.get( 'state' ) === HistoryDatasetAssociation.STATES.ERROR )
+ if( ( this.model.get( 'state' ) === HistoryDatasetAssociation.STATES.NEW )
+ || ( this.model.get( 'state' ) === HistoryDatasetAssociation.STATES.UPLOAD )
|| ( this.model.get( 'state' ) === HistoryDatasetAssociation.STATES.NOT_VIEWABLE )
|| ( !this.model.get( 'accessible' ) ) ){
this.editButton = null;
@@ -136,7 +136,8 @@
_render_deleteButton : function(){
// don't show delete if...
//TODO??: not viewable/accessible are essentially the same (not viewable set from accessible)
- if( ( this.model.get( 'state' ) === HistoryDatasetAssociation.STATES.NOT_VIEWABLE )
+ if( ( this.model.get( 'state' ) === HistoryDatasetAssociation.STATES.NEW )
+ || ( this.model.get( 'state' ) === HistoryDatasetAssociation.STATES.NOT_VIEWABLE )
|| ( !this.model.get( 'accessible' ) ) ){
this.deleteButton = null;
return null;
diff -r e0e98d8937f4c01d65208dc65af9fe33bae62e0d -r 9510a9b449c400ca441c8e1fccd66e6a71f22624 static/scripts/packed/mvc/dataset/hda-base.js
--- a/static/scripts/packed/mvc/dataset/hda-base.js
+++ b/static/scripts/packed/mvc/dataset/hda-base.js
@@ -1,1 +1,1 @@
-var HDABaseView=BaseView.extend(LoggableMixin).extend({tagName:"div",className:"historyItemContainer",initialize:function(a){if(a.logger){this.logger=this.model.logger=a.logger}this.log(this+".initialize:",a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton];if(!a.urlTemplates){throw ("HDAView needs urlTemplates on initialize")}this.urlTemplates=a.urlTemplates;this.expanded=a.expanded||false;this.model.bind("change",this.render,this)},render:function(){var b=this,e=this.model.get("id"),c=this.model.get("state"),a=$("<div/>").attr("id","historyItem-"+e),d=(this.$el.children().size()===0);this.$el.attr("id","historyItemContainer-"+e);this.urls=this._renderUrls(this.urlTemplates,this.model.toJSON());a.addClass("historyItemWrapper").addClass("historyItem").addClass("historyItem-"+c);a.append(this._render_warnings());a.append(this._render_titleBar());this._setUpBehaviors(a);this.body=$(this._render_body());a.append(this.body);this.$el.fadeOut("fast",function(){b.$el.children().remove();b.$el.append(a).fadeIn("fast",function(){b.log(b+" rendered:",b.$el);var f="rendered";if(d){f+=":initial"}else{if(b.model.inReadyState()){f+=":ready"}}b.trigger(f)})});return this},_renderUrls:function(d,a){var b=this,c={};_.each(d,function(e,f){if(_.isObject(e)){c[f]=b._renderUrls(e,a)}else{if(f==="meta_download"){c[f]=b._renderMetaDownloadUrls(e,a)}else{try{c[f]=_.template(e,a)}catch(g){throw (b+"._renderUrls error: "+g+"\n rendering:"+e+"\n with "+JSON.stringify(a))}}}});return c},_renderMetaDownloadUrls:function(b,a){return _.map(a.meta_files,function(c){return{url:_.template(b,{id:a.id,file_type:c.file_type}),file_type:c.file_type}})},_setUpBehaviors:function(a){a=a||this.$el;make_popup_menus(a);a.find(".tooltip").tooltip({placement:"bottom"})},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(this.model.toJSON())))},_render_titleBar:function(){var a=$('<div class="historyItemTitleBar" style="overflow: 
hidden"></div>');a.append(this._render_titleButtons());a.append('<span class="state-icon"></span>');a.append(this._render_titleLink());return a},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());return a},_render_displayButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.displayButton=null;return null}var a={icon_class:"display",target:"galaxy_main"};if(this.model.get("purged")){a.enabled=false;a.title=_l("Cannot display datasets removed from disk")}else{a.title=_l("Display data in browser");a.href=this.urls.display}this.displayButton=new IconButtonView({model:new IconButton(a)});return this.displayButton.render().$el},_render_titleLink:function(){return $(jQuery.trim(HDABaseView.templates.titleLink(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_hdaSummary:function(){var a=_.extend(this.model.toJSON(),{urls:this.urls});return HDABaseView.templates.hdaSummary(a)},_render_primaryActionButtons:function(c){var a=this,b=$("<div/>").attr("id","primary-actions-"+this.model.get("id"));_.each(c,function(d){b.append(d.call(a))});return b},_render_downloadButton:function(){if(this.model.get("purged")||!this.model.hasData()){return null}var a=HDABaseView.templates.downloadLinks(_.extend(this.model.toJSON(),{urls:this.urls}));return $(a)},_render_showParamsButton:function(){this.showParamsButton=new IconButtonView({model:new IconButton({title:_l("View details"),href:this.urls.show_params,target:"galaxy_main",icon_class:"information"})});return this.showParamsButton.render().$el},_render_displayApps:function(){if(!this.model.hasData()){return null}var 
a=$("<div/>").addClass("display-apps");if(!_.isEmpty(this.model.get("display_types"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_types")}))}if(!_.isEmpty(this.model.get("display_apps"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_apps")}))}return a},_render_peek:function(){var a=this.model.get("peek");if(!a){return null}return $("<div/>").append($("<pre/>").attr("id","peek"+this.model.get("id")).addClass("peek").append(a))},_render_body:function(){var a=$("<div/>").attr("id","info-"+this.model.get("id")).addClass("historyItemBody").attr("style","display: none");if(this.expanded){this._render_body_html(a);a.show()}return a},_render_body_html:function(a){a.html("");switch(this.model.get("state")){case HistoryDatasetAssociation.STATES.NEW:break;case HistoryDatasetAssociation.STATES.NOT_VIEWABLE:this._render_body_not_viewable(a);break;case HistoryDatasetAssociation.STATES.UPLOAD:this._render_body_uploading(a);break;case HistoryDatasetAssociation.STATES.PAUSED:this._render_body_paused(a);break;case HistoryDatasetAssociation.STATES.QUEUED:this._render_body_queued(a);break;case HistoryDatasetAssociation.STATES.RUNNING:this._render_body_running(a);break;case HistoryDatasetAssociation.STATES.ERROR:this._render_body_error(a);break;case HistoryDatasetAssociation.STATES.DISCARDED:this._render_body_discarded(a);break;case HistoryDatasetAssociation.STATES.SETTING_METADATA:this._render_body_setting_metadata(a);break;case HistoryDatasetAssociation.STATES.EMPTY:this._render_body_empty(a);break;case HistoryDatasetAssociation.STATES.FAILED_METADATA:this._render_body_failed_metadata(a);break;case HistoryDatasetAssociation.STATES.OK:this._render_body_ok(a);break;default:a.append($('<div>Error: unknown dataset state "'+this.model.get("state")+'".</div>'))}a.append('<div style="clear: both"></div>');this._setUpBehaviors(a)},_render_body_not_viewable:function(a){a.append($("<div>"+_l("You do not have permission to 
view dataset")+".</div>"))},_render_body_uploading:function(a){a.append($("<div>"+_l("Dataset is uploading")+"</div>"))},_render_body_queued:function(a){a.append($("<div>"+_l("Job is waiting to run")+".</div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_paused:function(a){a.append($("<div>"+_l("Job is paused. Use the history menu to resume")+".</div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_running:function(a){a.append("<div>"+_l("Job is currently running")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_error:function(a){if(!this.model.get("purged")){a.append($("<div>"+this.model.get("misc_blurb")+"</div>"))}a.append((_l("An error occurred with this dataset")+": <i>"+$.trim(this.model.get("misc_info"))+"</i>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers.concat([this._render_downloadButton])))},_render_body_discarded:function(a){a.append("<div>"+_l("The job creating this dataset was cancelled before completion")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_setting_metadata:function(a){a.append($("<div>"+_l("Metadata is being auto-detected")+".</div>"))},_render_body_empty:function(a){a.append($("<div>"+_l("No data")+": 
<i>"+this.model.get("misc_blurb")+"</i></div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_failed_metadata:function(a){a.append($(HDABaseView.templates.failedMetadata(_.extend(this.model.toJSON(),{urls:this.urls}))));this._render_body_ok(a)},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));a.append('<div class="clear"/>');a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility"},toggleBodyVisibility:function(c,a){var b=this;this.expanded=(a===undefined)?(!this.body.is(":visible")):(a);if(this.expanded){b._render_body_html(b.body);this.body.slideDown("fast",function(){b.trigger("body-expanded",b.model.get("id"))})}else{this.body.slideUp("fast",function(){b.trigger("body-collapsed",b.model.get("id"))})}},remove:function(b){var a=this;this.$el.fadeOut("fast",function(){a.$el.remove();if(b){b()}})},toString:function(){var a=(this.model)?(this.model+""):("(no model)");return"HDABaseView("+a+")"}});HDABaseView.templates={warningMsg:Handlebars.templates["template-warningmessagesmall"],messages:Handlebars.templates["template-hda-warning-messages"],titleLink:Handlebars.templates["template-hda-titleLink"],hdaSummary:Handlebars.templates["template-hda-hdaSummary"],downloadLinks:Handlebars.templates["template-hda-downloadLinks"],failedMetadata:Handlebars.templates["template-hda-failedMetadata"],displayApps:Handlebars.templates["template-hda-displayApps"]};
\ No newline at end of file
+var HDABaseView=BaseView.extend(LoggableMixin).extend({tagName:"div",className:"historyItemContainer",initialize:function(a){if(a.logger){this.logger=this.model.logger=a.logger}this.log(this+".initialize:",a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton];if(!a.urlTemplates){throw ("HDAView needs urlTemplates on initialize")}this.urlTemplates=a.urlTemplates;this.expanded=a.expanded||false;this.model.bind("change",this.render,this)},render:function(){var b=this,e=this.model.get("id"),c=this.model.get("state"),a=$("<div/>").attr("id","historyItem-"+e),d=(this.$el.children().size()===0);this.$el.attr("id","historyItemContainer-"+e);this.urls=this._renderUrls(this.urlTemplates,this.model.toJSON());a.addClass("historyItemWrapper").addClass("historyItem").addClass("historyItem-"+c);a.append(this._render_warnings());a.append(this._render_titleBar());this._setUpBehaviors(a);this.body=$(this._render_body());a.append(this.body);this.$el.fadeOut("fast",function(){b.$el.children().remove();b.$el.append(a).fadeIn("fast",function(){b.log(b+" rendered:",b.$el);var f="rendered";if(d){f+=":initial"}else{if(b.model.inReadyState()){f+=":ready"}}b.trigger(f)})});return this},_renderUrls:function(d,a){var b=this,c={};_.each(d,function(e,f){if(_.isObject(e)){c[f]=b._renderUrls(e,a)}else{if(f==="meta_download"){c[f]=b._renderMetaDownloadUrls(e,a)}else{try{c[f]=_.template(e,a)}catch(g){throw (b+"._renderUrls error: "+g+"\n rendering:"+e+"\n with "+JSON.stringify(a))}}}});return c},_renderMetaDownloadUrls:function(b,a){return _.map(a.meta_files,function(c){return{url:_.template(b,{id:a.id,file_type:c.file_type}),file_type:c.file_type}})},_setUpBehaviors:function(a){a=a||this.$el;make_popup_menus(a);a.find(".tooltip").tooltip({placement:"bottom"})},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(this.model.toJSON())))},_render_titleBar:function(){var a=$('<div class="historyItemTitleBar" style="overflow: 
hidden"></div>');a.append(this._render_titleButtons());a.append('<span class="state-icon"></span>');a.append(this._render_titleLink());return a},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());return a},_render_displayButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(this.model.get("state")===HistoryDatasetAssociation.STATES.NEW)||(!this.model.get("accessible"))){this.displayButton=null;return null}var a={icon_class:"display",target:"galaxy_main"};if(this.model.get("purged")){a.enabled=false;a.title=_l("Cannot display datasets removed from disk")}else{a.title=_l("Display data in browser");a.href=this.urls.display}this.displayButton=new IconButtonView({model:new IconButton(a)});return this.displayButton.render().$el},_render_titleLink:function(){return $(jQuery.trim(HDABaseView.templates.titleLink(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_hdaSummary:function(){var a=_.extend(this.model.toJSON(),{urls:this.urls});return HDABaseView.templates.hdaSummary(a)},_render_primaryActionButtons:function(c){var a=this,b=$("<div/>").attr("id","primary-actions-"+this.model.get("id"));_.each(c,function(d){b.append(d.call(a))});return b},_render_downloadButton:function(){if(this.model.get("purged")||!this.model.hasData()){return null}var a=HDABaseView.templates.downloadLinks(_.extend(this.model.toJSON(),{urls:this.urls}));return $(a)},_render_showParamsButton:function(){this.showParamsButton=new IconButtonView({model:new IconButton({title:_l("View details"),href:this.urls.show_params,target:"galaxy_main",icon_class:"information"})});return this.showParamsButton.render().$el},_render_displayApps:function(){if(!this.model.hasData()){return null}var 
a=$("<div/>").addClass("display-apps");if(!_.isEmpty(this.model.get("display_types"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_types")}))}if(!_.isEmpty(this.model.get("display_apps"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_apps")}))}return a},_render_peek:function(){var a=this.model.get("peek");if(!a){return null}return $("<div/>").append($("<pre/>").attr("id","peek"+this.model.get("id")).addClass("peek").append(a))},_render_body:function(){var a=$("<div/>").attr("id","info-"+this.model.get("id")).addClass("historyItemBody").attr("style","display: none");if(this.expanded){this._render_body_html(a);a.show()}return a},_render_body_html:function(a){a.html("");switch(this.model.get("state")){case HistoryDatasetAssociation.STATES.NEW:this._render_body_new(a);break;case HistoryDatasetAssociation.STATES.NOT_VIEWABLE:this._render_body_not_viewable(a);break;case HistoryDatasetAssociation.STATES.UPLOAD:this._render_body_uploading(a);break;case HistoryDatasetAssociation.STATES.PAUSED:this._render_body_paused(a);break;case HistoryDatasetAssociation.STATES.QUEUED:this._render_body_queued(a);break;case HistoryDatasetAssociation.STATES.RUNNING:this._render_body_running(a);break;case HistoryDatasetAssociation.STATES.ERROR:this._render_body_error(a);break;case HistoryDatasetAssociation.STATES.DISCARDED:this._render_body_discarded(a);break;case HistoryDatasetAssociation.STATES.SETTING_METADATA:this._render_body_setting_metadata(a);break;case HistoryDatasetAssociation.STATES.EMPTY:this._render_body_empty(a);break;case HistoryDatasetAssociation.STATES.FAILED_METADATA:this._render_body_failed_metadata(a);break;case HistoryDatasetAssociation.STATES.OK:this._render_body_ok(a);break;default:a.append($('<div>Error: unknown dataset state "'+this.model.get("state")+'".</div>'))}a.append('<div style="clear: both"></div>');this._setUpBehaviors(a)},_render_body_new:function(b){var a="This is a new dataset and not 
all of its data are available yet";b.append($("<div>"+_l(a)+"</div>"))},_render_body_not_viewable:function(a){a.append($("<div>"+_l("You do not have permission to view dataset")+"</div>"))},_render_body_uploading:function(a){a.append($("<div>"+_l("Dataset is uploading")+"</div>"))},_render_body_queued:function(a){a.append($("<div>"+_l("Job is waiting to run")+"</div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_paused:function(a){a.append($("<div>"+_l("Job is paused. Use the history menu to resume")+"</div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_running:function(a){a.append("<div>"+_l("Job is currently running")+"</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_error:function(a){if(!this.model.get("purged")){a.append($("<div>"+this.model.get("misc_blurb")+"</div>"))}a.append((_l("An error occurred with this dataset")+": <i>"+$.trim(this.model.get("misc_info"))+"</i>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers.concat([this._render_downloadButton])))},_render_body_discarded:function(a){a.append("<div>"+_l("The job creating this dataset was cancelled before completion")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_setting_metadata:function(a){a.append($("<div>"+_l("Metadata is being auto-detected")+".</div>"))},_render_body_empty:function(a){a.append($("<div>"+_l("No data")+": 
<i>"+this.model.get("misc_blurb")+"</i></div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_failed_metadata:function(a){a.append($(HDABaseView.templates.failedMetadata(_.extend(this.model.toJSON(),{urls:this.urls}))));this._render_body_ok(a)},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));a.append('<div class="clear"/>');a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility"},toggleBodyVisibility:function(c,a){var b=this;this.expanded=(a===undefined)?(!this.body.is(":visible")):(a);if(this.expanded){b._render_body_html(b.body);this.body.slideDown("fast",function(){b.trigger("body-expanded",b.model.get("id"))})}else{this.body.slideUp("fast",function(){b.trigger("body-collapsed",b.model.get("id"))})}},remove:function(b){var a=this;this.$el.fadeOut("fast",function(){a.$el.remove();if(b){b()}})},toString:function(){var a=(this.model)?(this.model+""):("(no model)");return"HDABaseView("+a+")"}});HDABaseView.templates={warningMsg:Handlebars.templates["template-warningmessagesmall"],messages:Handlebars.templates["template-hda-warning-messages"],titleLink:Handlebars.templates["template-hda-titleLink"],hdaSummary:Handlebars.templates["template-hda-hdaSummary"],downloadLinks:Handlebars.templates["template-hda-downloadLinks"],failedMetadata:Handlebars.templates["template-hda-failedMetadata"],displayApps:Handlebars.templates["template-hda-displayApps"]};
\ No newline at end of file
diff -r e0e98d8937f4c01d65208dc65af9fe33bae62e0d -r 9510a9b449c400ca441c8e1fccd66e6a71f22624 static/scripts/packed/mvc/dataset/hda-edit.js
--- a/static/scripts/packed/mvc/dataset/hda-edit.js
+++ b/static/scripts/packed/mvc/dataset/hda-edit.js
@@ -1,1 +1,1 @@
-var HDAEditView=HDABaseView.extend(LoggableMixin).extend({initialize:function(a){HDABaseView.prototype.initialize.call(this,a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton,this._render_rerunButton]},_setUpBehaviors:function(c){HDABaseView.prototype._setUpBehaviors.call(this,c);var a=this,b=this.urls.purge,d=c.find("#historyItemPurger-"+this.model.get("id"));if(d){d.attr("href",["javascript","void(0)"].join(":"));d.click(function(e){var f=jQuery.ajax(b);f.success(function(i,g,h){a.model.set("purged",true)});f.error(function(h,g,i){alert("("+h.status+") Unable to purge this dataset:\n"+i)})})}},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());a.append(this._render_editButton());a.append(this._render_deleteButton());return a},_render_editButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.UPLOAD)||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.editButton=null;return null}var c=this.model.get("purged"),a=this.model.get("deleted"),b={title:_l("Edit Attributes"),href:this.urls.edit,target:"galaxy_main",icon_class:"edit"};if(a||c){b.enabled=false;if(c){b.title=_l("Cannot edit attributes of datasets removed from disk")}else{if(a){b.title=_l("Undelete dataset to edit attributes")}}}this.editButton=new IconButtonView({model:new IconButton(b)});return this.editButton.render().$el},_render_deleteButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.deleteButton=null;return null}var 
a=this,b=a.urls["delete"],c={title:_l("Delete"),href:b,id:"historyItemDeleter-"+this.model.get("id"),icon_class:"delete",on_click:function(){$.ajax({url:b,type:"POST",error:function(){a.$el.show()},success:function(){a.model.set({deleted:true})}})}};if(this.model.get("deleted")||this.model.get("purged")){c={title:_l("Dataset is already deleted"),icon_class:"delete",enabled:false}}this.deleteButton=new IconButtonView({model:new IconButton(c)});return this.deleteButton.render().$el},_render_hdaSummary:function(){var a=_.extend(this.model.toJSON(),{urls:this.urls});if(this.model.get("metadata_dbkey")==="?"&&!this.model.isDeletedOrPurged()){_.extend(a,{dbkey_unknown_and_editable:true})}return HDABaseView.templates.hdaSummary(a)},_render_errButton:function(){if(this.model.get("state")!==HistoryDatasetAssociation.STATES.ERROR){this.errButton=null;return null}this.errButton=new IconButtonView({model:new IconButton({title:_l("View or report this error"),href:this.urls.report_error,target:"galaxy_main",icon_class:"bug"})});return this.errButton.render().$el},_render_rerunButton:function(){this.rerunButton=new IconButtonView({model:new IconButton({title:_l("Run this job again"),href:this.urls.rerun,target:"galaxy_main",icon_class:"arrow-circle"})});return this.rerunButton.render().$el},_render_visualizationsButton:function(){var c=this.model.get("dbkey"),a=this.model.get("visualizations"),f=this.urls.visualization,d={},g={dataset_id:this.model.get("id"),hda_ldda:"hda"};if(c){g.dbkey=c}if(!(this.model.hasData())||!(a&&a.length)||!(f)){this.visualizationsButton=null;return null}this.visualizationsButton=new IconButtonView({model:new IconButton({title:_l("Visualize"),href:f,icon_class:"chart_curve"})});var b=this.visualizationsButton.render().$el;b.addClass("visualize-icon");function e(h){switch(h){case"trackster":return create_trackster_action_fn(f,g,c);case"scatterplot":return create_scatterplot_action_fn(f,g);default:return 
function(){window.parent.location=f+"/"+h+"?"+$.param(g)}}}if(a.length===1){b.attr("title",a[0]);b.click(e(a[0]))}else{_.each(a,function(i){var h=i.charAt(0).toUpperCase()+i.slice(1);d[_l(h)]=e(i)});make_popupmenu(b,d)}return b},_render_secondaryActionButtons:function(b){var c=$("<div/>"),a=this;c.attr("style","float: right;").attr("id","secondary-actions-"+this.model.get("id"));_.each(b,function(d){c.append(d.call(a))});return c},_render_tagButton:function(){if(!(this.model.hasData())||(!this.urls.tags.get)){this.tagButton=null;return null}this.tagButton=new IconButtonView({model:new IconButton({title:_l("Edit dataset tags"),target:"galaxy_main",href:this.urls.tags.get,icon_class:"tags"})});return this.tagButton.render().$el},_render_annotateButton:function(){if(!(this.model.hasData())||(!this.urls.annotation.get)){this.annotateButton=null;return null}this.annotateButton=new IconButtonView({model:new IconButton({title:_l("Edit dataset annotation"),target:"galaxy_main",icon_class:"annotate"})});return this.annotateButton.render().$el},_render_tagArea:function(){if(!this.urls.tags.set){return null}return $(HDAEditView.templates.tagArea(_.extend(this.model.toJSON(),{urls:this.urls})))},_render_annotationArea:function(){if(!this.urls.annotation.get){return null}return $(HDAEditView.templates.annotationArea(_.extend(this.model.toJSON(),{urls:this.urls})))},_render_body_error:function(a){HDABaseView.prototype._render_body_error.call(this,a);var 
b=a.find("#primary-actions-"+this.model.get("id"));b.prepend(this._render_errButton())},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton,this._render_rerunButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton,this._render_rerunButton,this._render_visualizationsButton]));a.append(this._render_secondaryActionButtons([this._render_tagButton,this._render_annotateButton]));a.append('<div class="clear"/>');a.append(this._render_tagArea());a.append(this._render_annotationArea());a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility","click a.icon-button.tags":"loadAndDisplayTags","click a.icon-button.annotate":"loadAndDisplayAnnotation"},loadAndDisplayTags:function(b){this.log(this+".loadAndDisplayTags",b);var c=this.$el.find(".tag-area"),a=c.find(".tag-elt");if(c.is(":hidden")){if(!jQuery.trim(a.html())){$.ajax({url:this.urls.tags.get,error:function(){alert(_l("Tagging failed"))},success:function(d){a.html(d);a.find(".tooltip").tooltip();c.slideDown("fast")}})}else{c.slideDown("fast")}}else{c.slideUp("fast")}return false},loadAndDisplayAnnotation:function(b){this.log(this+".loadAndDisplayAnnotation",b);var d=this.$el.find(".annotation-area"),c=d.find(".annotation-elt"),a=this.urls.annotation.set;if(d.is(":hidden")){if(!jQuery.trim(c.html())){$.ajax({url:this.urls.annotation.get,error:function(){alert(_l("Annotations failed"))},success:function(e){if(e===""){e="<em>"+_l("Describe or add notes to dataset")+"</em>"}c.html(e);d.find(".tooltip").tooltip();async_save_text(c.attr("id"),c.attr("id"),a,"new_annotation",18,true,4);d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=(this.model)?(this.model+""):("(no 
model)");return"HDAView("+a+")"}});HDAEditView.templates={tagArea:Handlebars.templates["template-hda-tagArea"],annotationArea:Handlebars.templates["template-hda-annotationArea"]};function create_scatterplot_action_fn(a,b){action=function(){var d=$(window.parent.document).find("iframe#galaxy_main"),c=a+"/scatterplot?"+$.param(b);d.attr("src",c);$("div.popmenu-wrapper").remove();return false};return action}function create_trackster_action_fn(a,c,b){return function(){var d={};if(b){d["f-dbkey"]=b}$.ajax({url:a+"/list_tracks?"+$.param(d),dataType:"html",error:function(){alert(_l("Could not add this dataset to browser")+".")},success:function(e){var f=window.parent;f.show_modal(_l("View Data in a New or Saved Visualization"),"",{Cancel:function(){f.hide_modal()},"View in saved visualization":function(){f.show_modal(_l("Add Data to Saved Visualization"),e,{Cancel:function(){f.hide_modal()},"Add to visualization":function(){$(f.document).find("input[name=id]:checked").each(function(){var g=$(this).val();c.id=g;f.location=a+"/trackster?"+$.param(c)})}})},"View in new visualization":function(){f.location=a+"/trackster?"+$.param(c)}})}});return false}};
\ No newline at end of file
+var HDAEditView=HDABaseView.extend(LoggableMixin).extend({initialize:function(a){HDABaseView.prototype.initialize.call(this,a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton,this._render_rerunButton]},_setUpBehaviors:function(c){HDABaseView.prototype._setUpBehaviors.call(this,c);var a=this,b=this.urls.purge,d=c.find("#historyItemPurger-"+this.model.get("id"));if(d){d.attr("href",["javascript","void(0)"].join(":"));d.click(function(e){var f=jQuery.ajax(b);f.success(function(i,g,h){a.model.set("purged",true)});f.error(function(h,g,i){alert("("+h.status+") Unable to purge this dataset:\n"+i)})})}},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());a.append(this._render_editButton());a.append(this._render_deleteButton());return a},_render_editButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.NEW)||(this.model.get("state")===HistoryDatasetAssociation.STATES.UPLOAD)||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.editButton=null;return null}var c=this.model.get("purged"),a=this.model.get("deleted"),b={title:_l("Edit Attributes"),href:this.urls.edit,target:"galaxy_main",icon_class:"edit"};if(a||c){b.enabled=false;if(c){b.title=_l("Cannot edit attributes of datasets removed from disk")}else{if(a){b.title=_l("Undelete dataset to edit attributes")}}}this.editButton=new IconButtonView({model:new IconButton(b)});return this.editButton.render().$el},_render_deleteButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.NEW)||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.deleteButton=null;return null}var 
a=this,b=a.urls["delete"],c={title:_l("Delete"),href:b,id:"historyItemDeleter-"+this.model.get("id"),icon_class:"delete",on_click:function(){$.ajax({url:b,type:"POST",error:function(){a.$el.show()},success:function(){a.model.set({deleted:true})}})}};if(this.model.get("deleted")||this.model.get("purged")){c={title:_l("Dataset is already deleted"),icon_class:"delete",enabled:false}}this.deleteButton=new IconButtonView({model:new IconButton(c)});return this.deleteButton.render().$el},_render_hdaSummary:function(){var a=_.extend(this.model.toJSON(),{urls:this.urls});if(this.model.get("metadata_dbkey")==="?"&&!this.model.isDeletedOrPurged()){_.extend(a,{dbkey_unknown_and_editable:true})}return HDABaseView.templates.hdaSummary(a)},_render_errButton:function(){if(this.model.get("state")!==HistoryDatasetAssociation.STATES.ERROR){this.errButton=null;return null}this.errButton=new IconButtonView({model:new IconButton({title:_l("View or report this error"),href:this.urls.report_error,target:"galaxy_main",icon_class:"bug"})});return this.errButton.render().$el},_render_rerunButton:function(){this.rerunButton=new IconButtonView({model:new IconButton({title:_l("Run this job again"),href:this.urls.rerun,target:"galaxy_main",icon_class:"arrow-circle"})});return this.rerunButton.render().$el},_render_visualizationsButton:function(){var c=this.model.get("dbkey"),a=this.model.get("visualizations"),f=this.urls.visualization,d={},g={dataset_id:this.model.get("id"),hda_ldda:"hda"};if(c){g.dbkey=c}if(!(this.model.hasData())||!(a&&a.length)||!(f)){this.visualizationsButton=null;return null}this.visualizationsButton=new IconButtonView({model:new IconButton({title:_l("Visualize"),href:f,icon_class:"chart_curve"})});var b=this.visualizationsButton.render().$el;b.addClass("visualize-icon");function e(h){switch(h){case"trackster":return create_trackster_action_fn(f,g,c);case"scatterplot":return create_scatterplot_action_fn(f,g);default:return 
function(){window.parent.location=f+"/"+h+"?"+$.param(g)}}}if(a.length===1){b.attr("title",a[0]);b.click(e(a[0]))}else{_.each(a,function(i){var h=i.charAt(0).toUpperCase()+i.slice(1);d[_l(h)]=e(i)});make_popupmenu(b,d)}return b},_render_secondaryActionButtons:function(b){var c=$("<div/>"),a=this;c.attr("style","float: right;").attr("id","secondary-actions-"+this.model.get("id"));_.each(b,function(d){c.append(d.call(a))});return c},_render_tagButton:function(){if(!(this.model.hasData())||(!this.urls.tags.get)){this.tagButton=null;return null}this.tagButton=new IconButtonView({model:new IconButton({title:_l("Edit dataset tags"),target:"galaxy_main",href:this.urls.tags.get,icon_class:"tags"})});return this.tagButton.render().$el},_render_annotateButton:function(){if(!(this.model.hasData())||(!this.urls.annotation.get)){this.annotateButton=null;return null}this.annotateButton=new IconButtonView({model:new IconButton({title:_l("Edit dataset annotation"),target:"galaxy_main",icon_class:"annotate"})});return this.annotateButton.render().$el},_render_tagArea:function(){if(!this.urls.tags.set){return null}return $(HDAEditView.templates.tagArea(_.extend(this.model.toJSON(),{urls:this.urls})))},_render_annotationArea:function(){if(!this.urls.annotation.get){return null}return $(HDAEditView.templates.annotationArea(_.extend(this.model.toJSON(),{urls:this.urls})))},_render_body_error:function(a){HDABaseView.prototype._render_body_error.call(this,a);var 
b=a.find("#primary-actions-"+this.model.get("id"));b.prepend(this._render_errButton())},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton,this._render_rerunButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton,this._render_rerunButton,this._render_visualizationsButton]));a.append(this._render_secondaryActionButtons([this._render_tagButton,this._render_annotateButton]));a.append('<div class="clear"/>');a.append(this._render_tagArea());a.append(this._render_annotationArea());a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility","click a.icon-button.tags":"loadAndDisplayTags","click a.icon-button.annotate":"loadAndDisplayAnnotation"},loadAndDisplayTags:function(b){this.log(this+".loadAndDisplayTags",b);var c=this.$el.find(".tag-area"),a=c.find(".tag-elt");if(c.is(":hidden")){if(!jQuery.trim(a.html())){$.ajax({url:this.urls.tags.get,error:function(){alert(_l("Tagging failed"))},success:function(d){a.html(d);a.find(".tooltip").tooltip();c.slideDown("fast")}})}else{c.slideDown("fast")}}else{c.slideUp("fast")}return false},loadAndDisplayAnnotation:function(b){this.log(this+".loadAndDisplayAnnotation",b);var d=this.$el.find(".annotation-area"),c=d.find(".annotation-elt"),a=this.urls.annotation.set;if(d.is(":hidden")){if(!jQuery.trim(c.html())){$.ajax({url:this.urls.annotation.get,error:function(){alert(_l("Annotations failed"))},success:function(e){if(e===""){e="<em>"+_l("Describe or add notes to dataset")+"</em>"}c.html(e);d.find(".tooltip").tooltip();async_save_text(c.attr("id"),c.attr("id"),a,"new_annotation",18,true,4);d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=(this.model)?(this.model+""):("(no 
model)");return"HDAView("+a+")"}});HDAEditView.templates={tagArea:Handlebars.templates["template-hda-tagArea"],annotationArea:Handlebars.templates["template-hda-annotationArea"]};function create_scatterplot_action_fn(a,b){action=function(){var d=$(window.parent.document).find("iframe#galaxy_main"),c=a+"/scatterplot?"+$.param(b);d.attr("src",c);$("div.popmenu-wrapper").remove();return false};return action}function create_trackster_action_fn(a,c,b){return function(){var d={};if(b){d["f-dbkey"]=b}$.ajax({url:a+"/list_tracks?"+$.param(d),dataType:"html",error:function(){alert(_l("Could not add this dataset to browser")+".")},success:function(e){var f=window.parent;f.show_modal(_l("View Data in a New or Saved Visualization"),"",{Cancel:function(){f.hide_modal()},"View in saved visualization":function(){f.show_modal(_l("Add Data to Saved Visualization"),e,{Cancel:function(){f.hide_modal()},"Add to visualization":function(){$(f.document).find("input[name=id]:checked").each(function(){var g=$(this).val();c.id=g;f.location=a+"/trackster?"+$.param(c)})}})},"View in new visualization":function(){f.location=a+"/trackster?"+$.param(c)}})}});return false}};
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0