galaxy-commits
February 2013: 2 participants, 189 discussions
commit/galaxy-central: greg: Display the Orphan column in the valid tool dependencies container in the tool shed.
by Bitbucket 18 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/31aca8c9901e/
changeset: 31aca8c9901e
user: greg
date: 2013-02-18 22:00:43
summary: Display the Orphan column in the valid tool dependencies container in the tool shed.
affected #: 2 files
diff -r ca145a616aee1ccb2499b115be9aeacc9ca2ecbd -r 31aca8c9901ecf4ad852f7114d6550d997374888 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -514,7 +514,7 @@
installation_status=None,
repository_id=None,
tool_dependency_id=None,
- is_orphan=None )
+ is_orphan='Orphan' )
folder.tool_dependencies.append( tool_dependency )
is_orphan_description = "these dependencies may not be required by tools in this repository"
for dependency_key, requirements_dict in tool_dependencies.items():
diff -r ca145a616aee1ccb2499b115be9aeacc9ca2ecbd -r 31aca8c9901ecf4ad852f7114d6550d997374888 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -628,13 +628,17 @@
${version_str | h}
</${cell_type}><${cell_type}>${tool_dependency.type | h}</${cell_type}>
- %if trans.webapp.name == 'galaxy':
- %if is_missing:
- <${cell_type}>${tool_dependency.installation_status | h}</${cell_type}>
- %elif tool_dependency.install_dir:
- <${cell_type}>${tool_dependency.install_dir | h}</${cell_type}>
+ <${cell_type}>
+ %if trans.webapp.name == 'galaxy':
+ %if is_missing:
+ ${tool_dependency.installation_status | h}
+ %elif tool_dependency.install_dir:
+ ${tool_dependency.install_dir | h}
+ %endif
+ %else:
+ ${tool_dependency.is_orphan | h}
%endif
- %endif
+ </${cell_type}></tr><%
my_row = row_counter.count
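The template change above moves the Galaxy-only condition inside a single table cell, so that when the page is rendered by the tool shed rather than by Galaxy the cell shows the new orphan label instead of being skipped entirely. Below is a minimal Python sketch of that decision, assuming a hypothetical status_cell_text() helper and a plain dictionary in place of the real tool dependency object; it is not Galaxy code.

# Hypothetical sketch of the cell-content decision encoded in common.mako above.
def status_cell_text( webapp_name, dep, is_missing ):
    if webapp_name == 'galaxy':
        if is_missing:
            return dep.get( 'installation_status', '' )
        if dep.get( 'install_dir' ):
            return dep[ 'install_dir' ]
        return ''
    # Tool shed side: show the orphan label set in container_util.py ( 'Orphan' or None ).
    return dep.get( 'is_orphan' ) or ''

# Example: the tool shed view of an orphan dependency renders 'Orphan' in the new column.
print( status_cell_text( 'community', { 'is_orphan': 'Orphan' }, is_missing=False ) )
print( status_cell_text( 'galaxy', { 'install_dir': '/deps/blast/0.0.1' }, is_missing=False ) )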
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: inithello: Tool shed functional tests for: Citable URLs, simple repository dependencies with multiple repository owners.
by Bitbucket 18 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ca145a616aee/
changeset: ca145a616aee
user: inithello
date: 2013-02-18 21:58:52
summary: Tool shed functional tests for: Citable URLs, simple repository dependencies with multiple repository owners.
affected #: 6 files
diff -r 9e36d1ed099fb31eccbc6bf55f20c1bd3fec1773 -r ca145a616aee1ccb2499b115be9aeacc9ca2ecbd test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -695,11 +695,37 @@
self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
repository_ids = self.initiate_installation_process( new_tool_panel_section=new_tool_panel_section )
self.wait_for_repository_installation( repository_ids )
- def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
- url = '/repository/load_invalid_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
- ( self.security.encode_id( repository.id ), tool_xml, changeset_revision )
+ def load_citable_url( self,
+ username,
+ repository_name,
+ changeset_revision,
+ encoded_user_id,
+ encoded_repository_id,
+ strings_displayed=[],
+ strings_not_displayed=[],
+ strings_displayed_in_iframe=[],
+ strings_not_displayed_in_iframe=[] ):
+ url = '%s/view/%s' % ( self.url, username )
+ # If repository name is passed in, append that to the url.
+ if repository_name:
+ url += '/%s' % repository_name
+ if changeset_revision:
+ # Changeset revision should never be provided unless repository name also is.
+ assert repository_name is not None, 'Changeset revision is present, but repository name is not - aborting.'
+ url += '/%s' % changeset_revision
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ # Now load the page that should be displayed inside the iframe and check for strings.
+ if encoded_repository_id:
+ url = '/repository/view_repository?id=%s&operation=view_or_manage_repository' % encoded_repository_id
+ if changeset_revision:
+ url += '&changeset_revision=%s' % changeset_revision
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed_in_iframe, strings_not_displayed_in_iframe )
+ elif encoded_user_id:
+ url = '/repository/browse_repositories?user_id=%s&operation=repositories_by_user' % encoded_user_id
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed_in_iframe, strings_not_displayed_in_iframe )
def load_display_tool_page( self, repository, tool_xml_path, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/display_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
( self.security.encode_id( repository.id ), tool_xml_path, changeset_revision )
@@ -709,6 +735,11 @@
url = '/admin/review_tool_migration_stages'
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/repository/load_invalid_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
+ ( self.security.encode_id( repository.id ), tool_xml, changeset_revision )
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def load_workflow_image_in_tool_shed( self, repository, workflow_name, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
if not changeset_revision:
changeset_revision = self.get_repository_tip( repository )
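The new load_citable_url() helper above encodes the citable URL scheme being tested: /view/<username>, optionally extended with a repository name and a changeset revision, with the page body rendered in an iframe that points at either browse_repositories (user page) or view_repository (repository page). The sketch below restates that mapping as a standalone function; the function name and every value in the example call are placeholders, not real ids or Galaxy APIs.

# Hypothetical sketch of the URL construction performed by load_citable_url above.
def citable_and_iframe_urls( base_url, username, repository_name=None, changeset_revision=None,
                             encoded_user_id=None, encoded_repository_id=None ):
    citable = '%s/view/%s' % ( base_url, username )
    if repository_name:
        citable += '/%s' % repository_name
        if changeset_revision:
            citable += '/%s' % changeset_revision
    if encoded_repository_id:
        iframe = '/repository/view_repository?id=%s&operation=view_or_manage_repository' % encoded_repository_id
        if changeset_revision:
            iframe += '&changeset_revision=%s' % changeset_revision
    elif encoded_user_id:
        iframe = '/repository/browse_repositories?user_id=%s&operation=repositories_by_user' % encoded_user_id
    else:
        iframe = None
    return citable, iframe

# Placeholder values for illustration only.
print( citable_and_iframe_urls( 'http://localhost:9009', 'user1', 'filtering_0420',
                                changeset_revision=None, encoded_repository_id='abc123' ) )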
diff -r 9e36d1ed099fb31eccbc6bf55f20c1bd3fec1773 -r ca145a616aee1ccb2499b115be9aeacc9ca2ecbd test/tool_shed/functional/test_0120_simple_repository_dependency_multiple_owners.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0120_simple_repository_dependency_multiple_owners.py
@@ -0,0 +1,140 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+datatypes_repository_name = 'blast_datatypes_0120'
+datatypes_repository_description = 'Galaxy applicable datatypes for BLAST'
+datatypes_repository_long_description = 'Galaxy datatypes for the BLAST top hit descriptons tool'
+
+tool_repository_name = 'blastxml_to_top_descr_0120'
+tool_repository_description = 'BLAST top hit descriptions'
+tool_repository_long_description = 'Make a table from BLAST XML'
+
+'''
+Tool shed side:
+
+1) Create and populate blast_datatypes_0120.
+1a) Check for appropriate strings.
+2) Create and populate blastxml_to_top_descr_0120.
+2a) Check for appropriate strings.
+3) Upload repository_dependencies.xml to blastxml_to_top_descr_0120 that defines a relationship to blast_datatypes_0120.
+3a) Check for appropriate strings.
+'''
+
+base_datatypes_count = 0
+repository_datatypes_count = 0
+
+class TestRepositoryMultipleOwners( ShedTwillTestCase ):
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ """
+ Create all the user accounts that are needed for this test script to run independently of other tests.
+ Previously created accounts will not be re-created.
+ """
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ test_user_2 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+ test_user_2_private_role = test_db_util.get_private_role( test_user_2 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_datatypes_repository( self ):
+ """Create and populate the blast_datatypes_0120 repository"""
+ """
+ We are at step 1.
+ Create and populate blast_datatypes.
+ """
+ category = self.create_category( name='Test 0120', description='Description of test 0120' )
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ strings_displayed = [ 'Repository %s' % "'%s'" % datatypes_repository_name,
+ 'Repository %s has been created' % "'%s'" % datatypes_repository_name ]
+ repository = self.get_or_create_repository( name=datatypes_repository_name,
+ description=datatypes_repository_description,
+ long_description=datatypes_repository_long_description,
+ owner=common.test_user_2_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( repository,
+ filename='blast/blast_datatypes.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded blast_datatypes tarball.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+ def test_0010_verify_datatypes_repository( self ):
+ '''Verify the blast_datatypes_0120 repository.'''
+ '''
+ We are at step 1a.
+ Check for appropriate strings, most importantly BlastXml, BlastNucDb, and BlastProtDb,
+ the datatypes that are defined in datatypes_conf.xml.
+ '''
+ global repository_datatypes_count
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
+ strings_displayed = [ 'BlastXml', 'BlastNucDb', 'BlastProtDb', 'application/xml', 'text/html', 'blastxml', 'blastdbn', 'blastdbp']
+ self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+ repository_datatypes_count = int( self.get_repository_datatypes_count( repository ) )
+ def test_0015_create_tool_repository( self ):
+ """Create and populate the blastxml_to_top_descr_0120 repository"""
+ """
+ We are at step 2.
+ Create and populate blastxml_to_top_descr_0120.
+ """
+ category = self.create_category( name='Test 0120', description='Description of test 0120' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ 'Repository %s' % "'%s'" % tool_repository_name,
+ 'Repository %s has been created' % "'%s'" % tool_repository_name ]
+ repository = self.get_or_create_repository( name=tool_repository_name,
+ description=tool_repository_description,
+ long_description=tool_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( repository,
+ filename='blast/blastxml_to_top_descr.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded blastxml_to_top_descr tarball.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+ def test_0020_verify_tool_repository( self ):
+ '''Verify the blastxml_to_top_descr_0120 repository.'''
+ '''
+ We are at step 2a.
+ Check for appropriate strings, such as tool name, description, and version.
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
+ strings_displayed = [ 'blastxml_to_top_descr_0120', 'BLAST top hit descriptions', 'Make a table from BLAST XML' ]
+ strings_displayed.extend( [ '0.0.1', 'Valid tools'] )
+ self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+ def test_0025_create_repository_dependency( self ):
+ '''Create a repository dependency on blast_datatypes_0120.'''
+ '''
+ We are at step 3.
+ Create a simple repository dependency for blastxml_to_top_descr_0120 that defines a dependency on blast_datatypes_0120.
+ '''
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
+ dependency_xml_path = self.generate_temp_path( 'test_0120', additional_paths=[ 'dependencies' ] )
+ self.create_repository_dependency( repository=tool_repository, depends_on=[ datatypes_repository ], filepath=dependency_xml_path )
+ def test_0040_verify_repository_dependency( self ):
+ '''Verify the created repository dependency.'''
+ '''
+ We are at step 3a.
+ Check the newly created repository dependency to ensure that it was defined and displays correctly.
+ '''
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
+ self.check_repository_dependency( tool_repository, datatypes_repository )
diff -r 9e36d1ed099fb31eccbc6bf55f20c1bd3fec1773 -r ca145a616aee1ccb2499b115be9aeacc9ca2ecbd test/tool_shed/functional/test_0420_citable_urls_for_repositories.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0420_citable_urls_for_repositories.py
@@ -0,0 +1,233 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+import logging
+log = logging.getLogger(__name__)
+
+repository_name = 'filtering_0420'
+repository_description = 'Galaxy filtering tool for test 0420'
+repository_long_description = 'Long description of Galaxy filtering tool for test 0410'
+
+first_changeset_hash = ''
+
+'''
+1. Add and populate a repository to the tool shed with change set revision 0 (assume owner is test).
+2. Add valid change set revision 1.
+3. Visit the following url and check for appropriate strings: <tool shed base url>/view/user1
+4. Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/filtering_0420
+ Resulting page should contain change set revision 1
+5. Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/filtering_0420/<revision 0>
+ Resulting page should not contain change set revision 1, but should contain change set revision 0.
+6. Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/filtering_0420/<invalid revision>
+7. Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/<invalid repository name>
+8. Visit the following url and check for appropriate strings: <tool shed base url>/view/<invalid owner>
+'''
+
+class TestRepositoryCitableURLs( ShedTwillTestCase ):
+ '''Test repository citable url features.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ """
+ Create all the user accounts that are needed for this test script to run independently of other tests.
+ Previously created accounts will not be re-created.
+ """
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_repository( self ):
+ """Create and populate the filtering_0420 repository"""
+ """
+ We are at step 1.
+ Add and populate a repository to the tool shed with change set revision 0 (assume owner is test_user_1).
+ """
+ global first_changeset_hash
+ category = self.create_category( name='Test 0400 Repository Citable URLs',
+ description='Test 0400 Repository Citable URLs category' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ 'Repository %s' % "'%s'" % repository_name,
+ 'Repository %s has been created' % "'%s'" % repository_name ]
+ repository = self.get_or_create_repository( name=repository_name,
+ description=repository_description,
+ long_description=repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( repository,
+ filename='filtering/filtering_2.2.0.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded filtering 2.2.0 tarball.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+ # We'll be checking for this hash later, after uploading another file to the repository, making get_repository_tip() not usable.
+ first_changeset_hash = self.get_repository_tip( repository )
+ def test_0010_upload_new_file_to_repository( self ):
+ '''Upload a readme file to the repository in order to create a second changeset revision.'''
+ '''
+ We are at step 2.
+ Add valid change set revision 1.
+ The repository should now contain two changeset revisions, 0:<revision hash> and 1:<revision hash>.
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ filename='readme.txt',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded readme.txt.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+ def test_0015_load_user_view_page( self ):
+ '''Load the /view/<username> page amd check for strings.'''
+ '''
+ We are at step 3.
+ Visit the following url and check for appropriate strings: <tool shed base url>/view/user1
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ encoded_user_id = self.security.encode_id( test_user_1.id )
+ # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
+ # then directly load the url that the iframe should be loading and check for the expected strings.
+ # The iframe should point to /repository/browse_repositories?user_id=<encoded user ID>&operation=repositories_by_user
+ strings_displayed = [ '/repository/browse_repositories', encoded_user_id, 'operation=repositories_by_user' ]
+ strings_displayed.append( encoded_user_id )
+ strings_displayed_in_iframe = [ 'user1', 'filtering_0420', 'Galaxy filtering tool for test 0420' ]
+ strings_displayed_in_iframe.append( self.get_repository_tip( repository ) )
+ self.load_citable_url( username='user1',
+ repository_name=None,
+ changeset_revision=None,
+ encoded_user_id=encoded_user_id,
+ encoded_repository_id=None,
+ strings_displayed=strings_displayed,
+ strings_displayed_in_iframe=strings_displayed_in_iframe )
+ def test_0020_load_repository_view_page( self ):
+ '''Load the /view/<user>/<repository> page and check for the appropriate strings.'''
+ '''
+ We are at step 4.
+ Visit the following url and check for strings: <tool shed base url>/view/user1/filtering_0420
+ Resulting page should contain change set revision 1
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ encoded_user_id = self.security.encode_id( test_user_1.id )
+ encoded_repository_id = self.security.encode_id( repository.id )
+ # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
+ # then directly load the url that the iframe should be loading and check for the expected strings.
+ # The iframe should point to /repository/bview_repository?id=<encoded repository ID>
+ strings_displayed = [ '/repository', 'view_repository', 'id=', encoded_repository_id ]
+ strings_displayed_in_iframe = [ 'user1', 'filtering_0420', 'Galaxy filtering tool for test 0420' ]
+ strings_displayed_in_iframe.append( self.get_repository_tip( repository ) )
+ strings_displayed_in_iframe.append( 'Sharable link to this repository:' )
+ strings_displayed_in_iframe.append( '%s/view/user1/filtering_0420' % self.url )
+ self.load_citable_url( username='user1',
+ repository_name='filtering_0420',
+ changeset_revision=None,
+ encoded_user_id=encoded_user_id,
+ encoded_repository_id=encoded_repository_id,
+ strings_displayed=strings_displayed,
+ strings_displayed_in_iframe=strings_displayed_in_iframe )
+ def test_0025_load_view_page_for_previous_revision( self ):
+ '''Load a citable url for a past changeset revision and verify that strings display.'''
+ '''
+ We are at step 5.
+ Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/filtering_0420/<revision 0>
+ Resulting page should not contain change set revision 1, but should contain change set revision 0.
+ '''
+ global first_changeset_hash
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ encoded_user_id = self.security.encode_id( test_user_1.id )
+ encoded_repository_id = self.security.encode_id( repository.id )
+ # We are checking the changeset revision pointed to by first_changeset_hash, stored in a global variable at the end of
+ # test_0005. The tip changeset hash should not be displayed here, but first_changeset_hash should.
+ tip_changeset_hash = self.get_repository_tip( repository )
+ # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
+ # then directly load the url that the iframe should be loading and check for the expected strings.
+ # The iframe should point to /repository/view_repository?id=<encoded repository ID>
+ strings_displayed = [ '/repository', 'view_repository', 'id=' + encoded_repository_id ]
+ strings_displayed_in_iframe = [ 'user1', 'filtering_0420', 'Galaxy filtering tool for test 0420', first_changeset_hash ]
+ strings_displayed_in_iframe.append( 'Sharable link to this repository revision:' )
+ strings_displayed_in_iframe.append( '%s/view/user1/filtering_0420/%s' % ( self.url, first_changeset_hash ) )
+ strings_not_displayed_in_iframe = [ tip_changeset_hash ]
+ self.load_citable_url( username='user1',
+ repository_name='filtering_0420',
+ changeset_revision=first_changeset_hash,
+ encoded_user_id=encoded_user_id,
+ encoded_repository_id=encoded_repository_id,
+ strings_displayed=strings_displayed,
+ strings_displayed_in_iframe=strings_displayed_in_iframe,
+ strings_not_displayed_in_iframe=strings_not_displayed_in_iframe )
+ def test_0030_load_sharable_url_with_invalid_changeset_revision( self ):
+ '''Load a citable url with an invalid changeset revision specified.'''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ encoded_user_id = self.security.encode_id( test_user_1.id )
+ encoded_repository_id = self.security.encode_id( repository.id )
+ changeset_hash = '!!invalid!!'
+ tip_revision = self.get_repository_tip( repository )
+ # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
+ # then directly load the url that the iframe should be loading and check for the expected strings.
+ # The iframe should point to /repository/view_repository?id=<encoded repository ID>&status=error
+ strings_displayed = [ '/repository', 'view_repository', 'id=' + encoded_repository_id ]
+ strings_displayed.extend( [ 'The+change+log', 'does+not+include+revision', '%21%21invalid%21%21', 'status=error' ] )
+ strings_displayed_in_iframe = [ 'user1', 'filtering_0420', 'Galaxy filtering tool for test 0420' ]
+ strings_displayed_in_iframe.append( 'Sharable link to this repository revision:' )
+ strings_displayed_in_iframe.append( '%s/view/user1/filtering_0420/%s' % ( self.url, changeset_hash ) )
+ strings_not_displayed_in_iframe = [ tip_revision ]
+ self.load_citable_url( username='user1',
+ repository_name='filtering_0420',
+ changeset_revision=changeset_hash,
+ encoded_user_id=encoded_user_id,
+ encoded_repository_id=encoded_repository_id,
+ strings_displayed=strings_displayed,
+ strings_displayed_in_iframe=strings_displayed_in_iframe,
+ strings_not_displayed_in_iframe=strings_not_displayed_in_iframe )
+ def test_0035_load_sharable_url_with_invalid_repository_name( self ):
+ '''Load a citable url with an invalid changeset revision specified.'''
+ '''
+ We are at step 7
+ Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/!!invalid!!
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ encoded_user_id = self.security.encode_id( test_user_1.id )
+ tip_revision = self.get_repository_tip( repository )
+ # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
+ # then directly load the url that the iframe should be loading and check for the expected strings.
+ # The iframe should point to /repository/browse_repositories?user_id=<encoded user ID>&operation=repositories_by_user
+ strings_displayed = [ '/repository', 'browse_repositories', 'user1' ]
+ strings_displayed.extend( [ 'list+of+repositories+owned', 'does+not+include+one+named', '%21%21invalid%21%21', 'status=error' ] )
+ strings_displayed_in_iframe = [ 'user1', 'filtering_0420' ]
+ strings_displayed_in_iframe.append( 'Repositories owned by user1' )
+ strings_displayed_in_iframe.append( tip_revision )
+ self.load_citable_url( username='user1',
+ repository_name='!!invalid!!',
+ changeset_revision=None,
+ encoded_user_id=encoded_user_id,
+ encoded_repository_id=None,
+ strings_displayed=strings_displayed,
+ strings_displayed_in_iframe=strings_displayed_in_iframe )
+ def test_0040_load_sharable_url_with_invalid_owner( self ):
+ '''Load a citable url with an invalid owner.'''
+ '''
+ We are at step 8.
+ Visit the following url and check for appropriate strings: <tool shed base url>/view/!!invalid!!
+ '''
+ strings_displayed = [ 'The tool shed', self.url, 'contains no repositories owned by', '!!invalid!!' ]
+ self.load_citable_url( username='!!invalid!!',
+ repository_name=None,
+ changeset_revision=None,
+ encoded_user_id=None,
+ encoded_repository_id=None,
+ strings_displayed=strings_displayed )
+
\ No newline at end of file
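The invalid-revision and invalid-repository-name tests above expect URL-encoded fragments such as 'The+change+log' and '%21%21invalid%21%21' because the tool shed redirects with the error message carried in the query string. Those expected encodings can be reproduced with the standard library; the message text below is illustrative rather than the exact tool shed wording.

from urllib.parse import quote, quote_plus

# quote_plus() turns spaces into '+', which is how a phrase like 'The change log'
# appears in the redirect query string checked by test_0030.
print( quote_plus( 'The change log' ) )    # The+change+log
# '!' is percent-encoded, so the bogus revision '!!invalid!!' becomes the string below.
print( quote( '!!invalid!!', safe='' ) )   # %21%21invalid%21%21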
diff -r 9e36d1ed099fb31eccbc6bf55f20c1bd3fec1773 -r ca145a616aee1ccb2499b115be9aeacc9ca2ecbd test/tool_shed/functional/test_1120_simple_repository_dependency_multiple_owners.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1120_simple_repository_dependency_multiple_owners.py
@@ -0,0 +1,190 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+datatypes_repository_name = 'blast_datatypes_0120'
+datatypes_repository_description = 'Galaxy applicable datatypes for BLAST'
+datatypes_repository_long_description = 'Galaxy datatypes for the BLAST top hit descriptons tool'
+
+tool_repository_name = 'blastxml_to_top_descr_0120'
+tool_repository_description = 'BLAST top hit descriptions'
+tool_repository_long_description = 'Make a table from BLAST XML'
+
+'''
+Tool shed side:
+
+1) Create and populate blast_datatypes_0120.
+1a) Check for appropriate strings.
+2) Create and populate blastxml_to_top_descr_0120.
+2a) Check for appropriate strings.
+3) Upload repository_dependencies.xml to blastxml_to_top_descr_0120 that defines a relationship to blast_datatypes_0120.
+3a) Check for appropriate strings.
+
+
+Galaxy side:
+
+1) Install blastxml_to_top_descr_0120, with repository dependencies.
+1a) Check for appropriate strings in the installed blastxml_to_top_descr_0120 and blast_datatypes_0120 repositories.
+'''
+
+running_standalone = False
+
+class TestInstallRepositoryMultipleOwners( ShedTwillTestCase ):
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ """
+ Create all the user accounts that are needed for this test script to run independently of other tests.
+ Previously created accounts will not be re-created.
+ """
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ test_user_2 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+ test_user_2_private_role = test_db_util.get_private_role( test_user_2 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_datatypes_repository( self ):
+ """Create and populate the blast_datatypes_0120 repository"""
+ """
+ We are at step 1.
+ Create and populate blast_datatypes.
+ """
+ category = self.create_category( name='Test 0120', description='Description of test 0120' )
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ strings_displayed = [ 'Repository %s' % "'%s'" % datatypes_repository_name,
+ 'Repository %s has been created' % "'%s'" % datatypes_repository_name ]
+ repository = self.get_or_create_repository( name=datatypes_repository_name,
+ description=datatypes_repository_description,
+ long_description=datatypes_repository_long_description,
+ owner=common.test_user_2_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ if self.repository_is_new( repository ):
+ self.upload_file( repository,
+ filename='blast/blast_datatypes.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded blast_datatypes tarball.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+ def test_0010_verify_datatypes_repository( self ):
+ '''Verify the blast_datatypes_0120 repository.'''
+ '''
+ We are at step 1a.
+ Check for appropriate strings, most importantly BlastXml, BlastNucDb, and BlastProtDb,
+ the datatypes that are defined in datatypes_conf.xml.
+ '''
+ global repository_datatypes_count
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
+ strings_displayed = [ 'BlastXml', 'BlastNucDb', 'BlastProtDb', 'application/xml', 'text/html', 'blastxml', 'blastdbn', 'blastdbp']
+ self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+ repository_datatypes_count = int( self.get_repository_datatypes_count( repository ) )
+ def test_0015_create_tool_repository( self ):
+ """Create and populate the blastxml_to_top_descr_0120 repository"""
+ """
+ We are at step 2.
+ Create and populate blastxml_to_top_descr_0120.
+ """
+ global running_standalone
+ category = self.create_category( name='Test 0120', description='Description of test 0120' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ 'Repository %s' % "'%s'" % tool_repository_name,
+ 'Repository %s has been created' % "'%s'" % tool_repository_name ]
+ repository = self.get_or_create_repository( name=tool_repository_name,
+ description=tool_repository_description,
+ long_description=tool_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository,
+ filename='blast/blastxml_to_top_descr.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded blastxml_to_top_descr tarball.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+ def test_0020_verify_tool_repository( self ):
+ '''Verify the blastxml_to_top_descr_0120 repository.'''
+ '''
+ We are at step 2a.
+ Check for appropriate strings, such as tool name, description, and version.
+ '''
+ repository = test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
+ strings_displayed = [ 'blastxml_to_top_descr_0120', 'BLAST top hit descriptions', 'Make a table from BLAST XML' ]
+ strings_displayed.extend( [ '0.0.1', 'Valid tools'] )
+ self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+ def test_0025_create_repository_dependency( self ):
+ '''Create a repository dependency on blast_datatypes_0120.'''
+ '''
+ We are at step 3.
+ Create a simple repository dependency for blastxml_to_top_descr_0120 that defines a dependency on blast_datatypes_0120.
+ '''
+ global running_standalone
+ if running_standalone:
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
+ dependency_xml_path = self.generate_temp_path( 'test_1120', additional_paths=[ 'dependencies' ] )
+ self.create_repository_dependency( repository=tool_repository, depends_on=[ datatypes_repository ], filepath=dependency_xml_path )
+ def test_0040_verify_repository_dependency( self ):
+ '''Verify the created repository dependency.'''
+ '''
+ We are at step 3a.
+ Check the newly created repository dependency to ensure that it was defined and displays correctly.
+ '''
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
+ self.check_repository_dependency( tool_repository, datatypes_repository )
+ def test_0045_install_blastxml_to_top_descr( self ):
+ '''Install the blastxml_to_top_descr_0120 repository to Galaxy.'''
+ '''
+ We are at step 1, Galaxy side.
+ Install blastxml_to_top_descr_0120 to Galaxy, with repository dependencies, so that the datatypes repository is also installed.
+ '''
+ global base_datatypes_count
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ base_datatypes_count = int( self.get_datatypes_count() )
+ post_submit_strings_displayed = [ 'blastxml_to_top_descr_0120', 'blast_datatypes_0120', 'New' ]
+ self.install_repository( name='blastxml_to_top_descr_0120',
+ owner=common.test_user_1_name,
+ category_name='Test 0120',
+ install_repository_dependencies=True,
+ post_submit_strings_displayed=post_submit_strings_displayed,
+ new_tool_panel_section='Test 0120' )
+ def test_0050_verify_repository_installation( self ):
+ '''Verify installation of blastxml_to_top_descr_0120 and blast_datatypes_0120.'''
+ '''
+ We are at step 1a, Galaxy side.
+ Check that the blastxml_to_top_descr_0120 and blast_datatypes_0120 repositories installed correctly, and that there
+ are now new datatypes in the registry matching the ones defined in blast_datatypes_0120. Also check that
+ blast_datatypes_0120 is labeled as an installed repository dependency of blastxml_to_top_descr_0120.
+ '''
+ global repository_datatypes_count
+ global base_datatypes_count
+ tool_repository = test_db_util.get_installed_repository_by_name_owner( tool_repository_name, common.test_user_1_name )
+ datatypes_repository = test_db_util.get_installed_repository_by_name_owner( datatypes_repository_name, common.test_user_2_name )
+ current_datatypes = int( self.get_datatypes_count() )
+ expected_count = base_datatypes_count + repository_datatypes_count
+ assert current_datatypes == expected_count, 'Installing %s did not add new datatypes. Expected: %d. Found: %d' % \
+ ( 'blastxml_to_top_descr_0120', expected_count, current_datatypes )
+ strings_displayed = [ 'Installed repository dependencies', 'user1', 'blast_datatypes_0120' ]
+ strings_displayed.extend( [ 'Valid tools', 'BLAST top hit', 'Make a table', datatypes_repository.installed_changeset_revision ] )
+ self.display_installed_repository_manage_page( tool_repository, strings_displayed=strings_displayed )
+ strings_displayed = [ 'Datatypes', 'blastxml', 'blastdbp', 'blastdbn', 'BlastXml', 'BlastNucDb', 'BlastProtDb' ]
+ strings_displayed.extend( [ 'application/xml', 'text/html' ] )
+ self.display_installed_repository_manage_page( datatypes_repository, strings_displayed=strings_displayed )
diff -r 9e36d1ed099fb31eccbc6bf55f20c1bd3fec1773 -r ca145a616aee1ccb2499b115be9aeacc9ca2ecbd test/tool_shed/test_data/blast/blast_datatypes.tar
Binary file test/tool_shed/test_data/blast/blast_datatypes.tar has changed
diff -r 9e36d1ed099fb31eccbc6bf55f20c1bd3fec1773 -r ca145a616aee1ccb2499b115be9aeacc9ca2ecbd test/tool_shed/test_data/blast/blastxml_to_top_descr.tar
Binary file test/tool_shed/test_data/blast/blastxml_to_top_descr.tar has changed
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fix for installing a repository from the tool shed that includes a repository dependency definition where the required repository is owned by a user other than the owner of the dependent repository.
by Bitbucket 18 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9e36d1ed099f/
changeset: 9e36d1ed099f
user: greg
date: 2013-02-18 21:28:06
summary: Fix for installing a repository from the tool shed that includes a repository dependency definition where the required repository is owned by a user other than the owner of the dependent repository.
affected #: 3 files
diff -r 84b6c4608e357a466948d895d90d1a1d77f3cd71 -r 9e36d1ed099fb31eccbc6bf55f20c1bd3fec1773 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -720,7 +720,11 @@
for rd_tup in rd_tups:
tool_shed, name, owner, changeset_revision = rd_tup
# Updates to installed repository revisions may have occurred, so make sure to locate the appropriate repository revision if one exists.
- repository, current_changeset_revision = repository_was_previously_installed( trans, tool_shed, name, repo_info_tuple )
+ # We need to create a temporary repo_info_tuple that includes the correct repository owner which we get from the current rd_tup. The current
+ # tuple looks like: ( description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td )
+ tmp_clone_url = suc.generate_clone_url_from_repo_info_tup( rd_tup )
+ tmp_repo_info_tuple = ( None, tmp_clone_url, changeset_revision, None, owner, None, None )
+ repository, current_changeset_revision = repository_was_previously_installed( trans, tool_shed, name, tmp_repo_info_tuple )
if repository:
new_rd_tup = [ tool_shed, name, owner, changeset_revision, repository.id, repository.status ]
if repository.status == trans.model.ToolShedRepository.installation_status.INSTALLED:
diff -r 84b6c4608e357a466948d895d90d1a1d77f3cd71 -r 9e36d1ed099fb31eccbc6bf55f20c1bd3fec1773 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1068,6 +1068,12 @@
return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
else:
return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
+def generate_clone_url_from_repo_info_tup( repo_info_tup ):
+ """Generate teh URL for cloning a repositoyr given a tuple of toolshed, name, owner, changeset_revision."""
+ # Example tuple: ['http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab']
+ toolshed, name, owner, changeset_revision = repo_info_tup
+ # Don't include the changeset_revision in clone urls.
+ return url_join( toolshed, 'repos', owner, name )
def generate_data_manager_metadata( app, repository, repo_dir, data_manager_config_filename, metadata_dict, shed_config_dict=None ):
"""Update the received metadata_dict with information from the parsed data_manager_config_filename."""
if data_manager_config_filename is None:
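The new generate_clone_url_from_repo_info_tup() above lets the previously-installed check in shed_util.py build a clone URL from the owner recorded in the dependency tuple rather than from the repository currently being installed. Below is a minimal sketch of the URL it produces, with a plain string join standing in for Galaxy's url_join helper; this is an illustration, not the Galaxy implementation.

# Sketch only: a plain join standing in for url_join.
def clone_url_from_repo_info_tup( repo_info_tup ):
    toolshed, name, owner, changeset_revision = repo_info_tup
    # The changeset_revision is deliberately left out of clone URLs.
    return '/'.join( part.strip( '/' ) for part in ( toolshed, 'repos', owner, name ) )

print( clone_url_from_repo_info_tup( [ 'http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab' ] ) )
# -> http://localhost:9009/repos/test/blast_datatypes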
diff -r 84b6c4608e357a466948d895d90d1a1d77f3cd71 -r 9e36d1ed099fb31eccbc6bf55f20c1bd3fec1773 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1472,12 +1472,15 @@
repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
metadata = repository_metadata.metadata
- if not includes_tools and 'tools' in metadata:
- includes_tools = True
- if not has_repository_dependencies and 'repository_dependencies' in metadata:
- has_repository_dependencies = True
- if not includes_tool_dependencies and 'tool_dependencies' in metadata:
- includes_tool_dependencies = True
+ if not includes_tools:
+ if 'tools' in metadata:
+ includes_tools = True
+ if not has_repository_dependencies:
+ if 'repository_dependencies' in metadata:
+ has_repository_dependencies = True
+ if not includes_tool_dependencies:
+ if 'tool_dependencies' in metadata:
+ includes_tool_dependencies = True
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dan: Fix typo in 7d0a0ca401f7.
by Bitbucket 18 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/84b6c4608e35/
changeset: 84b6c4608e35
user: dan
date: 2013-02-18 21:10:57
summary: Fix typo in 7d0a0ca401f7.
affected #: 1 file
diff -r 7d0a0ca401f7cee8b2862c4191f4c1c073cc538d -r 84b6c4608e357a466948d895d90d1a1d77f3cd71 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1418,7 +1418,7 @@
# Handle Data Managers
valid_data_managers = None
invalid_data_managers = None
- data_manager_errors = None
+ data_managers_errors = None
if 'data_manager' in metadata:
valid_data_managers = metadata['data_manager'].get( 'data_managers', None )
invalid_data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fix for handling proprietary datatype definitions in repositories being installed from the tool shed when only datatypes that subclass from those in the Galaxy framework are defined.
by Bitbucket 18 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/337e5b6bd7dc/
changeset: 337e5b6bd7dc
branch: stable
user: greg
date: 2013-02-18 17:04:29
summary: Fix for handling proprietary datatype definitions in repositories being installed from the tool shed when only datatypes that subclass from those in the Galaxy framework are defined (i.e., there are no proprietary datatype class files for the datatypes). In addition, this changeset now properly handles installing a tool shed repository that includes proprietary datatype definitions (they were failing due to a bug in this same method).
affected #: 1 file
diff -r 8cd0793b0fc65b57f8fd93cfffc19ba105d190cf -r 337e5b6bd7dc5696087c436e54192149c1bc8bc3 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -120,12 +120,24 @@
be False when a tool shed repository is being installed. Since installation is occurring after the datatypes registry
has been initialized, the registry's contents cannot be overridden by conflicting data types.
"""
- tree = util.parse_xml( datatypes_config )
+ try:
+ tree = util.parse_xml( datatypes_config )
+ except Exception, e:
+ log.debug( "Error parsing %s, exception: %s" % ( datatypes_config, str( e ) ) )
+ return None, None
datatypes_config_root = tree.getroot()
- # Path to datatype converters
- converter_path = None
- # Path to datatype display applications
- display_path = None
+ registration = datatypes_config_root.find( 'registration' )
+ if registration is None:
+ # We have valid XML, but not a valid proprietary datatypes definition.
+ return None, None
+ sniffers = datatypes_config_root.find( 'sniffers' )
+ converter_path, display_path = get_converter_and_display_paths( registration, relative_install_dir )
+ if converter_path:
+ # Path to datatype converters
+ registration.attrib[ 'proprietary_converter_path' ] = converter_path
+ if display_path:
+ # Path to datatype display applications
+ registration.attrib[ 'proprietary_display_path' ] = display_path
relative_path_to_datatype_file_name = None
datatype_files = datatypes_config_root.find( 'datatype_files' )
datatype_class_modules = []
@@ -148,12 +160,6 @@
break
break
if datatype_class_modules:
- registration = datatypes_config_root.find( 'registration' )
- converter_path, display_path = get_converter_and_display_paths( registration, relative_install_dir )
- if converter_path:
- registration.attrib[ 'proprietary_converter_path' ] = converter_path
- if display_path:
- registration.attrib[ 'proprietary_display_path' ] = display_path
for relative_path_to_datatype_file_name in datatype_class_modules:
datatype_file_name_path, datatype_file_name = os.path.split( relative_path_to_datatype_file_name )
for elem in registration.findall( 'datatype' ):
@@ -170,20 +176,16 @@
# The value of proprietary_path must be an absolute path due to job_working_directory.
elem.attrib[ 'proprietary_path' ] = os.path.abspath( datatype_file_name_path )
elem.attrib[ 'proprietary_datatype_module' ] = proprietary_datatype_module
- sniffers = datatypes_config_root.find( 'sniffers' )
- else:
- sniffers = None
- fd, proprietary_datatypes_config = tempfile.mkstemp()
- os.write( fd, '<?xml version="1.0"?>\n' )
- os.write( fd, '<datatypes>\n' )
- os.write( fd, '%s' % util.xml_to_string( registration ) )
- if sniffers:
- os.write( fd, '%s' % util.xml_to_string( sniffers ) )
- os.write( fd, '</datatypes>\n' )
- os.close( fd )
- os.chmod( proprietary_datatypes_config, 0644 )
- else:
- proprietary_datatypes_config = datatypes_config
+ # Temporarily persist the proprietary datatypes configuration file so it can be loaded into the datatypes registry.
+ fd, proprietary_datatypes_config = tempfile.mkstemp()
+ os.write( fd, '<?xml version="1.0"?>\n' )
+ os.write( fd, '<datatypes>\n' )
+ os.write( fd, '%s' % util.xml_to_string( registration ) )
+ if sniffers:
+ os.write( fd, '%s' % util.xml_to_string( sniffers ) )
+ os.write( fd, '</datatypes>\n' )
+ os.close( fd )
+ os.chmod( proprietary_datatypes_config, 0644 )
# Load proprietary datatypes
app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=proprietary_datatypes_config, deactivate=deactivate, override=override )
if datatype_files:
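The restructured block above always copies the <registration> element (and <sniffers>, when present) into a temporary datatypes config, rather than only doing so when proprietary class modules are found, which is what allows subclass-only datatype definitions to load. Below is a rough standalone Python 3 equivalent of that persistence step; it is a sketch only, since the Galaxy code above targets Python 2 and uses util.parse_xml and util.xml_to_string.

import tempfile
import xml.etree.ElementTree as ET

def persist_registration( datatypes_config ):
    """Copy <registration> and the optional <sniffers> into a temporary datatypes config."""
    try:
        tree = ET.parse( datatypes_config )
    except ET.ParseError as e:
        print( 'Error parsing %s, exception: %s' % ( datatypes_config, e ) )
        return None
    root = tree.getroot()
    registration = root.find( 'registration' )
    if registration is None:
        # Valid XML, but not a proprietary datatypes definition.
        return None
    sniffers = root.find( 'sniffers' )
    new_root = ET.Element( 'datatypes' )
    new_root.append( registration )
    if sniffers is not None:
        new_root.append( sniffers )
    with tempfile.NamedTemporaryFile( mode='wb', suffix='.xml', delete=False ) as fh:
        ET.ElementTree( new_root ).write( fh, xml_declaration=True, encoding='utf-8' )
        return fh.name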
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dan: Display Data Managers on repository info pages in Tool Shed and Galaxy.
by Bitbucket 18 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7d0a0ca401f7/
changeset: 7d0a0ca401f7
user: dan
date: 2013-02-18 19:46:08
summary: Display Data Managers on repository info pages in Tool Shed and Galaxy.
affected #: 4 files
diff -r 852b9d795423015cba17d6c0d5b402f1055404dd -r 7d0a0ca401f7cee8b2862c4191f4c1c073cc538d lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1415,6 +1415,14 @@
valid_tools = metadata.get( 'tools', None )
# Handle workflows.
workflows = metadata.get( 'workflows', None )
+ # Handle Data Managers
+ valid_data_managers = None
+ invalid_data_managers = None
+ data_manager_errors = None
+ if 'data_manager' in metadata:
+ valid_data_managers = metadata['data_manager'].get( 'data_managers', None )
+ invalid_data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
+ data_managers_errors = metadata['data_manager'].get( 'messages', None )
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
repository=repository,
datatypes=datatypes,
@@ -1426,6 +1434,9 @@
tool_dependencies=installed_tool_dependencies,
valid_tools=valid_tools,
workflows=workflows,
+ valid_data_managers=valid_data_managers,
+ invalid_data_managers=invalid_data_managers,
+ data_managers_errors=data_managers_errors,
new_install=False,
reinstalling=reinstalling )
else:
@@ -1458,6 +1469,9 @@
tool_dependencies=installed_tool_dependencies,
valid_tools=None,
workflows=None,
+ valid_data_managers=None,
+ invalid_data_managers=None,
+ data_managers_errors=None,
new_install=True,
reinstalling=False )
# Merge the missing_repository_dependencies container contents to the installed_repository_dependencies container.
diff -r 852b9d795423015cba17d6c0d5b402f1055404dd -r 7d0a0ca401f7cee8b2862c4191f4c1c073cc538d lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -188,8 +188,8 @@
log.debug( "Error reading README file '%s' defined in metadata: %s" % ( str( relative_path_to_readme_file ), str( e ) ) )
return readme_files_dict
def build_repository_containers_for_galaxy( trans, repository, datatypes, invalid_tools, missing_repository_dependencies, missing_tool_dependencies,
- readme_files_dict, repository_dependencies, tool_dependencies, valid_tools, workflows, new_install=False,
- reinstalling=False ):
+ readme_files_dict, repository_dependencies, tool_dependencies, valid_tools, workflows, valid_data_managers,
+ invalid_data_managers, data_managers_errors, new_install=False, reinstalling=False ):
"""Return a dictionary of containers for the received repository's dependencies and readme files for display during installation to Galaxy."""
containers_dict = dict( datatypes=None,
invalid_tools=None,
@@ -199,7 +199,9 @@
missing_repository_dependencies=None,
tool_dependencies=None,
valid_tools=None,
- workflows=None )
+ workflows=None,
+ valid_data_managers=None,
+ invalid_data_managers=None )
# Some of the tool dependency folders will include links to display tool dependency information, and some of these links require the repository
# id. However we need to be careful because sometimes the repository object is None.
if repository:
@@ -294,6 +296,19 @@
repository_id=repository_id,
label='Workflows' )
containers_dict[ 'workflows' ] = workflows_root_folder
+ if valid_data_managers:
+ folder_id, valid_data_managers_root_folder = container_util.build_data_managers_folder( trans=trans,
+ folder_id=folder_id,
+ data_managers=valid_data_managers,
+ label='Valid Data Managers' )
+ containers_dict[ 'valid_data_managers' ] = valid_data_managers_root_folder
+ if invalid_data_managers or data_managers_errors:
+ folder_id, invalid_data_managers_root_folder = container_util.build_invalid_data_managers_folder( trans=trans,
+ folder_id=folder_id,
+ data_managers=invalid_data_managers,
+ error_messages=data_managers_errors,
+ label='Invalid Data Managers' )
+ containers_dict[ 'invalid_data_managers' ] = invalid_data_managers_root_folder
except Exception, e:
log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) )
finally:
@@ -307,7 +322,9 @@
repository_dependencies=None,
tool_dependencies=None,
valid_tools=None,
- workflows=None )
+ workflows=None,
+ valid_data_managers=None
+ )
if repository_metadata:
metadata = repository_metadata.metadata
lock = threading.Lock()
@@ -399,6 +416,17 @@
repository_id=None,
label='Workflows' )
containers_dict[ 'workflows' ] = workflows_root_folder
+ # Valid Data Managers container
+ if metadata:
+ if 'data_manager' in metadata:
+ data_managers = metadata['data_manager'].get( 'data_managers', None )
+ folder_id, data_managers_root_folder = container_util.build_data_managers_folder( trans, folder_id, data_managers, label="Data Managers" )
+ containers_dict[ 'valid_data_managers' ] = data_managers_root_folder
+ error_messages = metadata['data_manager'].get( 'error_messages', None )
+ data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
+ folder_id, data_managers_root_folder = container_util.build_invalid_data_managers_folder( trans, folder_id, data_managers, error_messages, label="Invalid Data Managers" )
+ containers_dict[ 'invalid_data_managers' ] = data_managers_root_folder
+
except Exception, e:
log.debug( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) )
finally:
@@ -1044,20 +1072,6 @@
"""Update the received metadata_dict with information from the parsed data_manager_config_filename."""
if data_manager_config_filename is None:
return metadata_dict
- try:
- tree = util.parse_xml( data_manager_config_filename )
- except Exception, e:
- log.error( 'There was an error parsing your Data Manager config file "%s": %s' % ( data_manager_config_filename, e ) )
- return metadata_dict #we are not able to load any data managers
- tool_path = None
- if shed_config_dict:
- tool_path = shed_config_dict.get( 'tool_path', None )
- tools = {}
- for tool in metadata_dict.get( 'tools', [] ):
- tool_conf_name = tool['tool_config']
- if tool_path:
- tool_conf_name = os.path.join( tool_path, tool_conf_name )
- tools[tool_conf_name] = tool
repo_path = repository.repo_path( app )
try:
#Galaxy Side
@@ -1071,37 +1085,65 @@
relative_data_manager_dir = util.relpath( os.path.split( data_manager_config_filename )[0], repo_dir )
rel_data_manager_config_filename = os.path.join( relative_data_manager_dir, os.path.split( data_manager_config_filename )[1] )
data_managers = {}
- data_manager_metadata = { 'config_filename': rel_data_manager_config_filename, 'data_managers': data_managers }#'tool_config_files': tool_files }
+ invalid_data_managers = []
+ data_manager_metadata = { 'config_filename': rel_data_manager_config_filename, 'data_managers': data_managers, 'invalid_data_managers': invalid_data_managers, 'error_messages': [] }#'tool_config_files': tool_files }
metadata_dict[ 'data_manager' ] = data_manager_metadata
+ try:
+ tree = util.parse_xml( data_manager_config_filename )
+ except Exception, e:
+ error_message = 'There was an error parsing your Data Manager config file "%s": %s' % ( data_manager_config_filename, e )
+ log.error( error_message )
+ data_manager_metadata['error_messages'].append( error_message )
+ return metadata_dict #we are not able to load any data managers
+ tool_path = None
+ if shed_config_dict:
+ tool_path = shed_config_dict.get( 'tool_path', None )
+ tools = {}
+ for tool in metadata_dict.get( 'tools', [] ):
+ tool_conf_name = tool['tool_config']
+ if tool_path:
+ tool_conf_name = os.path.join( tool_path, tool_conf_name )
+ tools[tool_conf_name] = tool
root = tree.getroot()
data_manager_tool_path = root.get( 'tool_path', None )
if data_manager_tool_path:
relative_data_manager_dir = os.path.join( relative_data_manager_dir, data_manager_tool_path )
- for data_manager_elem in root.findall( 'data_manager' ):
+ for i, data_manager_elem in enumerate( root.findall( 'data_manager' ) ):
tool_file = data_manager_elem.get( 'tool_file', None )
data_manager_id = data_manager_elem.get( 'id', None )
if data_manager_id is None:
log.error( 'Data Manager entry is missing id attribute in "%s".' % ( data_manager_config_filename ) )
+ invalid_data_managers.append( { 'index': i, 'error_message': 'Data Manager entry is missing id attribute' } )
continue
+ data_manager_name = data_manager_elem.get( 'name', data_manager_id ) #fix me, default behavior is to fall back to tool.name
version = data_manager_elem.get( 'version', DataManager.DEFAULT_VERSION )
guid = generate_guid_for_object( repository_clone_url, DataManager.GUID_TYPE, data_manager_id, version )
data_tables = []
if tool_file is None:
log.error( 'Data Manager entry is missing tool_file attribute in "%s".' % ( data_manager_config_filename ) )
+ invalid_data_managers.append( { 'index': i, 'error_message': 'Data Manager entry is missing tool_file attribute' } )
+ continue
else:
+ bad_data_table = False
for data_table_elem in data_manager_elem.findall( 'data_table' ):
data_table_name = data_table_elem.get( 'name', None )
if data_table_name is None:
- log.error( 'Data Manager data_table entry is name attribute in "%s".' % ( data_manager_config_filename ) )
+ log.error( 'Data Manager data_table entry is missing name attribute in "%s".' % ( data_manager_config_filename ) )
+ invalid_data_managers.append( { 'index': i, 'error_message': 'Data Manager entry is missing name attribute' } )
+ bad_data_table = True
+ break
else:
data_tables.append( data_table_name )
+ if bad_data_table:
+ continue
data_manager_metadata_tool_file = os.path.join( relative_data_manager_dir, tool_file )
tool_metadata_tool_file = os.path.join( repo_files_directory, data_manager_metadata_tool_file )
tool = tools.get( tool_metadata_tool_file, None )
if tool is None:
log.error( "Unable to determine tools metadata for '%s'." % ( data_manager_metadata_tool_file ) )
+ invalid_data_managers.append( { 'index': i, 'error_message': 'Unable to determine tools metadata' } )
continue
- data_managers[ data_manager_id ] = { 'guid': guid, 'version': version, 'tool_config_file': data_manager_metadata_tool_file, 'data_tables': data_tables, 'tool_guid': tool['guid'] }
+ data_managers[ data_manager_id ] = { 'id': data_manager_id, 'name': data_manager_name, 'guid': guid, 'version': version, 'tool_config_file': data_manager_metadata_tool_file, 'data_tables': data_tables, 'tool_guid': tool['guid'] }
log.debug( 'Loaded Data Manager tool_files: %s' % ( tool_file ) )
return metadata_dict
def generate_datatypes_metadata( datatypes_config, metadata_dict ):
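The hunk above restructures the Data Manager metadata generation so that problems are recorded rather than only logged: XML parse failures go into an 'error_messages' list, and each invalid <data_manager> element is captured with its element index plus a short error message, while valid entries are keyed by id. Below is a minimal, self-contained sketch of that collection pattern; the function name, the default version string, and the simplified attribute set are illustrative assumptions, not Galaxy's generate_data_manager_metadata itself.

    import xml.etree.ElementTree as ElementTree

    def collect_data_managers(config_filename):
        """Return (valid, invalid, error_messages) for a data_manager_conf.xml-style file."""
        data_managers = {}
        invalid_data_managers = []
        error_messages = []
        try:
            tree = ElementTree.parse(config_filename)
        except Exception as e:
            # Parse failures are reported to the caller instead of only being logged.
            error_messages.append('Error parsing "%s": %s' % (config_filename, e))
            return data_managers, invalid_data_managers, error_messages
        for i, elem in enumerate(tree.getroot().findall('data_manager')):
            data_manager_id = elem.get('id')
            if data_manager_id is None:
                invalid_data_managers.append({'index': i, 'error_message': 'missing id attribute'})
                continue
            if elem.get('tool_file') is None:
                invalid_data_managers.append({'index': i, 'error_message': 'missing tool_file attribute'})
                continue
            data_tables = []
            bad_data_table = False
            for data_table_elem in elem.findall('data_table'):
                table_name = data_table_elem.get('name')
                if table_name is None:
                    invalid_data_managers.append({'index': i, 'error_message': 'data_table entry is missing name attribute'})
                    bad_data_table = True
                    break
                data_tables.append(table_name)
            if bad_data_table:
                continue
            data_managers[data_manager_id] = {
                'id': data_manager_id,
                'name': elem.get('name', data_manager_id),  # fall back to the id, as the diff above does
                'version': elem.get('version', '0.0.1'),    # assumed default; Galaxy uses DataManager.DEFAULT_VERSION
                'data_tables': data_tables,
            }
        return data_managers, invalid_data_managers, error_messages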
diff -r 852b9d795423015cba17d6c0d5b402f1055404dd -r 7d0a0ca401f7cee8b2862c4191f4c1c073cc538d lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -19,6 +19,8 @@
self.invalid_tool_dependencies = []
self.invalid_tools = []
self.valid_tools = []
+ self.valid_data_managers = []
+ self.invalid_data_managers = []
self.tool_dependencies = []
self.repository_dependencies = []
self.readme_files = []
@@ -47,6 +49,14 @@
repository_owner=owner,
changeset_revision=changeset_revision )
+class DataManager( object ):
+ """Data Manager object"""
+ def __init__( self, id=None, name=None, version=None, data_tables=None ):
+ self.id = id
+ self.name = name
+ self.version = version
+ self.data_tables = data_tables
+
class Datatype( object ):
"""Datatype object"""
def __init__( self, id=None, extension=None, type=None, mimetype=None, subclass=None ):
@@ -56,6 +66,13 @@
self.mimetype = mimetype
self.subclass = subclass
+class InvalidDataManager( object ):
+ """Data Manager object"""
+ def __init__( self, id=None, index=None, error=None ):
+ self.id = id
+ self.index = index
+ self.error = error
+
class InvalidRepositoryDependency( object ):
"""Invalid repository dependency definition object"""
def __init__( self, id=None, toolshed=None, repository_name=None, repository_owner=None, changeset_revision=None, error=None ):
@@ -151,6 +168,35 @@
self.repository_metadata_id = repository_metadata_id
self.repository_id = repository_id
+def build_data_managers_folder( trans, folder_id, data_managers, label=None ):
+ """Return a folder hierarchy containing Data Managers."""
+ if data_managers:
+ if label is None:
+ label = "Data Managers"
+ data_manager_id = 0
+ folder_id += 1
+ data_managers_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+ folder_id += 1
+ key = "valid_data_managers"
+ folder = Folder( id=folder_id, key=key, label=label, parent=data_managers_root_folder )
+ data_managers_root_folder.folders.append( folder )
+ # Insert a header row.
+ data_manager_id += 1
+ data_manager = DataManager( id=data_manager_id,
+ name='Name',
+ version='Version',
+ data_tables='Data Tables' )
+ folder.valid_data_managers.append( data_manager )
+ for data_manager_dict in data_managers.itervalues():
+ data_manager_id += 1
+ data_manager = DataManager( id=data_manager_id,
+ name=data_manager_dict.get( 'name', '' ),
+ version=data_manager_dict.get( 'version', '' ),
+ data_tables=", ".join( data_manager_dict.get( 'data_tables', '' ) ) )
+ folder.valid_data_managers.append( data_manager )
+ else:
+ data_managers_root_folder = None
+ return folder_id, data_managers_root_folder
def build_datatypes_folder( trans, folder_id, datatypes, label='Datatypes' ):
"""Return a folder hierarchy containing datatypes."""
if datatypes:
@@ -179,6 +225,42 @@
else:
datatypes_root_folder = None
return folder_id, datatypes_root_folder
+def build_invalid_data_managers_folder( trans, folder_id, data_managers, error_messages=None, label=None ):
+ """Return a folder hierarchy containing invalid Data Managers."""
+ if data_managers or error_messages:
+ if label is None:
+ label = "Invalid Data Managers"
+ data_manager_id = 0
+ folder_id += 1
+ data_managers_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+ folder_id += 1
+ key = "invalid_data_managers"
+ folder = Folder( id=folder_id, key=key, label=label, parent=data_managers_root_folder )
+ data_managers_root_folder.folders.append( folder )
+ # Insert a header row.
+ data_manager_id += 1
+ data_manager = InvalidDataManager( id=data_manager_id,
+ index='Element Index',
+ error='Error' )
+ folder.invalid_data_managers.append( data_manager )
+ if error_messages:
+ for error_message in error_messages:
+ data_manager_id += 1
+ data_manager = InvalidDataManager( id=data_manager_id,
+ index=0,
+ error=error_message )
+ folder.invalid_data_managers.append( data_manager )
+ has_errors = True
+ for data_manager_dict in data_managers:
+ data_manager_id += 1
+ data_manager = InvalidDataManager( id=data_manager_id,
+ index=data_manager_dict.get( 'index', 0 ) + 1,
+ error=data_manager_dict.get( 'error_message', '' ) )
+ folder.invalid_data_managers.append( data_manager )
+ has_errors = True
+ else:
+ data_managers_root_folder = None
+ return folder_id, data_managers_root_folder
def build_invalid_repository_dependencies_root_folder( trans, folder_id, invalid_repository_dependencies_dict ):
"""Return a folder hierarchy containing invalid repository dependencies."""
label = 'Invalid repository dependencies'
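Both builders added above follow the container convention used throughout this module: a throwaway 'root' Folder wraps a single labeled sub-folder, and the first object appended to that sub-folder is a header row, which the Mako template renders with <th> cells because it sits at index 0. A stripped-down, runnable illustration of the convention follows; the classes are simplified stand-ins, not the tool shed's own.

    class Folder(object):
        def __init__(self, id, key, label, parent=None):
            self.id, self.key, self.label, self.parent = id, key, label, parent
            self.folders = []
            self.valid_data_managers = []

    class DataManager(object):
        def __init__(self, id, name, version, data_tables):
            self.id, self.name, self.version, self.data_tables = id, name, version, data_tables

    def build_data_managers_folder(folder_id, data_managers, label='Data Managers'):
        if not data_managers:
            return folder_id, None
        folder_id += 1
        root = Folder(folder_id, 'root', 'root')
        folder_id += 1
        sub = Folder(folder_id, 'valid_data_managers', label, parent=root)
        root.folders.append(sub)
        # Header row: rendered with <th> cells because it is the first entry.
        sub.valid_data_managers.append(DataManager(1, 'Name', 'Version', 'Data Tables'))
        for row_id, dm in enumerate(data_managers.values(), start=2):
            sub.valid_data_managers.append(
                DataManager(row_id, dm.get('name', ''), dm.get('version', ''),
                            ', '.join(dm.get('data_tables', []))))
        return folder_id, root

Feeding this sketch a dictionary shaped like the 'data_managers' metadata produced earlier would yield the same two-level folder tree that render_folder() walks in common.mako.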
diff -r 852b9d795423015cba17d6c0d5b402f1055404dd -r 7d0a0ca401f7cee8b2862c4191f4c1c073cc538d templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -248,6 +248,14 @@
else:
folder_label = "%s<i> - click the name to view an SVG image of the workflow</i>" % folder_label
col_span_str = 'colspan="4"'
+ elif folder.valid_data_managers:
+ if folder.description:
+ folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
+ col_span_str = 'colspan="3"'
+ elif folder.invalid_data_managers:
+ if folder.description:
+ folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
+ col_span_str = 'colspan="2"'
%><td ${col_span_str} style="padding-left: ${folder_pad}px;"><span class="expandLink folder-${encoded_id}-click">
@@ -305,6 +313,18 @@
${render_datatype( datatype, pad, my_row, row_counter, row_is_header )}
%endfor
%endif
+ %if folder.valid_data_managers:
+ %for index, data_manager in enumerate( folder.valid_data_managers ):
+ <% row_is_header = index == 0 %>
+ ${render_valid_data_manager( data_manager, pad, my_row, row_counter, row_is_header )}
+ %endfor
+ %endif
+ %if folder.invalid_data_managers:
+ %for index, data_manager in enumerate( folder.invalid_data_managers ):
+ <% row_is_header = index == 0 %>
+ ${render_invalid_data_manager( data_manager, pad, my_row, row_counter, row_is_header )}
+ %endfor
+ %endif
</%def><%def name="render_datatype( datatype, pad, parent, row_counter, row_is_header=False )">
@@ -331,6 +351,51 @@
%></%def>
+<%def name="render_valid_data_manager( data_manager, pad, parent, row_counter, row_is_header=False )">
+ <%
+ encoded_id = trans.security.encode_id( data_manager.id )
+ if row_is_header:
+ cell_type = 'th'
+ else:
+ cell_type = 'td'
+ %>
+ <tr class="datasetRow"
+ %if parent is not None:
+ parent="${parent}"
+ %endif
+ id="libraryItem-${encoded_id}">
+ <${cell_type} style="padding-left: ${pad+20}px;">${data_manager.name | h}</${cell_type}>
+ <${cell_type}>${data_manager.version | h}</${cell_type}>
+ <${cell_type}>${data_manager.data_tables | h}</${cell_type}>
+ </tr>
+ <%
+ my_row = row_counter.count
+ row_counter.increment()
+ %>
+</%def>
+
+<%def name="render_invalid_data_manager( data_manager, pad, parent, row_counter, row_is_header=False )">
+ <%
+ encoded_id = trans.security.encode_id( data_manager.id )
+ if row_is_header:
+ cell_type = 'th'
+ else:
+ cell_type = 'td'
+ %>
+ <tr class="datasetRow"
+ %if parent is not None:
+ parent="${parent}"
+ %endif
+ id="libraryItem-${encoded_id}">
+ <${cell_type} style="padding-left: ${pad+20}px;">${data_manager.index | h}</${cell_type}>
+ <${cell_type}>${data_manager.error | h}</${cell_type}>
+ </tr>
+ <%
+ my_row = row_counter.count
+ row_counter.increment()
+ %>
+</%def>
+
<%def name="render_invalid_repository_dependency( invalid_repository_dependency, pad, parent, row_counter )"><%
encoded_id = trans.security.encode_id( invalid_repository_dependency.id )
@@ -638,6 +703,8 @@
missing_tool_dependencies_root_folder = containers_dict.get( 'missing_tool_dependencies', None )
valid_tools_root_folder = containers_dict.get( 'valid_tools', None )
workflows_root_folder = containers_dict.get( 'workflows', None )
+ valid_data_managers_root_folder = containers_dict.get( 'valid_data_managers', None )
+ invalid_data_managers_root_folder = containers_dict.get( 'invalid_data_managers', None )
has_contents = datatypes_root_folder or invalid_tools_root_folder or valid_tools_root_folder or workflows_root_folder
has_dependencies = \
@@ -736,6 +803,20 @@
${render_folder( invalid_tools_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
</table>
%endif
+ %if valid_data_managers_root_folder:
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="valid_tools">
+ ${render_folder( valid_data_managers_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ %endif
+ %if invalid_data_managers_root_folder:
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="invalid_tools">
+ ${render_folder( invalid_data_managers_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ %endif
%if workflows_root_folder:
<p/><% row_counter = RowCounter() %>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/852b9d795423/
changeset: 852b9d795423
user: greg
date: 2013-02-18 17:04:29
summary: Fix for handling proprietary datatype definitions in repositories being installed from the tool shed when only datatypes that subclass those in the Galaxy framework are defined (i.e., there are no proprietary datatype class files for the datatypes). In addition, this changeset now properly handles installing a tool shed repository that includes proprietary datatype definitions (such installations were failing due to a bug in this same method).
affected #: 1 file
diff -r 36ab5b39b985d84d02b449eeaa4876ad73190317 -r 852b9d795423015cba17d6c0d5b402f1055404dd lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -125,12 +125,24 @@
be False when a tool shed repository is being installed. Since installation is occurring after the datatypes registry
has been initialized, the registry's contents cannot be overridden by conflicting data types.
"""
- tree = util.parse_xml( datatypes_config )
+ try:
+ tree = util.parse_xml( datatypes_config )
+ except Exception, e:
+ log.debug( "Error parsing %s, exception: %s" % ( datatypes_config, str( e ) ) )
+ return None, None
datatypes_config_root = tree.getroot()
- # Path to datatype converters
- converter_path = None
- # Path to datatype display applications
- display_path = None
+ registration = datatypes_config_root.find( 'registration' )
+ if registration is None:
+ # We have valid XML, but not a valid proprietary datatypes definition.
+ return None, None
+ sniffers = datatypes_config_root.find( 'sniffers' )
+ converter_path, display_path = get_converter_and_display_paths( registration, relative_install_dir )
+ if converter_path:
+ # Path to datatype converters
+ registration.attrib[ 'proprietary_converter_path' ] = converter_path
+ if display_path:
+ # Path to datatype display applications
+ registration.attrib[ 'proprietary_display_path' ] = display_path
relative_path_to_datatype_file_name = None
datatype_files = datatypes_config_root.find( 'datatype_files' )
datatype_class_modules = []
@@ -153,12 +165,6 @@
break
break
if datatype_class_modules:
- registration = datatypes_config_root.find( 'registration' )
- converter_path, display_path = get_converter_and_display_paths( registration, relative_install_dir )
- if converter_path:
- registration.attrib[ 'proprietary_converter_path' ] = converter_path
- if display_path:
- registration.attrib[ 'proprietary_display_path' ] = display_path
for relative_path_to_datatype_file_name in datatype_class_modules:
datatype_file_name_path, datatype_file_name = os.path.split( relative_path_to_datatype_file_name )
for elem in registration.findall( 'datatype' ):
@@ -175,20 +181,16 @@
# The value of proprietary_path must be an absolute path due to job_working_directory.
elem.attrib[ 'proprietary_path' ] = os.path.abspath( datatype_file_name_path )
elem.attrib[ 'proprietary_datatype_module' ] = proprietary_datatype_module
- sniffers = datatypes_config_root.find( 'sniffers' )
- else:
- sniffers = None
- fd, proprietary_datatypes_config = tempfile.mkstemp()
- os.write( fd, '<?xml version="1.0"?>\n' )
- os.write( fd, '<datatypes>\n' )
- os.write( fd, '%s' % util.xml_to_string( registration ) )
- if sniffers:
- os.write( fd, '%s' % util.xml_to_string( sniffers ) )
- os.write( fd, '</datatypes>\n' )
- os.close( fd )
- os.chmod( proprietary_datatypes_config, 0644 )
- else:
- proprietary_datatypes_config = datatypes_config
+ # Temporarily persist the proprietary datatypes configuration file so it can be loaded into the datatypes registry.
+ fd, proprietary_datatypes_config = tempfile.mkstemp()
+ os.write( fd, '<?xml version="1.0"?>\n' )
+ os.write( fd, '<datatypes>\n' )
+ os.write( fd, '%s' % util.xml_to_string( registration ) )
+ if sniffers:
+ os.write( fd, '%s' % util.xml_to_string( sniffers ) )
+ os.write( fd, '</datatypes>\n' )
+ os.close( fd )
+ os.chmod( proprietary_datatypes_config, 0644 )
# Load proprietary datatypes
app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=proprietary_datatypes_config, deactivate=deactivate, override=override )
if datatype_files:
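The reworked logic above resolves <registration> and <sniffers> up front, bails out early when the parsed file is not actually a proprietary datatypes definition, and always persists a temporary <datatypes> config for the registry to load, so definitions that only subclass existing Galaxy datatypes (and ship no class files) are no longer skipped. A hedged, standalone sketch of that flow follows; the helper name is assumed, and the real method additionally rewrites converter, display, and per-datatype module paths.

    import os
    import tempfile
    import xml.etree.ElementTree as ElementTree

    def make_temporary_datatypes_config(datatypes_config):
        """Return the path to a temporary <datatypes> file, or None if the input is unusable."""
        try:
            tree = ElementTree.parse(datatypes_config)
        except Exception:
            return None
        root = tree.getroot()
        registration = root.find('registration')
        if registration is None:
            # Valid XML, but not a proprietary datatypes definition.
            return None
        sniffers = root.find('sniffers')
        fd, tmp_config = tempfile.mkstemp()
        os.write(fd, b'<?xml version="1.0"?>\n<datatypes>\n')
        os.write(fd, ElementTree.tostring(registration))
        if sniffers is not None:
            os.write(fd, ElementTree.tostring(sniffers))
        os.write(fd, b'\n</datatypes>\n')
        os.close(fd)
        os.chmod(tmp_config, 0o644)
        return tmp_config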
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
18 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/36ab5b39b985/
changeset: 36ab5b39b985
user: greg
date: 2013-02-18 16:01:40
summary: A bit of messaging cleanup in the tool shed.
affected #: 3 files
diff -r 131f63d242216522ffb06cada3684fbf93fe9254 -r 36ab5b39b985d84d02b449eeaa4876ad73190317 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1166,6 +1166,33 @@
else:
valid_tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ]
return valid_tool_dependencies_dict
+def generate_message_for_invalid_repository_dependencies( metadata_dict ):
+ """Return the error message associated with an invalid repository dependency for display in the caller."""
+ message = ''
+ if metadata_dict:
+ invalid_repository_dependencies_dict = metadata_dict.get( 'invalid_repository_dependencies', None )
+ if invalid_repository_dependencies_dict:
+ invalid_repository_dependencies = invalid_repository_dependencies_dict[ 'invalid_repository_dependencies' ]
+ for repository_dependency_tup in invalid_repository_dependencies:
+ toolshed, name, owner, changeset_revision, error = repository_dependency_tup
+ if error:
+ message = '%s ' % str( error )
+ return message
+def generate_message_for_invalid_tool_dependencies( metadata_dict ):
+ """
+ Due to support for orphan tool dependencies (which are always valid) tool dependency definitions can only be invalid if they include a definition for a complex
+ repository dependency and the repository dependency definition is invalid. This method retrieves the error message associated with the invalid tool dependency
+ for display in the caller.
+ """
+ message = ''
+ if metadata_dict:
+ invalid_tool_dependencies = metadata_dict.get( 'invalid_tool_dependencies', None )
+ if invalid_tool_dependencies:
+ for td_key, requirement_dict in invalid_tool_dependencies.items():
+ error = requirement_dict.get( 'error', None )
+ if error:
+ message = '%s ' % str( error )
+ return message
def generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ):
if as_html:
new_line = '<br/>'
@@ -1211,7 +1238,6 @@
is considered valid but perhaps an orphan due to it's actual invalidity.
"""
message = ''
- status = 'done'
if metadata_dict:
orphan_tool_dependencies = metadata_dict.get( 'orphan_tool_dependencies', None )
if orphan_tool_dependencies:
@@ -1237,8 +1263,7 @@
version = requirements_dict[ 'version' ]
message += "<b>* name:</b> %s, <b>type:</b> %s, <b>version:</b> %s<br/>" % ( str( name ), str( type ), str( version ) )
message += "<br/>"
- status = 'warning'
- return message, status
+ return message
def generate_metadata_for_changeset_revision( app, repository, changeset_revision, repository_clone_url, shed_config_dict=None, relative_install_dir=None,
repository_files_dir=None, resetting_all_metadata_on_repository=False, updating_installed_repository=False,
persist=False ):
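Both new helpers return only a message string (the orphan helper likewise no longer returns a status), leaving each caller to concatenate the pieces and decide how far to escalate the status. A small sketch of that caller-side pattern, using a hypothetical combine_messages() rather than the controller code:

    def combine_messages(orphan_message='', invalid_tool_dependencies_message='',
                         invalid_repository_dependencies_message=''):
        """Append each non-empty message; orphans downgrade status to 'warning', invalid definitions to 'error'."""
        message = ''
        status = 'done'
        if orphan_message:
            message += orphan_message
            status = 'warning'
        if invalid_tool_dependencies_message:
            message += invalid_tool_dependencies_message
            status = 'error'
        if invalid_repository_dependencies_message:
            message += invalid_repository_dependencies_message
            status = 'error'
        return message, status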
diff -r 131f63d242216522ffb06cada3684fbf93fe9254 -r 36ab5b39b985d84d02b449eeaa4876ad73190317 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -2000,10 +2000,11 @@
key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
handled_key_rd_dicts=None )
- if metadata:
- if 'orphan_tool_dependencies' in metadata:
- orphan_message, status = suc.generate_message_for_orphan_tool_dependencies( metadata )
- message += orphan_message
+ # Handle messaging for orphan tool dependencies.
+ orphan_message = suc.generate_message_for_orphan_tool_dependencies( metadata )
+ if orphan_message:
+ message += orphan_message
+ status = 'warning'
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
message += malicious_error_can_push
@@ -2752,10 +2753,11 @@
key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
handled_key_rd_dicts=None )
- if metadata:
- if 'orphan_tool_dependencies' in metadata:
- orphan_message, status = suc.generate_message_for_orphan_tool_dependencies( metadata )
- message += orphan_message
+ # Handle messaging for orphan tool dependencies.
+ orphan_message = suc.generate_message_for_orphan_tool_dependencies( metadata )
+ if orphan_message:
+ message += orphan_message
+ status = 'warning'
else:
repository_metadata_id = None
metadata = None
diff -r 131f63d242216522ffb06cada3684fbf93fe9254 -r 36ab5b39b985d84d02b449eeaa4876ad73190317 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -167,33 +167,29 @@
message += " %d files were removed from the repository root. " % len( files_to_remove )
kwd[ 'message' ] = message
suc.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
- # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to a requirement tag mismatch
- # or some other problem.
if repository.metadata_revisions:
# A repository's metadata revisions are order descending by update_time, so the zeroth revision will be the tip just after an upload.
metadata_dict = repository.metadata_revisions[0].metadata
else:
metadata_dict = {}
+ # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to a requirement tag mismatch
+ # or some other problem. Tool dependency definitions can define orphan tool dependencies (no relationship to any tools contained in the repository),
+ # so warning messages are important because orphans are always valid. The repository owner must be warned in case they did not intend to define an
+ # orphan dependency, but simply provided incorrect information (tool shed, name owner, changeset_revision) for the definition.
# Handle messaging for orphan tool dependencies.
- orphan_message, status = suc.generate_message_for_orphan_tool_dependencies( metadata_dict )
+ orphan_message = suc.generate_message_for_orphan_tool_dependencies( metadata_dict )
if orphan_message:
message += orphan_message
- # Display message for invalid tool sependencies.
- invalid_tool_dependencies = metadata_dict.get( 'invalid_tool_dependencies', None )
- if invalid_tool_dependencies:
- for td_key, requirement_dict in invalid_tool_dependencies.items():
- error = requirement_dict.get( 'error', None )
- if error:
- message = "%s %s" % ( message, str( error ) )
+ status = 'warning'
+ # Handle messaging for invalid tool dependencies.
+ invalid_tool_dependencies_message = suc.generate_message_for_invalid_tool_dependencies( metadata_dict )
+ if invalid_tool_dependencies_message:
+ message += invalid_tool_dependencies_message
status = 'error'
- # Display message for invalid repository dependencies.
- invalid_repository_dependencies_dict = metadata_dict.get( 'invalid_repository_dependencies', None )
- if invalid_repository_dependencies_dict:
- invalid_repository_dependencies = invalid_repository_dependencies_dict[ 'invalid_repository_dependencies' ]
- for repository_dependency_tup in invalid_repository_dependencies:
- toolshed, name, owner, changeset_revision, error = repository_dependency_tup
- if error:
- message += "%s %s" % ( message, str( error ) )
+ # Handle messaging for invalid repository dependencies.
+ invalid_repository_dependencies_message = suc.generate_message_for_invalid_repository_dependencies( metadata_dict )
+ if invalid_repository_dependencies_message:
+ message += invalid_repository_dependencies_message
status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
suc.reset_tool_data_tables( trans.app )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Do not try to display non-text datasets in collaboration framework.
by Bitbucket 18 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/131f63d24221/
changeset: 131f63d24221
user: jgoecks
date: 2013-02-18 15:46:35
summary: Do not try to display non-text datasets in collaboration framework.
affected #: 2 files
diff -r 09c81e81952d4768d63fde4840f31958d8e23041 -r 131f63d242216522ffb06cada3684fbf93fe9254 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -20,6 +20,7 @@
from galaxy.exceptions import *
from galaxy.model import NoConverterException, ConverterDependencyException
from galaxy.datatypes.interval import ChromatinInteractions
+from galaxy.datatypes.data import Text
from Cheetah.Template import Template
@@ -270,17 +271,22 @@
def get_data( self, dataset, preview=True ):
""" Gets a dataset's data. """
+
# Get data from file, truncating if necessary.
truncated = False
dataset_data = None
if os.path.exists( dataset.file_name ):
- max_peek_size = 1000000 # 1 MB
- if preview and os.stat( dataset.file_name ).st_size > max_peek_size:
- dataset_data = open( dataset.file_name ).read(max_peek_size)
- truncated = True
+ if isinstance( dataset.datatype, Text ):
+ max_peek_size = 1000000 # 1 MB
+ if preview and os.stat( dataset.file_name ).st_size > max_peek_size:
+ dataset_data = open( dataset.file_name ).read(max_peek_size)
+ truncated = True
+ else:
+ dataset_data = open( dataset.file_name ).read(max_peek_size)
+ truncated = False
else:
- dataset_data = open( dataset.file_name ).read(max_peek_size)
- truncated = False
+ # For now, cannot get data from non-text datasets.
+ dataset_data = None
return truncated, dataset_data
def check_dataset_state( self, trans, dataset ):
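The net effect of the controller change above is that file contents are read only for datasets whose datatype derives from Text, with previews of large text files truncated at 1 MB; any other datatype yields no inline data, which the template change below then reports as not displayable. A standalone sketch of that rule, with a hypothetical function name rather than the controller method:

    import os

    MAX_PEEK_SIZE = 1000000  # 1 MB preview limit

    def get_preview_data(file_name, datatype_is_text, preview=True):
        """Return (truncated, data); non-text datasets get no inline data."""
        if not os.path.exists(file_name) or not datatype_is_text:
            return False, None
        truncated = preview and os.stat(file_name).st_size > MAX_PEEK_SIZE
        with open(file_name) as fh:
            data = fh.read(MAX_PEEK_SIZE) if truncated else fh.read()
        return truncated, data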
diff -r 09c81e81952d4768d63fde4840f31958d8e23041 -r 131f63d242216522ffb06cada3684fbf93fe9254 templates/webapps/galaxy/dataset/display.mako
--- a/templates/webapps/galaxy/dataset/display.mako
+++ b/templates/webapps/galaxy/dataset/display.mako
@@ -61,7 +61,7 @@
<%def name="render_item( data, data_to_render )">
## Chunkable data is rendered in JavaScript above; render unchunkable data below.
- %if not data.datatype.CHUNKABLE:
+ %if not data.datatype.CHUNKABLE and data_to_render:
%if truncated:
<div class="warningmessagelarge">
This dataset is large and only the first megabyte is shown below. |
@@ -70,6 +70,8 @@
%endif
## TODO: why is the default font size so small?
<pre style="font-size: 135%">${ data_to_render | h }</pre>
+ %else:
+ <p align='center'>Cannot show dataset content</p>
%endif
</%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Move grid element initialization to main body to prevent it from happening too early. Pack scripts.
by Bitbucket 17 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/09c81e81952d/
changeset: 09c81e81952d
user: jgoecks
date: 2013-02-17 20:59:02
summary: Move grid element initialization to main body to prevent it from happening too early. Pack scripts.
affected #: 4 files
diff -r 40a7ef4efb22afdc6bde4bce4e75d691e4ac7a7f -r 09c81e81952d4768d63fde4840f31958d8e23041 static/scripts/galaxy.grids.js
--- a/static/scripts/galaxy.grids.js
+++ b/static/scripts/galaxy.grids.js
@@ -4,18 +4,6 @@
// not appended to the identifier of a nested array.
jQuery.ajaxSettings.traditional = true;
-// Initialize grid objects on load.
-$(document).ready(function() {
- init_grid_elements();
- init_grid_controls();
-
- // Initialize text filters to select text on click and use normal font when user is typing.
- $('input[type=text]').each(function() {
- $(this).click(function() { $(this).select(); } )
- .keyup(function () { $(this).css("font-style", "normal"); });
- });
-});
-
/**
* A Galaxy grid.
*/
diff -r 40a7ef4efb22afdc6bde4bce4e75d691e4ac7a7f -r 09c81e81952d4768d63fde4840f31958d8e23041 static/scripts/packed/galaxy.grids.js
--- a/static/scripts/packed/galaxy.grids.js
+++ b/static/scripts/packed/galaxy.grids.js
@@ -1,1 +1,1 @@
-jQuery.ajaxSettings.traditional=true;$(document).ready(function(){init_grid_elements();init_grid_controls();$("input[type=text]").each(function(){$(this).click(function(){$(this).select()}).keyup(function(){$(this).css("font-style","normal")})})});var Grid=Backbone.Model.extend({defaults:{url_base:"",async:false,async_ops:[],categorical_filters:[],filters:{},sort_key:null,show_item_checkboxes:false,cur_page:1,num_pages:1,operation:undefined,item_ids:undefined},can_async_op:function(a){return _.indexOf(this.attributes.async_ops,a)!==-1},add_filter:function(e,f,b){if(b){var c=this.attributes.filters[e],a;if(c===null||c===undefined){a=f}else{if(typeof(c)=="string"){if(c=="All"){a=f}else{var d=[];d[0]=c;d[1]=f;a=d}}else{a=c;a.push(f)}}this.attributes.filters[e]=a}else{this.attributes.filters[e]=f}},remove_filter:function(b,e){var a=this.attributes.filters[b];if(a===null||a===undefined){return false}var d=true;if(typeof(a)==="string"){if(a=="All"){d=false}else{delete this.attributes.filters[b]}}else{var c=_.indexOf(a,e);if(c!==-1){a.splice(c,1)}else{d=false}}return d},get_url_data:function(){var a={async:this.attributes.async,sort:this.attributes.sort_key,page:this.attributes.cur_page,show_item_checkboxes:this.attributes.show_item_checkboxes};if(this.attributes.operation){a.operation=this.attributes.operation}if(this.attributes.item_ids){a.id=this.attributes.item_ids}var b=this;_.each(_.keys(b.attributes.filters),function(c){a["f-"+c]=b.attributes.filters[c]});return a}});function init_operation_buttons(){$("input[name=operation]:submit").each(function(){$(this).click(function(){var b=$(this).val();var a=[];$("input[name=id]:checked").each(function(){a.push($(this).val())});do_operation(b,a)})})}function init_grid_controls(){init_operation_buttons();$(".submit-image").each(function(){$(this).mousedown(function(){$(this).addClass("gray-background")});$(this).mouseup(function(){$(this).removeClass("gray-background")})});$(".sort-link").each(function(){$(this).click(function(){set_sort_condition($(this).attr("sort_key"));return false})});$(".page-link > a").each(function(){$(this).click(function(){set_page($(this).attr("page_num"));return false})});$(".categorical-filter > a").each(function(){$(this).click(function(){set_categorical_filter($(this).attr("filter_key"),$(this).attr("filter_val"));return false})});$(".text-filter-form").each(function(){$(this).submit(function(){var d=$(this).attr("column_key");var c=$("#input-"+d+"-filter");var e=c.val();c.val("");add_filter_condition(d,e,true);return false})});var a=$("#input-tags-filter");if(a.length){a.autocomplete(history_tag_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}var b=$("#input-name-filter");if(b.length){b.autocomplete(history_name_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}$(".advanced-search-toggle").each(function(){$(this).click(function(){$("#standard-search").slideToggle("fast");$("#advanced-search").slideToggle("fast");return false})})}function init_grid_elements(){$(".grid").each(function(){var b=$(this).find("input.grid-row-select-checkbox");var a=$(this).find("span.grid-selected-count");var c=function(){a.text($(b).filter(":checked").length)};$(b).each(function(){$(this).change(c)});c()});$(".label").each(function(){var a=$(this).attr("href");if(a!==undefined&&a.indexOf("operation=")!=-1){$(this).click(function(){do_operation_from_href($(this).attr("href"));return false})}});$(".community_rating_star").rating({});make_popup_menus()}function 
go_page_one(){var a=grid.get("cur_page");if(a!==null&&a!==undefined&&a!=="all"){grid.set("cur_page",1)}}function add_filter_condition(c,e,a){if(e===""){return false}grid.add_filter(c,e,a);var d=$("<span>"+e+"<a href='javascript:void(0);'><span class='delete-search-icon' /></a></span>");d.addClass("text-filter-val");d.click(function(){grid.remove_filter(c,e);$(this).remove();go_page_one();update_grid()});var b=$("#"+c+"-filtering-criteria");b.append(d);go_page_one();update_grid()}function add_tag_to_grid_filter(c,b){var a=c+(b!==undefined&&b!==""?":"+b:"");$("#advanced-search").show("fast");add_filter_condition("tags",a,true)}function set_sort_condition(f){var e=grid.get("sort_key");var d=f;if(e.indexOf(f)!==-1){if(e.substring(0,1)!=="-"){d="-"+f}else{}}$(".sort-arrow").remove();var c=(d.substring(0,1)=="-")?"↑":"↓";var a=$("<span>"+c+"</span>").addClass("sort-arrow");var b=$("#"+f+"-header");b.append(a);grid.set("sort_key",d);go_page_one();update_grid()}function set_categorical_filter(b,d){var a=grid.get("categorical_filters")[b],c=grid.get("filters")[b];$("."+b+"-filter").each(function(){var h=$.trim($(this).text());var f=a[h];var g=f[b];if(g==d){$(this).empty();$(this).addClass("current-filter");$(this).append(h)}else{if(g==c){$(this).empty();var e=$("<a href='#'>"+h+"</a>");e.click(function(){set_categorical_filter(b,g)});$(this).removeClass("current-filter");$(this).append(e)}}});grid.add_filter(b,d);go_page_one();update_grid()}function set_page(a){$(".page-link").each(function(){var g=$(this).attr("id"),e=parseInt(g.split("-")[2],10),c=grid.get("cur_page"),f;if(e===a){f=$(this).children().text();$(this).empty();$(this).addClass("inactive-link");$(this).text(f)}else{if(e===c){f=$(this).text();$(this).empty();$(this).removeClass("inactive-link");var d=$("<a href='#'>"+f+"</a>");d.click(function(){set_page(e)});$(this).append(d)}}});var b=true;if(a==="all"){grid.set("cur_page",a);b=false}else{grid.set("cur_page",parseInt(a,10))}update_grid(b)}function do_operation(b,a){b=b.toLowerCase();grid.set({operation:b,item_ids:a});if(grid.can_async_op(b)){update_grid(true)}else{go_to_URL()}}function do_operation_from_href(c){var f=c.split("?");if(f.length>1){var a=f[1];var e=a.split("&");var b=null;var g=-1;for(var d=0;d<e.length;d++){if(e[d].indexOf("operation")!=-1){b=e[d].split("=")[1]}else{if(e[d].indexOf("id")!=-1){g=e[d].split("=")[1]}}}do_operation(b,g);return false}}function go_to_URL(){grid.set("async",false);window.location=grid.get("url_base")+"?"+$.param(grid.get_url_data())}function update_grid(a){if(!grid.get("async")){go_to_URL();return}var b=(grid.get("operation")?"POST":"GET");$(".loading-elt-overlay").show();$.ajax({type:b,url:grid.get("url_base"),data:grid.get_url_data(),error:function(){alert("Grid refresh failed")},success:function(d){var c=d.split("*****");$("#grid-table-body").html(c[0]);$("#grid-table-footer").html(c[1]);$("#grid-table-body").trigger("update");init_grid_elements();init_operation_buttons();make_popup_menus();$(".loading-elt-overlay").hide();var e=$.trim(c[2]);if(e!==""){$("#grid-message").html(e).show();setTimeout(function(){$("#grid-message").hide()},5000)}},complete:function(){grid.set({operation:undefined,item_ids:undefined})}})}function check_all_items(){var a=document.getElementById("check_all"),b=document.getElementsByTagName("input"),d=0,c;if(a.checked===true){for(c=0;c<b.length;c++){if(b[c].name.indexOf("id")!==-1){b[c].checked=true;d++}}}else{for(c=0;c<b.length;c++){if(b[c].name.indexOf("id")!==-1){b[c].checked=false}}}init_grid_elements()};
\ No newline at end of file
+jQuery.ajaxSettings.traditional=true;var Grid=Backbone.Model.extend({defaults:{url_base:"",async:false,async_ops:[],categorical_filters:[],filters:{},sort_key:null,show_item_checkboxes:false,cur_page:1,num_pages:1,operation:undefined,item_ids:undefined},can_async_op:function(a){return _.indexOf(this.attributes.async_ops,a)!==-1},add_filter:function(e,f,b){if(b){var c=this.attributes.filters[e],a;if(c===null||c===undefined){a=f}else{if(typeof(c)=="string"){if(c=="All"){a=f}else{var d=[];d[0]=c;d[1]=f;a=d}}else{a=c;a.push(f)}}this.attributes.filters[e]=a}else{this.attributes.filters[e]=f}},remove_filter:function(b,e){var a=this.attributes.filters[b];if(a===null||a===undefined){return false}var d=true;if(typeof(a)==="string"){if(a=="All"){d=false}else{delete this.attributes.filters[b]}}else{var c=_.indexOf(a,e);if(c!==-1){a.splice(c,1)}else{d=false}}return d},get_url_data:function(){var a={async:this.attributes.async,sort:this.attributes.sort_key,page:this.attributes.cur_page,show_item_checkboxes:this.attributes.show_item_checkboxes};if(this.attributes.operation){a.operation=this.attributes.operation}if(this.attributes.item_ids){a.id=this.attributes.item_ids}var b=this;_.each(_.keys(b.attributes.filters),function(c){a["f-"+c]=b.attributes.filters[c]});return a}});function init_operation_buttons(){$("input[name=operation]:submit").each(function(){$(this).click(function(){var b=$(this).val();var a=[];$("input[name=id]:checked").each(function(){a.push($(this).val())});do_operation(b,a)})})}function init_grid_controls(){init_operation_buttons();$(".submit-image").each(function(){$(this).mousedown(function(){$(this).addClass("gray-background")});$(this).mouseup(function(){$(this).removeClass("gray-background")})});$(".sort-link").each(function(){$(this).click(function(){set_sort_condition($(this).attr("sort_key"));return false})});$(".page-link > a").each(function(){$(this).click(function(){set_page($(this).attr("page_num"));return false})});$(".categorical-filter > a").each(function(){$(this).click(function(){set_categorical_filter($(this).attr("filter_key"),$(this).attr("filter_val"));return false})});$(".text-filter-form").each(function(){$(this).submit(function(){var d=$(this).attr("column_key");var c=$("#input-"+d+"-filter");var e=c.val();c.val("");add_filter_condition(d,e,true);return false})});var a=$("#input-tags-filter");if(a.length){a.autocomplete(history_tag_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}var b=$("#input-name-filter");if(b.length){b.autocomplete(history_name_autocomplete_url,{selectFirst:false,autoFill:false,highlight:false,mustMatch:false})}$(".advanced-search-toggle").each(function(){$(this).click(function(){$("#standard-search").slideToggle("fast");$("#advanced-search").slideToggle("fast");return false})})}function init_grid_elements(){$(".grid").each(function(){var b=$(this).find("input.grid-row-select-checkbox");var a=$(this).find("span.grid-selected-count");var c=function(){a.text($(b).filter(":checked").length)};$(b).each(function(){$(this).change(c)});c()});$(".label").each(function(){var a=$(this).attr("href");if(a!==undefined&&a.indexOf("operation=")!=-1){$(this).click(function(){do_operation_from_href($(this).attr("href"));return false})}});$(".community_rating_star").rating({});make_popup_menus()}function go_page_one(){var a=grid.get("cur_page");if(a!==null&&a!==undefined&&a!=="all"){grid.set("cur_page",1)}}function add_filter_condition(c,e,a){if(e===""){return false}grid.add_filter(c,e,a);var d=$("<span>"+e+"<a 
href='javascript:void(0);'><span class='delete-search-icon' /></a></span>");d.addClass("text-filter-val");d.click(function(){grid.remove_filter(c,e);$(this).remove();go_page_one();update_grid()});var b=$("#"+c+"-filtering-criteria");b.append(d);go_page_one();update_grid()}function add_tag_to_grid_filter(c,b){var a=c+(b!==undefined&&b!==""?":"+b:"");$("#advanced-search").show("fast");add_filter_condition("tags",a,true)}function set_sort_condition(f){var e=grid.get("sort_key");var d=f;if(e.indexOf(f)!==-1){if(e.substring(0,1)!=="-"){d="-"+f}else{}}$(".sort-arrow").remove();var c=(d.substring(0,1)=="-")?"↑":"↓";var a=$("<span>"+c+"</span>").addClass("sort-arrow");var b=$("#"+f+"-header");b.append(a);grid.set("sort_key",d);go_page_one();update_grid()}function set_categorical_filter(b,d){var a=grid.get("categorical_filters")[b],c=grid.get("filters")[b];$("."+b+"-filter").each(function(){var h=$.trim($(this).text());var f=a[h];var g=f[b];if(g==d){$(this).empty();$(this).addClass("current-filter");$(this).append(h)}else{if(g==c){$(this).empty();var e=$("<a href='#'>"+h+"</a>");e.click(function(){set_categorical_filter(b,g)});$(this).removeClass("current-filter");$(this).append(e)}}});grid.add_filter(b,d);go_page_one();update_grid()}function set_page(a){$(".page-link").each(function(){var g=$(this).attr("id"),e=parseInt(g.split("-")[2],10),c=grid.get("cur_page"),f;if(e===a){f=$(this).children().text();$(this).empty();$(this).addClass("inactive-link");$(this).text(f)}else{if(e===c){f=$(this).text();$(this).empty();$(this).removeClass("inactive-link");var d=$("<a href='#'>"+f+"</a>");d.click(function(){set_page(e)});$(this).append(d)}}});var b=true;if(a==="all"){grid.set("cur_page",a);b=false}else{grid.set("cur_page",parseInt(a,10))}update_grid(b)}function do_operation(b,a){b=b.toLowerCase();grid.set({operation:b,item_ids:a});if(grid.can_async_op(b)){update_grid(true)}else{go_to_URL()}}function do_operation_from_href(c){var f=c.split("?");if(f.length>1){var a=f[1];var e=a.split("&");var b=null;var g=-1;for(var d=0;d<e.length;d++){if(e[d].indexOf("operation")!=-1){b=e[d].split("=")[1]}else{if(e[d].indexOf("id")!=-1){g=e[d].split("=")[1]}}}do_operation(b,g);return false}}function go_to_URL(){grid.set("async",false);window.location=grid.get("url_base")+"?"+$.param(grid.get_url_data())}function update_grid(a){if(!grid.get("async")){go_to_URL();return}var b=(grid.get("operation")?"POST":"GET");$(".loading-elt-overlay").show();$.ajax({type:b,url:grid.get("url_base"),data:grid.get_url_data(),error:function(){alert("Grid refresh failed")},success:function(d){var c=d.split("*****");$("#grid-table-body").html(c[0]);$("#grid-table-footer").html(c[1]);$("#grid-table-body").trigger("update");init_grid_elements();init_operation_buttons();make_popup_menus();$(".loading-elt-overlay").hide();var e=$.trim(c[2]);if(e!==""){$("#grid-message").html(e).show();setTimeout(function(){$("#grid-message").hide()},5000)}},complete:function(){grid.set({operation:undefined,item_ids:undefined})}})}function check_all_items(){var a=document.getElementById("check_all"),b=document.getElementsByTagName("input"),d=0,c;if(a.checked===true){for(c=0;c<b.length;c++){if(b[c].name.indexOf("id")!==-1){b[c].checked=true;d++}}}else{for(c=0;c<b.length;c++){if(b[c].name.indexOf("id")!==-1){b[c].checked=false}}}init_grid_elements()};
\ No newline at end of file
diff -r 40a7ef4efb22afdc6bde4bce4e75d691e4ac7a7f -r 09c81e81952d4768d63fde4840f31958d8e23041 static/scripts/packed/viz/visualization.js
--- a/static/scripts/packed/viz/visualization.js
+++ b/static/scripts/packed/viz/visualization.js
@@ -1,1 +1,1 @@
-define(["libs/underscore","mvc/data","viz/trackster/util","utils/config"],function(s,i,l,p){var a=function(u,x,w,v){$.ajax({url:u,data:w,error:function(){alert("Grid failed")},success:function(y){show_modal("Select datasets for new tracks",y,{Cancel:function(){hide_modal()},Add:function(){var z=[];$("input[name=id]:checked,input[name=ldda_ids]:checked").each(function(){var A={data_type:"track_config",hda_ldda:"hda"},B=$(this).val();if($(this).attr("name")!=="id"){A.hda_ldda="ldda"}z[z.length]=$.ajax({url:x+"/"+B,data:A,dataType:"json"})});$.when.apply($,z).then(function(){var A=(arguments[0] instanceof Array?$.map(arguments,function(B){return B[0]}):[arguments[0]]);v(A)});hide_modal()}})}})};var j=function(u){return("isResolved" in u)};var f=function(u){this.default_font=u!==undefined?u:"9px Monaco, Lucida Console, monospace";this.dummy_canvas=this.new_canvas();this.dummy_context=this.dummy_canvas.getContext("2d");this.dummy_context.font=this.default_font;this.char_width_px=this.dummy_context.measureText("A").width;this.patterns={};this.load_pattern("right_strand","/visualization/strand_right.png");this.load_pattern("left_strand","/visualization/strand_left.png");this.load_pattern("right_strand_inv","/visualization/strand_right_inv.png");this.load_pattern("left_strand_inv","/visualization/strand_left_inv.png")};s.extend(f.prototype,{load_pattern:function(u,y){var v=this.patterns,w=this.dummy_context,x=new Image();x.src=galaxy_paths.attributes.image_path+y;x.onload=function(){v[u]=w.createPattern(x,"repeat")}},get_pattern:function(u){return this.patterns[u]},new_canvas:function(){var u=$("<canvas/>")[0];if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(u)}u.manager=this;return u}});var q=Backbone.Model.extend({defaults:{num_elements:20,obj_cache:null,key_ary:null},initialize:function(u){this.clear()},get_elt:function(v){var w=this.attributes.obj_cache,x=this.attributes.key_ary,u=x.indexOf(v);if(u!==-1){if(w[v].stale){x.splice(u,1);delete w[v]}else{this.move_key_to_end(v,u)}}return w[v]},set_elt:function(v,x){var y=this.attributes.obj_cache,z=this.attributes.key_ary,w=this.attributes.num_elements;if(!y[v]){if(z.length>=w){var u=z.shift();delete y[u]}z.push(v)}y[v]=x;return x},move_key_to_end:function(v,u){this.attributes.key_ary.splice(u,1);this.attributes.key_ary.push(v)},clear:function(){this.attributes.obj_cache={};this.attributes.key_ary=[]},size:function(){return this.attributes.key_ary.length}});var d=q.extend({defaults:s.extend({},q.prototype.defaults,{dataset:null,init_data:null,filters_manager:null,data_type:"data",data_mode_compatible:function(u,v){return true},can_subset:function(u){return false}}),initialize:function(u){q.prototype.initialize.call(this);var v=this.get("init_data");if(v){this.add_data(v)}},add_data:function(u){if(this.get("num_elements")<u.length){this.set("num_elements",u.length)}var v=this;s.each(u,function(w){v.set_data(w.region,w)})},data_is_ready:function(){var x=this.get("dataset"),w=$.Deferred(),u=(this.get("data_type")=="raw_data"?"state":this.get("data_type")=="data"?"converted_datasets_state":"error"),v=new l.ServerStateDeferred({ajax_settings:{url:this.get("dataset").url(),data:{hda_ldda:x.get("hda_ldda"),data_type:u},dataType:"json"},interval:5000,success_fn:function(y){return y!=="pending"}});$.when(v.go()).then(function(y){w.resolve(y==="ok"||y==="data")});return w},search_features:function(u){var v=this.get("dataset"),w={query:u,hda_ldda:v.get("hda_ldda"),data_type:"features"};return $.getJSON(v.url(),w)},load_data:function(C,B,v,A){var 
y=this.get("dataset"),x={data_type:this.get("data_type"),chrom:C.get("chrom"),low:C.get("start"),high:C.get("end"),mode:B,resolution:v,hda_ldda:y.get("hda_ldda")};$.extend(x,A);var E=this.get("filters_manager");if(E){var F=[];var u=E.filters;for(var z=0;z<u.length;z++){F.push(u[z].name)}x.filter_cols=JSON.stringify(F)}var w=this,D=$.getJSON(y.url(),x,function(G){w.set_data(C,G)});this.set_data(C,D);return D},get_data:function(A,z,w,y){var B=this.get_elt(A);if(B&&(j(B)||this.get("data_mode_compatible")(B,z))){return B}var C=this.get("key_ary"),v=this.get("obj_cache"),D,u;for(var x=0;x<C.length;x++){D=C[x];u=new g({from_str:D});if(u.contains(A)){B=v[D];if(j(B)||(this.get("data_mode_compatible")(B,z)&&this.get("can_subset")(B))){this.move_key_to_end(D,x);return B}}}return this.load_data(A,z,w,y)},set_data:function(v,u){this.set_elt(v,u)},DEEP_DATA_REQ:"deep",BROAD_DATA_REQ:"breadth",get_more_data:function(C,B,x,A,y){var E=this._mark_stale(C);if(!(E&&this.get("data_mode_compatible")(E,B))){console.log("ERROR: problem with getting more data: current data is not compatible");return}var w=C.get("start");if(y===this.DEEP_DATA_REQ){$.extend(A,{start_val:E.data.length+1})}else{if(y===this.BROAD_DATA_REQ){w=(E.max_high?E.max_high:E.data[E.data.length-1][2])+1}}var D=C.copy().set("start",w);var v=this,z=this.load_data(D,B,x,A),u=$.Deferred();this.set_data(C,u);$.when(z).then(function(F){if(F.data){F.data=E.data.concat(F.data);if(F.max_low){F.max_low=E.max_low}if(F.message){F.message=F.message.replace(/[0-9]+/,F.data.length)}}v.set_data(C,F);u.resolve(F)});return u},can_get_more_detailed_data:function(v){var u=this.get_elt(v);return(u.dataset_type==="bigwig"&&u.data.length<8000)},get_more_detailed_data:function(x,z,v,y,w){var u=this._mark_stale(x);if(!u){console.log("ERROR getting more detailed data: no current data");return}if(!w){w={}}if(u.dataset_type==="bigwig"){w.num_samples=1000*y}else{if(u.dataset_type==="summary_tree"){w.level=Math.min(u.level-1,2)}}return this.load_data(x,z,v,w)},_mark_stale:function(v){var u=this.get_elt(v);if(!u){console.log("ERROR: no data to mark as stale: ",this.get("dataset"),v.toString())}u.stale=true;return u},get_genome_wide_data:function(u){var w=this,y=true,x=s.map(u.get("chroms_info").chrom_info,function(A){var z=w.get_elt(new g({chrom:A.chrom,start:0,end:A.len}));if(!z){y=false}return z});if(y){return x}var v=$.Deferred();$.getJSON(this.get("dataset").url(),{data_type:"genome_data"},function(z){w.add_data(z.data);v.resolve(z.data)});return v},get_elt:function(u){return q.prototype.get_elt.call(this,u.toString())},set_elt:function(v,u){return q.prototype.set_elt.call(this,v.toString(),u)}});var n=d.extend({initialize:function(u){var v=new Backbone.Model();v.urlRoot=u.data_url;this.set("dataset",v)},load_data:function(w,x,u,v){if(u>1){return{data:null}}return d.prototype.load_data.call(this,w,x,u,v)}});var c=Backbone.Model.extend({defaults:{name:null,key:null,chroms_info:null},initialize:function(u){this.id=u.dbkey},get_chroms_info:function(){return this.attributes.chroms_info.chrom_info},get_chrom_region:function(u){var v=s.find(this.get_chroms_info(),function(w){return w.chrom==u});return new g({chrom:v.chrom,end:v.len})}});var g=Backbone.RelationalModel.extend({defaults:{chrom:null,start:0,end:0},initialize:function(v){if(v.from_str){var x=v.from_str.split(":"),w=x[0],u=x[1].split("-");this.set({chrom:w,start:parseInt(u[0],10),end:parseInt(u[1],10)})}},copy:function(){return new 
g({chrom:this.get("chrom"),start:this.get("start"),end:this.get("end")})},length:function(){return this.get("end")-this.get("start")},toString:function(){return this.get("chrom")+":"+this.get("start")+"-"+this.get("end")},toJSON:function(){return{chrom:this.get("chrom"),start:this.get("start"),end:this.get("end")}},compute_overlap:function(B){var v=this.get("chrom"),A=B.get("chrom"),z=this.get("start"),x=B.get("start"),y=this.get("end"),w=B.get("end"),u;if(v&&A&&v!==A){return g.overlap_results.DIF_CHROMS}if(z<x){if(y<x){u=g.overlap_results.BEFORE}else{if(y<=w){u=g.overlap_results.OVERLAP_START}else{u=g.overlap_results.CONTAINS}}}else{if(z>w){u=g.overlap_results.AFTER}else{if(y<=w){u=g.overlap_results.CONTAINED_BY}else{u=g.overlap_results.OVERLAP_END}}}return u},contains:function(u){return this.compute_overlap(u)===g.overlap_results.CONTAINS},overlaps:function(u){return s.intersection([this.compute_overlap(u)],[g.overlap_results.DIF_CHROMS,g.overlap_results.BEFORE,g.overlap_results.AFTER]).length===0}},{overlap_results:{DIF_CHROMS:1000,BEFORE:1001,CONTAINS:1002,OVERLAP_START:1003,OVERLAP_END:1004,CONTAINED_BY:1005,AFTER:1006}});var m=Backbone.Collection.extend({model:g});var e=Backbone.RelationalModel.extend({defaults:{region:null,note:""},relations:[{type:Backbone.HasOne,key:"region",relatedModel:g}]});var r=Backbone.Collection.extend({model:e});var t=i.Dataset.extend({initialize:function(u){this.set("id",u.dataset_id);this.set("config",p.ConfigSettingCollection.from_config_dict(u.prefs));this.get("config").add([{key:"name",value:this.get("name")},{key:"color"}]);var v=this.get("preloaded_data");if(v){v=v.data}else{v=[]}this.set("data_manager",new d({dataset:this,init_data:v}))}});var o=Backbone.RelationalModel.extend({defaults:{title:"",type:""},url:galaxy_paths.get("visualization_url"),save:function(){return $.ajax({url:this.url(),type:"POST",dataType:"json",data:{vis_json:JSON.stringify(this)}})}});var k=o.extend({defaults:s.extend({},o.prototype.defaults,{dbkey:"",tracks:null,bookmarks:null,viewport:null}),relations:[{type:Backbone.HasMany,key:"tracks",relatedModel:t}],add_tracks:function(u){this.get("tracks").add(u)}});var b=Backbone.Model.extend({});var h=Backbone.Router.extend({initialize:function(v){this.view=v.view;this.route(/([\w]+)$/,"change_location");this.route(/([\w]+\:[\d,]+-[\d,]+)$/,"change_location");var u=this;u.view.on("navigate",function(w){u.navigate(w)})},change_location:function(u){this.view.go_to(u)}});return{BackboneTrack:t,BrowserBookmark:e,BrowserBookmarkCollection:r,Cache:q,CanvasManager:f,Genome:c,GenomeDataManager:d,GenomeRegion:g,GenomeRegionCollection:m,GenomeVisualization:k,ReferenceTrackDataManager:n,TrackBrowserRouter:h,TrackConfig:b,Visualization:o,select_datasets:a}});
\ No newline at end of file
+define(["libs/underscore","mvc/data","viz/trackster/util","utils/config"],function(s,i,l,p){var a=function(u,x,w,v){$.ajax({url:u,data:w,error:function(){alert("Grid failed")},success:function(y){show_modal("Select datasets for new tracks",y,{Cancel:function(){hide_modal()},Add:function(){var z=[];$("input[name=id]:checked,input[name=ldda_ids]:checked").each(function(){var A={data_type:"track_config",hda_ldda:"hda"},B=$(this).val();if($(this).attr("name")!=="id"){A.hda_ldda="ldda"}z[z.length]=$.ajax({url:x+"/"+B,data:A,dataType:"json"})});$.when.apply($,z).then(function(){var A=(arguments[0] instanceof Array?$.map(arguments,function(B){return B[0]}):[arguments[0]]);v(A)});hide_modal()}})}})};var j=function(u){return("isResolved" in u)};var f=function(u){this.default_font=u!==undefined?u:"9px Monaco, Lucida Console, monospace";this.dummy_canvas=this.new_canvas();this.dummy_context=this.dummy_canvas.getContext("2d");this.dummy_context.font=this.default_font;this.char_width_px=this.dummy_context.measureText("A").width;this.patterns={};this.load_pattern("right_strand","/visualization/strand_right.png");this.load_pattern("left_strand","/visualization/strand_left.png");this.load_pattern("right_strand_inv","/visualization/strand_right_inv.png");this.load_pattern("left_strand_inv","/visualization/strand_left_inv.png")};s.extend(f.prototype,{load_pattern:function(u,y){var v=this.patterns,w=this.dummy_context,x=new Image();x.src=galaxy_paths.attributes.image_path+y;x.onload=function(){v[u]=w.createPattern(x,"repeat")}},get_pattern:function(u){return this.patterns[u]},new_canvas:function(){var u=$("<canvas/>")[0];if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(u)}u.manager=this;return u}});var q=Backbone.Model.extend({defaults:{num_elements:20,obj_cache:null,key_ary:null},initialize:function(u){this.clear()},get_elt:function(v){var w=this.attributes.obj_cache,x=this.attributes.key_ary,u=x.indexOf(v);if(u!==-1){if(w[v].stale){x.splice(u,1);delete w[v]}else{this.move_key_to_end(v,u)}}return w[v]},set_elt:function(v,x){var y=this.attributes.obj_cache,z=this.attributes.key_ary,w=this.attributes.num_elements;if(!y[v]){if(z.length>=w){var u=z.shift();delete y[u]}z.push(v)}y[v]=x;return x},move_key_to_end:function(v,u){this.attributes.key_ary.splice(u,1);this.attributes.key_ary.push(v)},clear:function(){this.attributes.obj_cache={};this.attributes.key_ary=[]},size:function(){return this.attributes.key_ary.length}});var d=q.extend({defaults:s.extend({},q.prototype.defaults,{dataset:null,init_data:null,filters_manager:null,data_type:"data",data_mode_compatible:function(u,v){return true},can_subset:function(u){return false}}),initialize:function(u){q.prototype.initialize.call(this);var v=this.get("init_data");if(v){this.add_data(v)}},add_data:function(u){if(this.get("num_elements")<u.length){this.set("num_elements",u.length)}var v=this;s.each(u,function(w){v.set_data(w.region,w)})},data_is_ready:function(){var x=this.get("dataset"),w=$.Deferred(),u=(this.get("data_type")==="raw_data"?"state":this.get("data_type")==="data"?"converted_datasets_state":"error"),v=new l.ServerStateDeferred({ajax_settings:{url:this.get("dataset").url(),data:{hda_ldda:x.get("hda_ldda"),data_type:u},dataType:"json"},interval:5000,success_fn:function(y){return y!=="pending"}});$.when(v.go()).then(function(y){w.resolve(y==="ok"||y==="data")});return w},search_features:function(u){var v=this.get("dataset"),w={query:u,hda_ldda:v.get("hda_ldda"),data_type:"features"};return 
$.getJSON(v.url(),w)},load_data:function(C,B,v,A){var y=this.get("dataset"),x={data_type:this.get("data_type"),chrom:C.get("chrom"),low:C.get("start"),high:C.get("end"),mode:B,resolution:v,hda_ldda:y.get("hda_ldda")};$.extend(x,A);var E=this.get("filters_manager");if(E){var F=[];var u=E.filters;for(var z=0;z<u.length;z++){F.push(u[z].name)}x.filter_cols=JSON.stringify(F)}var w=this,D=$.getJSON(y.url(),x,function(G){w.set_data(C,G)});this.set_data(C,D);return D},get_data:function(A,z,w,y){var B=this.get_elt(A);if(B&&(j(B)||this.get("data_mode_compatible")(B,z))){return B}var C=this.get("key_ary"),v=this.get("obj_cache"),D,u;for(var x=0;x<C.length;x++){D=C[x];u=new g({from_str:D});if(u.contains(A)){B=v[D];if(j(B)||(this.get("data_mode_compatible")(B,z)&&this.get("can_subset")(B))){this.move_key_to_end(D,x);return B}}}return this.load_data(A,z,w,y)},set_data:function(v,u){this.set_elt(v,u)},DEEP_DATA_REQ:"deep",BROAD_DATA_REQ:"breadth",get_more_data:function(C,B,x,A,y){var E=this._mark_stale(C);if(!(E&&this.get("data_mode_compatible")(E,B))){console.log("ERROR: problem with getting more data: current data is not compatible");return}var w=C.get("start");if(y===this.DEEP_DATA_REQ){$.extend(A,{start_val:E.data.length+1})}else{if(y===this.BROAD_DATA_REQ){w=(E.max_high?E.max_high:E.data[E.data.length-1][2])+1}}var D=C.copy().set("start",w);var v=this,z=this.load_data(D,B,x,A),u=$.Deferred();this.set_data(C,u);$.when(z).then(function(F){if(F.data){F.data=E.data.concat(F.data);if(F.max_low){F.max_low=E.max_low}if(F.message){F.message=F.message.replace(/[0-9]+/,F.data.length)}}v.set_data(C,F);u.resolve(F)});return u},can_get_more_detailed_data:function(v){var u=this.get_elt(v);return(u.dataset_type==="bigwig"&&u.data.length<8000)},get_more_detailed_data:function(x,z,v,y,w){var u=this._mark_stale(x);if(!u){console.log("ERROR getting more detailed data: no current data");return}if(!w){w={}}if(u.dataset_type==="bigwig"){w.num_samples=1000*y}else{if(u.dataset_type==="summary_tree"){w.level=Math.min(u.level-1,2)}}return this.load_data(x,z,v,w)},_mark_stale:function(v){var u=this.get_elt(v);if(!u){console.log("ERROR: no data to mark as stale: ",this.get("dataset"),v.toString())}u.stale=true;return u},get_genome_wide_data:function(u){var w=this,y=true,x=s.map(u.get("chroms_info").chrom_info,function(A){var z=w.get_elt(new g({chrom:A.chrom,start:0,end:A.len}));if(!z){y=false}return z});if(y){return x}var v=$.Deferred();$.getJSON(this.get("dataset").url(),{data_type:"genome_data"},function(z){w.add_data(z.data);v.resolve(z.data)});return v},get_elt:function(u){return q.prototype.get_elt.call(this,u.toString())},set_elt:function(v,u){return q.prototype.set_elt.call(this,v.toString(),u)}});var n=d.extend({initialize:function(u){var v=new Backbone.Model();v.urlRoot=u.data_url;this.set("dataset",v)},load_data:function(w,x,u,v){if(u>1){return{data:null}}return d.prototype.load_data.call(this,w,x,u,v)}});var c=Backbone.Model.extend({defaults:{name:null,key:null,chroms_info:null},initialize:function(u){this.id=u.dbkey},get_chroms_info:function(){return this.attributes.chroms_info.chrom_info},get_chrom_region:function(u){var v=s.find(this.get_chroms_info(),function(w){return w.chrom===u});return new g({chrom:v.chrom,end:v.len})}});var g=Backbone.RelationalModel.extend({defaults:{chrom:null,start:0,end:0},initialize:function(v){if(v.from_str){var x=v.from_str.split(":"),w=x[0],u=x[1].split("-");this.set({chrom:w,start:parseInt(u[0],10),end:parseInt(u[1],10)})}},copy:function(){return new 
g({chrom:this.get("chrom"),start:this.get("start"),end:this.get("end")})},length:function(){return this.get("end")-this.get("start")},toString:function(){return this.get("chrom")+":"+this.get("start")+"-"+this.get("end")},toJSON:function(){return{chrom:this.get("chrom"),start:this.get("start"),end:this.get("end")}},compute_overlap:function(B){var v=this.get("chrom"),A=B.get("chrom"),z=this.get("start"),x=B.get("start"),y=this.get("end"),w=B.get("end"),u;if(v&&A&&v!==A){return g.overlap_results.DIF_CHROMS}if(z<x){if(y<x){u=g.overlap_results.BEFORE}else{if(y<=w){u=g.overlap_results.OVERLAP_START}else{u=g.overlap_results.CONTAINS}}}else{if(z>w){u=g.overlap_results.AFTER}else{if(y<=w){u=g.overlap_results.CONTAINED_BY}else{u=g.overlap_results.OVERLAP_END}}}return u},contains:function(u){return this.compute_overlap(u)===g.overlap_results.CONTAINS},overlaps:function(u){return s.intersection([this.compute_overlap(u)],[g.overlap_results.DIF_CHROMS,g.overlap_results.BEFORE,g.overlap_results.AFTER]).length===0}},{overlap_results:{DIF_CHROMS:1000,BEFORE:1001,CONTAINS:1002,OVERLAP_START:1003,OVERLAP_END:1004,CONTAINED_BY:1005,AFTER:1006}});var m=Backbone.Collection.extend({model:g});var e=Backbone.RelationalModel.extend({defaults:{region:null,note:""},relations:[{type:Backbone.HasOne,key:"region",relatedModel:g}]});var r=Backbone.Collection.extend({model:e});var t=i.Dataset.extend({initialize:function(u){this.set("id",u.dataset_id);this.set("config",p.ConfigSettingCollection.from_config_dict(u.prefs));this.get("config").add([{key:"name",value:this.get("name")},{key:"color"}]);var v=this.get("preloaded_data");if(v){v=v.data}else{v=[]}this.set("data_manager",new d({dataset:this,init_data:v}))}});var o=Backbone.RelationalModel.extend({defaults:{title:"",type:""},url:galaxy_paths.get("visualization_url"),save:function(){return $.ajax({url:this.url(),type:"POST",dataType:"json",data:{vis_json:JSON.stringify(this)}})}});var k=o.extend({defaults:s.extend({},o.prototype.defaults,{dbkey:"",tracks:null,bookmarks:null,viewport:null}),relations:[{type:Backbone.HasMany,key:"tracks",relatedModel:t}],add_tracks:function(u){this.get("tracks").add(u)}});var b=Backbone.Model.extend({});var h=Backbone.Router.extend({initialize:function(v){this.view=v.view;this.route(/([\w]+)$/,"change_location");this.route(/([\w]+\:[\d,]+-[\d,]+)$/,"change_location");var u=this;u.view.on("navigate",function(w){u.navigate(w)})},change_location:function(u){this.view.go_to(u)}});return{BackboneTrack:t,BrowserBookmark:e,BrowserBookmarkCollection:r,Cache:q,CanvasManager:f,Genome:c,GenomeDataManager:d,GenomeRegion:g,GenomeRegionCollection:m,GenomeVisualization:k,ReferenceTrackDataManager:n,TrackBrowserRouter:h,TrackConfig:b,Visualization:o,select_datasets:a}});
\ No newline at end of file
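The packed module added above is hard to follow in minified form. As a readable sketch (not the committed source), the GenomeRegion model's compute_overlap classifies how two chromosomal regions relate to each other. The constant names below mirror the packed code; the plain-object regions and standalone function form are illustrative only:

    // Readable sketch of the overlap classification used by the GenomeRegion
    // model in the packed file above; not the committed, minified source.
    var overlap_results = {
        DIF_CHROMS: 1000, BEFORE: 1001, CONTAINS: 1002, OVERLAP_START: 1003,
        OVERLAP_END: 1004, CONTAINED_BY: 1005, AFTER: 1006
    };

    function compute_overlap(first, second) {
        // Regions on different chromosomes never overlap.
        if (first.chrom && second.chrom && first.chrom !== second.chrom) {
            return overlap_results.DIF_CHROMS;
        }
        if (first.start < second.start) {
            // "first" starts before "second": ends before it, inside it, or spans it.
            if (first.end < second.start) { return overlap_results.BEFORE; }
            return (first.end <= second.end) ? overlap_results.OVERLAP_START
                                             : overlap_results.CONTAINS;
        }
        // "first" starts at or after the start of "second".
        if (first.start > second.end) { return overlap_results.AFTER; }
        return (first.end <= second.end) ? overlap_results.CONTAINED_BY
                                         : overlap_results.OVERLAP_END;
    }

    // Example: chr1:100-200 lies inside chr1:50-500.
    // compute_overlap({chrom: "chr1", start: 100, end: 200},
    //                 {chrom: "chr1", start: 50, end: 500}) === overlap_results.CONTAINED_BY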
diff -r 40a7ef4efb22afdc6bde4bce4e75d691e4ac7a7f -r 09c81e81952d4768d63fde4840f31958d8e23041 templates/grid_base.mako
--- a/templates/grid_base.mako
+++ b/templates/grid_base.mako
@@ -129,6 +129,19 @@
cur_page: ${cur_page_num},
num_pages: ${num_pages}
});
+
+ // Initialize grid objects on load.
+ // FIXME: use a grid view object eventually.
+ $(document).ready(function() {
+ init_grid_elements();
+ init_grid_controls();
+
+ // Initialize text filters to select text on click and use normal font when user is typing.
+ $('input[type=text]').each(function() {
+ $(this).click(function() { $(this).select(); } )
+ .keyup(function () { $(this).css("font-style", "normal"); });
+ });
+ });
</script></%def>
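For reference, the grid_base.mako hunk above hooks grid setup into jQuery's document-ready handler and adjusts the text filter inputs. A minimal standalone sketch of the same pattern follows; it assumes init_grid_elements() and init_grid_controls() are supplied globally by Galaxy's grid scripts, which this hunk does not define:

    // Minimal sketch of the initialization pattern added in the hunk above.
    // init_grid_elements() and init_grid_controls() are assumed to be provided
    // globally by Galaxy's grid scripts (defined elsewhere, not in this hunk).
    $(document).ready(function() {
        if (typeof init_grid_elements === "function") { init_grid_elements(); }
        if (typeof init_grid_controls === "function") { init_grid_controls(); }

        // Text filters: select the current text on click; once the user types,
        // switch from the styled placeholder back to a normal font.
        $("input[type=text]").each(function() {
            $(this)
                .click(function() { $(this).select(); })
                .keyup(function() { $(this).css("font-style", "normal"); });
        });
    });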
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.