galaxy-commits
Threads by month
- ----- 2026 -----
- February
- January
- ----- 2025 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions
commit/galaxy-central: greg: Properly handle updates which have become available for tool shed repositories while they were uninstalled when reinstalling them.
by Bitbucket 16 Jan '13
by Bitbucket 16 Jan '13
16 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e6302ee56ed0/
changeset: e6302ee56ed0
user: greg
date: 2013-01-16 21:39:22
summary: Properly handle updates which have become available for tool shed repositories while they were uninstalled when reinstalling them.
affected #: 8 files
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -596,8 +596,7 @@
# In this case, a record for the repository will exist in the database with the status of 'New'.
repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
if repository and repository.metadata:
- installed_rd, missing_rd = \
- get_installed_and_missing_repository_dependencies( trans, repository )
+ installed_rd, missing_rd = get_installed_and_missing_repository_dependencies( trans, repository )
else:
installed_rd, missing_rd = get_installed_and_missing_repository_dependencies_for_new_install( trans, repo_info_tuple )
# Discover all repository dependencies and retrieve information for installing them.
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -66,7 +66,7 @@
tool_dependencies[ dependency_key ] = requirements_dict
return tool_dependencies
def build_readme_files_dict( metadata, tool_path=None ):
- """Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received repository_metadata."""
+ """Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received metadata."""
readme_files_dict = {}
if metadata:
if 'readme_files' in metadata:
@@ -128,10 +128,14 @@
containers_dict[ 'readme_files' ] = readme_files_root_folder
# Installed repository dependencies container.
if repository_dependencies:
+ if new_install:
+ label = 'Repository dependencies'
+ else:
+ label = 'Installed repository dependencies'
folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans,
folder_id=folder_id,
repository_dependencies=repository_dependencies,
- label='Installed repository dependencies',
+ label=label,
installed=True )
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
# Missing repository dependencies container.
@@ -777,17 +781,20 @@
sa_session.flush()
return tool_shed_repository
def create_repo_info_dict( trans, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None, repository=None,
- repository_metadata=None, metadata=None, repository_dependencies=None ):
+ repository_metadata=None, tool_dependencies=None, repository_dependencies=None ):
"""
Return a dictionary that includes all of the information needed to install a repository into a local Galaxy instance. The dictionary will also
contain the recursive list of repository dependencies defined for the repository, as well as the defined tool dependencies.
- This method is called from Galaxy in two places:
- 1. During the tool shed repository installation process (via the tool shed's get_repository_information() method)- in this case both the received
- repository and repository_metadata will be objects.
- 2. When a tool shed repository that was uninstalled from a Galaxy instance is being re-installed - in this case, both repository and
- repository_metadata will be None, but metadata will be the tool_shed_repository metadata on the Galaxy side, and the repository_dependencies will
- be an object previously retrieved from the tool shed.
+ This method is called from Galaxy under three scenarios:
+ 1. During the tool shed repository installation process via the tool shed's get_repository_information() method. In this case both the received
+ repository and repository_metadata will be objects, but tool_dependencies and repository_dependencies will be None.
+ 2. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no updates available. In this case, both
+ repository and repository_metadata will be None, but tool_dependencies and repository_dependencies will be objects previously retrieved from the
+ tool shed if the repository includes definitions for them.
+ 3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates available. In this case, this
+ method is reached via the tool shed's get_updated_repository_information() method, and both repository and repository_metadata will be objects
+ but tool_dependencies and repository_dependencies will be None.
"""
repo_info_dict = {}
repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
@@ -806,27 +813,24 @@
all_repository_dependencies=None,
handled_key_rd_dicts=None,
circular_repository_dependencies=None )
- if metadata:
- tool_dependencies = metadata.get( 'tool_dependencies', None )
- if tool_dependencies:
- new_tool_dependencies = {}
- for dependency_key, requirements_dict in tool_dependencies.items():
- if dependency_key in [ 'set_environment' ]:
- new_set_environment_dict_list = []
- for set_environment_dict in requirements_dict:
- set_environment_dict[ 'repository_name' ] = repository_name
- set_environment_dict[ 'repository_owner' ] = repository_owner
- set_environment_dict[ 'changeset_revision' ] = changeset_revision
- new_set_environment_dict_list.append( set_environment_dict )
- new_tool_dependencies[ dependency_key ] = new_set_environment_dict_list
- else:
- requirements_dict[ 'repository_name' ] = repository_name
- requirements_dict[ 'repository_owner' ] = repository_owner
- requirements_dict[ 'changeset_revision' ] = changeset_revision
- new_tool_dependencies[ dependency_key ] = requirements_dict
- tool_dependencies = new_tool_dependencies
- else:
- tool_dependencies = None
+ tool_dependencies = metadata.get( 'tool_dependencies', None )
+ if tool_dependencies:
+ new_tool_dependencies = {}
+ for dependency_key, requirements_dict in tool_dependencies.items():
+ if dependency_key in [ 'set_environment' ]:
+ new_set_environment_dict_list = []
+ for set_environment_dict in requirements_dict:
+ set_environment_dict[ 'repository_name' ] = repository_name
+ set_environment_dict[ 'repository_owner' ] = repository_owner
+ set_environment_dict[ 'changeset_revision' ] = changeset_revision
+ new_set_environment_dict_list.append( set_environment_dict )
+ new_tool_dependencies[ dependency_key ] = new_set_environment_dict_list
+ else:
+ requirements_dict[ 'repository_name' ] = repository_name
+ requirements_dict[ 'repository_owner' ] = repository_owner
+ requirements_dict[ 'changeset_revision' ] = changeset_revision
+ new_tool_dependencies[ dependency_key ] = requirements_dict
+ tool_dependencies = new_tool_dependencies
# Cast unicode to string.
repo_info_dict[ str( repository.name ) ] = ( str( repository.description ),
str( repository_clone_url ),
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1406,7 +1406,7 @@
a local Galaxy instance.
"""
includes_tools = False
- includes_repository_dependencies = False
+ has_repository_dependencies = False
includes_tool_dependencies = False
repo_info_dicts = []
for tup in zip( util.listify( repository_ids ), util.listify( changeset_revisions ) ):
@@ -1417,8 +1417,8 @@
metadata = repository_metadata.metadata
if not includes_tools and 'tools' in metadata:
includes_tools = True
- if not includes_repository_dependencies and 'repository_dependencies' in metadata:
- includes_repository_dependencies = True
+ if not has_repository_dependencies and 'repository_dependencies' in metadata:
+ has_repository_dependencies = True
if not includes_tool_dependencies and 'tool_dependencies' in metadata:
includes_tool_dependencies = True
repo_dir = repository.repo_path( trans.app )
@@ -1431,11 +1431,12 @@
repository_owner=repository.user.username,
repository_name=repository.name,
repository=repository,
- metadata=None,
- repository_metadata=repository_metadata )
+ repository_metadata=repository_metadata,
+ tool_dependencies=None,
+ repository_dependencies=None )
repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
return dict( includes_tools=includes_tools,
- includes_repository_dependencies=includes_repository_dependencies,
+ has_repository_dependencies=has_repository_dependencies,
includes_tool_dependencies=includes_tool_dependencies,
repo_info_dicts=repo_info_dicts )
@web.json
@@ -1465,10 +1466,6 @@
def get_tool_dependencies( self, trans, **kwd ):
"""Handle a request from a Galaxy instance."""
params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- # If the request originated with the UpdateManager, it will not include a galaxy_url.
- galaxy_url = kwd.get( 'galaxy_url', '' )
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
@@ -1478,10 +1475,8 @@
break
metadata = downloadable_revision.metadata
tool_dependencies = metadata.get( 'tool_dependencies', '' )
- from_install_manager = kwd.get( 'from_install_manager', False )
- if from_install_manager:
- if tool_dependencies:
- return encoding_util.tool_shed_encode( tool_dependencies )
+ if tool_dependencies:
+ return encoding_util.tool_shed_encode( tool_dependencies )
return ''
@web.expose
def get_tool_versions( self, trans, **kwd ):
@@ -1506,6 +1501,53 @@
if tool_version_dicts:
return json.to_json_string( tool_version_dicts )
return ''
+ @web.json
+ def get_updated_repository_information( self, trans, name, owner, changeset_revision, **kwd ):
+ """Generate a dictionary that contains the information about a repository that is necessary for installing it into a local Galaxy instance."""
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
+ repository_id = trans.security.encode_id( repository.id )
+ repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
+ repo_info_dict = suc.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=changeset_revision,
+ ctx_rev=str( ctx.rev() ),
+ repository_owner=repository.user.username,
+ repository_name=repository.name,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ tool_dependencies=None,
+ repository_dependencies=None )
+ metadata = repository_metadata.metadata
+ if metadata:
+ readme_files_dict = suc.build_readme_files_dict( metadata )
+ if 'tools' in metadata:
+ includes_tools = True
+ else:
+ includes_tools = False
+ else:
+ readme_files_dict = None
+ includes_tools = False
+ # See if the repo_info_dict was populated with repository_dependencies or tool_dependencies.
+ for name, repo_info_tuple in repo_info_dict.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if repository_dependencies:
+ has_repository_dependencies = True
+ else:
+ has_repository_dependencies = False
+ if tool_dependencies:
+ includes_tool_dependencies = True
+ else:
+ includes_tool_dependencies = False
+ return dict( includes_tools=includes_tools,
+ has_repository_dependencies=has_repository_dependencies,
+ includes_tool_dependencies=includes_tool_dependencies,
+ readme_files_dict=readme_files_dict,
+ repo_info_dict=repo_info_dict )
def get_versions_of_tool( self, trans, repository, repository_metadata, guid ):
"""Return the tool lineage in descendant order for the received guid contained in the received repository_metadata.tool_versions."""
encoded_id = trans.security.encode_id( repository.id )
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -485,10 +485,14 @@
sub_folder.repository_dependencies.append( repository_dependency )
for repository_dependency in rd_value:
if trans.webapp.name == 'galaxy':
- # We have two extra items in the tuple, repository.id and repository.status.
- tool_shed_repository_id = repository_dependency[ 4 ]
- installation_status = repository_dependency[ 5 ]
- repository_dependency = repository_dependency[ 0:4 ]
+ if len( repository_dependency ) == 6:
+ # We have two extra items in the tuple, repository.id and repository.status.
+ tool_shed_repository_id = repository_dependency[ 4 ]
+ installation_status = repository_dependency[ 5 ]
+ repository_dependency = repository_dependency[ 0:4 ]
+ else:
+ tool_shed_repository_id = None
+ installation_status = 'unknown'
else:
tool_shed_repository_id = None
installation_status = None
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 lib/galaxy/webapps/galaxy/controllers/admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -711,8 +711,7 @@
tool_dependencies_dict = {}
repository_name = elem.get( 'name' )
changeset_revision = elem.get( 'changeset_revision' )
- url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s&from_install_manager=True' % \
- ( tool_shed_url, repository_name, changeset_revision )
+ url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s' % ( tool_shed_url, repository_name, changeset_revision )
response = urllib2.urlopen( url )
text = response.read()
response.close()
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -374,21 +374,41 @@
if repository.uninstalled:
# Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated so that we
# can reset the metadata if necessary. This will ensure that information about repository dependencies and tool dependencies
- # will be current.
+ # will be current. Only allow selecting a different section in the tool panel if the repository was uninstalled.
current_changeset_revision, current_ctx_rev, includes_tools, has_repository_dependencies = \
shed_util.get_update_to_changeset_revision_and_ctx_rev( trans, repository )
if current_ctx_rev == repository.ctx_rev:
- includes_tools = repository.includes_tools
- has_repository_dependencies = repository.has_repository_dependencies
- if includes_tools or has_repository_dependencies:
- # Only allow selecting a different section in the tool panel if the repository was uninstalled.
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='reselect_tool_panel_section',
- **kwd ) )
+ # The uninstalled repository is current.
+ if repository.includes_tools or repository.has_repository_dependencies:
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='reselect_tool_panel_section',
+ **kwd ) )
+ else:
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='reinstall_repository',
+ **kwd ) )
else:
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='reinstall_repository',
- **kwd ) )
+ # The uninstalled repository has updates available in the tool shed.
+ updated_repo_info_dict = self.get_updated_repository_information( trans=trans,
+ repository_id=trans.security.encode_id( repository.id ),
+ repository_name=repository.name,
+ repository_owner=repository.owner,
+ changeset_revision=current_changeset_revision )
+ has_repository_dependencies = updated_repo_info_dict.get( 'has_repository_dependencies', False )
+ includes_tool_dependencies = updated_repo_info_dict.get( 'includes_tool_dependencies', False )
+ if has_repository_dependencies or includes_tool_dependencies:
+ json_repo_info_dict = json.to_json_string( updated_repo_info_dict )
+ encoded_repo_info_dict = encoding_util.tool_shed_encode( json_repo_info_dict )
+ kwd[ 'latest_changeset_revision' ] = current_changeset_revision
+ kwd[ 'latest_ctx_rev' ] = current_ctx_rev
+ kwd[ 'updated_repo_info_dict' ] = encoded_repo_info_dict
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='reselect_tool_panel_section',
+ **kwd ) )
+ else:
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='reinstall_repository',
+ **kwd ) )
else:
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='activate_repository',
@@ -581,6 +601,45 @@
else:
text = ''
return text
+ @web.expose
+ @web.require_admin
+ def get_tool_dependencies( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
+ """
+ Send a request to the appropriate tool shed to retrieve the dictionary of tool dependencies defined for the received repository name,
+ owner and changeset revision. The received repository_id is the encoded id of the installed tool shed repository in Galaxy. We need
+ it so that we can derive the tool shed from which it was installed.
+ """
+ repository = suc.get_installed_tool_shed_repository( trans, repository_id )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository_name, repository_owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ if len( raw_text ) > 2:
+ encoded_text = json.from_json_string( raw_text )
+ text = encoding_util.tool_shed_decode( encoded_text )
+ else:
+ text = ''
+ return text
+ @web.expose
+ @web.require_admin
+ def get_updated_repository_information( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
+ """
+ Send a request to the appropriate tool shed to retrieve the dictionary of information required to reinstall an updated revision of an
+ uninstalled tool shed repository.
+ """
+ repository = suc.get_installed_tool_shed_repository( trans, repository_id )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_updated_repository_information?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository_name, repository_owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ repo_information_dict = json.from_json_string( raw_text )
+ return repo_information_dict
def get_versions_of_tool( self, app, guid ):
tool_version = shed_util.get_tool_version( app, guid )
return tool_version.get_version_ids( app, reverse=True )
@@ -1149,7 +1208,7 @@
status = kwd.get( 'status', 'done' )
tool_shed_url = kwd[ 'tool_shed_url' ]
# Handle repository dependencies.
- includes_repository_dependencies = util.string_as_bool( kwd.get( 'includes_repository_dependencies', False ) )
+ has_repository_dependencies = util.string_as_bool( kwd.get( 'has_repository_dependencies', False ) )
install_repository_dependencies = kwd.get( 'install_repository_dependencies', '' )
# Every repository will be installed into the same tool panel section or all will be installed outside of any sections.
new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
@@ -1173,12 +1232,12 @@
response.close()
repo_information_dict = json.from_json_string( raw_text )
includes_tools = util.string_as_bool( repo_information_dict.get( 'includes_tools', False ) )
- includes_repository_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_repository_dependencies', False ) )
+ has_repository_dependencies = util.string_as_bool( repo_information_dict.get( 'has_repository_dependencies', False ) )
includes_tool_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_tool_dependencies', False ) )
encoded_repo_info_dicts = util.listify( repo_information_dict.get( 'repo_info_dicts', [] ) )
repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
- if ( not includes_tools and not includes_repository_dependencies ) or \
- ( ( includes_tools or includes_repository_dependencies ) and kwd.get( 'select_tool_panel_section_button', False ) ):
+ if ( not includes_tools and not has_repository_dependencies ) or \
+ ( ( includes_tools or has_repository_dependencies ) and kwd.get( 'select_tool_panel_section_button', False ) ):
install_repository_dependencies = CheckboxField.is_checked( install_repository_dependencies )
if includes_tools:
shed_tool_conf = kwd[ 'shed_tool_conf' ]
@@ -1241,7 +1300,7 @@
for tsr in created_or_updated_tool_shed_repositories:
tool_panel_section_keys.append( tool_panel_section_key )
new_kwd = dict( includes_tools=includes_tools,
- includes_repository_dependencies=includes_repository_dependencies,
+ has_repository_dependencies=has_repository_dependencies,
install_repository_dependencies=install_repository_dependencies,
includes_tool_dependencies=includes_tool_dependencies,
install_tool_dependencies=install_tool_dependencies,
@@ -1331,7 +1390,7 @@
includes_tools=includes_tools,
includes_tool_dependencies=includes_tool_dependencies,
install_tool_dependencies_check_box=install_tool_dependencies_check_box,
- includes_repository_dependencies=includes_repository_dependencies,
+ has_repository_dependencies=has_repository_dependencies,
install_repository_dependencies_check_box=install_repository_dependencies_check_box,
new_tool_panel_section=new_tool_panel_section,
containers_dict=containers_dict,
@@ -1364,7 +1423,7 @@
tool_panel_section_key = None
tool_panel_section_keys = []
metadata = tool_shed_repository.metadata
+ # Keep track of tool dependencies defined for the current repository or those defined for any of its repository dependencies.
+ # Keep track of tool dependencies defined for the current repository or those defined for any of it's repository dependencies.
includes_tool_dependencies = tool_shed_repository.includes_tool_dependencies
if tool_shed_repository.includes_tools:
# Handle the selected tool panel location for loading tools included in the tool shed repository.
@@ -1381,7 +1440,7 @@
tool_shed_repository.installed_changeset_revision,
tool_shed_repository.ctx_rev,
repository_clone_url,
- tool_shed_repository.metadata,
+ metadata,
trans.model.ToolShedRepository.installation_status.NEW,
tool_shed_repository.installed_changeset_revision,
tool_shed_repository.owner,
@@ -1402,6 +1461,10 @@
changeset_revision=tool_shed_repository.changeset_revision )
else:
repository_dependencies = None
+ if metadata:
+ tool_dependencies = metadata.get( 'tool_dependencies', None )
+ else:
+ tool_dependencies = None
repo_info_dict = suc.create_repo_info_dict( trans=trans,
repository_clone_url=repository_clone_url,
changeset_revision=tool_shed_repository.changeset_revision,
@@ -1410,15 +1473,15 @@
repository_name=tool_shed_repository.name,
repository=None,
repository_metadata=None,
- metadata=metadata,
+ tool_dependencies=tool_dependencies,
repository_dependencies=repository_dependencies )
repo_info_dicts.append( repo_info_dict )
# Make sure all tool_shed_repository records exist.
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
- shed_util.create_repository_dependency_objects( trans,
- tool_path,
- tool_shed_url,
- repo_info_dicts,
+ shed_util.create_repository_dependency_objects( trans=trans,
+ tool_path=tool_path,
+ tool_shed_url=tool_shed_url,
+ repo_info_dicts=repo_info_dicts,
reinstalling=True,
install_repository_dependencies=install_repository_dependencies,
no_changes_checked=no_changes_checked,
@@ -1472,40 +1535,62 @@
@web.expose
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
- """Select or change the tool panel section to contain the tools included in the tool shed repositories being reinstalled."""
+ """
+ Select or change the tool panel section to contain the tools included in the tool shed repository being reinstalled. If there are updates
+ available for the repository in the tool shed, the tool_dependencies and repository_dependencies associated with the updated changeset revision
+ will have been retrieved from the tool shed and passed in the received kwd. In this case, the stored tool shed repository metadata from the
+ Galaxy database will not be used since it is outdated.
+ """
message = ''
- repository_id = kwd[ 'id' ]
+ repository_id = kwd.get( 'id', None )
+ latest_changeset_revision = kwd.get( 'latest_changeset_revision', None )
+ latest_ctx_rev = kwd.get( 'latest_ctx_rev', None )
tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
metadata = tool_shed_repository.metadata
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
- ctx_rev = suc.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
- repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
tool_path, relative_install_dir = tool_shed_repository.get_tool_relative_path( trans.app )
- repository_dependencies = self.get_repository_dependencies( trans=trans,
- repository_id=repository_id,
- repository_name=tool_shed_repository.name,
- repository_owner=tool_shed_repository.owner,
- changeset_revision=tool_shed_repository.changeset_revision )
- if repository_dependencies:
- includes_repository_dependencies = True
+ if latest_changeset_revision and latest_ctx_rev:
+ # There are updates available in the tool shed for the repository, so use the received dependency information which was retrieved from
+ # the tool shed.
+ encoded_updated_repo_info_dict = kwd.get( 'updated_repo_info_dict', None )
+ updated_repo_info_dict = encoding_util.tool_shed_decode( encoded_updated_repo_info_dict )
+ readme_files_dict = updated_repo_info_dict.get( 'readme_files_dict', None )
+ includes_tools = updated_repo_info_dict.get( 'includes_tools', False )
+ has_repository_dependencies = updated_repo_info_dict.get( 'has_repository_dependencies', False )
+ includes_tool_dependencies = updated_repo_info_dict.get( 'includes_tool_dependencies', False )
+ repo_info_dict = updated_repo_info_dict[ 'repo_info_dict' ]
else:
- includes_repository_dependencies = False
- includes_tool_dependencies = tool_shed_repository.includes_tool_dependencies
- repo_info_dict = suc.create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.changeset_revision,
- ctx_rev=ctx_rev,
- repository_owner=tool_shed_repository.owner,
- repository_name=tool_shed_repository.name,
- repository=None,
- repository_metadata=None,
- metadata=metadata,
- repository_dependencies=repository_dependencies )
- if includes_repository_dependencies:
- # Discover all repository dependencies and retrieve information for installing them.
- required_repo_info_dicts = shed_util.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ # There are no updates available from the tool shed for the repository, so use its locally stored metadata.
+ if metadata:
+ readme_files_dict = suc.build_readme_files_dict( metadata )
+ tool_dependencies = metadata.get( 'tool_dependencies', None )
+ else:
+ readme_files_dict = None
+ tool_dependencies = None
+ includes_tool_dependencies = tool_shed_repository.includes_tool_dependencies
+ repository_dependencies = self.get_repository_dependencies( trans=trans,
+ repository_id=repository_id,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ changeset_revision=tool_shed_repository.changeset_revision )
+ repo_info_dict = suc.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ ctx_rev=tool_shed_repository.ctx_rev,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
+ repository=None,
+ repository_metadata=None,
+ tool_dependencies=tool_dependencies,
+ repository_dependencies=repository_dependencies )
+ repository_name, repository_owner, changeset_revision, includes_tool_dependencies, installed_repository_dependencies, \
+ missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies = \
+ shed_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
+ if installed_repository_dependencies or missing_repository_dependencies:
+ has_repository_dependencies = True
else:
- required_repo_info_dicts = None
+ has_repository_dependencies = False
# Get the location in the tool panel in which the tool was originally loaded.
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
@@ -1533,14 +1618,14 @@
message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section. "
status = 'warning'
- # Populate the containers_dict from the metadata for the tool shed repository we're reinstalling, but make sure to include tool dependencies defined for
- # all of the repository's repository dependencies.
- containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- repository=tool_shed_repository,
- reinstalling=True,
- required_repo_info_dicts=required_repo_info_dicts )
+ containers_dict = shed_util.populate_containers_dict_for_new_install( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=readme_files_dict,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies )
# Since we're reinstalling we'll merge the list of missing repository dependencies into the list of installed repository dependencies since each displayed
# repository dependency will display a status, whether installed or missing.
containers_dict = suc.merge_missing_repository_dependencies_to_installed_container( containers_dict )
@@ -1561,7 +1646,7 @@
no_changes_check_box=no_changes_check_box,
original_section_name=original_section_name,
includes_tool_dependencies=includes_tool_dependencies,
- includes_repository_dependencies=includes_repository_dependencies,
+ has_repository_dependencies=has_repository_dependencies,
install_repository_dependencies_check_box=install_repository_dependencies_check_box,
install_tool_dependencies_check_box=install_tool_dependencies_check_box,
containers_dict=containers_dict,
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
@@ -33,7 +33,7 @@
${render_readme_section( containers_dict )}
<div style="clear: both"></div>
%endif
- %if includes_repository_dependencies or includes_tool_dependencies:
+ %if has_repository_dependencies or includes_tool_dependencies:
<div class="form-row"><table class="colored" width="100%"><th bgcolor="#EBD9B2">Confirm dependency installation</th>
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 templates/admin/tool_shed_repository/select_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -17,10 +17,10 @@
<%
# Handle the case where an uninstalled repository encountered errors during the process of being reinstalled. In
- # this case, the repository metadata is an empty dictionary, but one or both of includes_repository_dependencies
+ # this case, the repository metadata is an empty dictionary, but one or both of has_repository_dependencies
# and includes_tool_dependencies may be True. If either of these are True but we have no metadata, we cannot install
# repository dependencies on this pass.
- if includes_repository_dependencies:
+ if has_repository_dependencies:
repository_dependencies = containers_dict[ 'repository_dependencies' ]
missing_repository_dependencies = containers_dict[ 'missing_repository_dependencies' ]
if repository_dependencies or missing_repository_dependencies:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Fix missing import that was breaking unit tests.
by Bitbucket 16 Jan '13
by Bitbucket 16 Jan '13
16 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f199b236ceb0/
changeset: f199b236ceb0
user: inithello
date: 2013-01-16 21:14:36
summary: Fix missing import that was breaking unit tests.
affected #: 1 file
diff -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 -r f199b236ceb08e9588351223b6277861e9f87325 lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -13,7 +13,7 @@
from galaxy.datatypes import metadata
from galaxy.datatypes.checkers import is_gzip
from galaxy.datatypes.metadata import MetadataElement
-from galaxy.datatypes.sniff import get_headers
+from galaxy.datatypes.sniff import get_headers, get_test_fname
from galaxy.util.json import to_json_string
log = logging.getLogger(__name__)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Tool shed functional test enhancements. Tests for installing repositories via the tool search. Tests for reviewing repository components.
by Bitbucket 16 Jan '13
by Bitbucket 16 Jan '13
16 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3a2211dc42f1/
changeset: 3a2211dc42f1
user: inithello
date: 2013-01-16 19:19:45
summary: Tool shed functional test enhancements. Tests for installing repositories via the tool search. Tests for reviewing repository components.
affected #: 16 files
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/base/common.py
--- a/test/tool_shed/base/common.py
+++ b/test/tool_shed/base/common.py
@@ -8,6 +8,16 @@
test_user_1_email = 'test-1(a)bx.psu.edu'
test_user_1_name = 'user1'
+test_user_2 = None
+test_user_2_private_role = None
+test_user_2_email = 'test-2(a)bx.psu.edu'
+test_user_2_name = 'user2'
+
+test_user_3 = None
+test_user_3_private_role = None
+test_user_3_email = 'test-3(a)bx.psu.edu'
+test_user_3_name = 'user3'
+
new_repository_dependencies_xml = '''<?xml version="1.0"?><repositories${description}>
${dependency_lines}
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/base/test_db_util.py
--- a/test/tool_shed/base/test_db_util.py
+++ b/test/tool_shed/base/test_db_util.py
@@ -61,6 +61,17 @@
return sa_session.query( model.Repository ) \
.filter( model.Repository.table.c.id == repository_id ) \
.first()
+def get_repository_review_by_user_id_changeset_revision( user_id, repository_id, changeset_revision ):
+ review = sa_session.query( model.RepositoryReview ) \
+ .filter( and_( model.RepositoryReview.table.c.user_id == user_id,
+ model.RepositoryReview.table.c.repository_id == repository_id,
+ model.RepositoryReview.table.c.changeset_revision == changeset_revision ) ) \
+ .first()
+ return review
+def get_role_by_name( role_name ):
+ return sa_session.query( model.Role ) \
+ .filter( model.Role.table.c.name == role_name ) \
+ .first()
def get_user( email ):
return sa_session.query( model.User ) \
.filter( model.User.table.c.email==email ) \
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -35,11 +35,19 @@
self.galaxy_tool_dependency_dir = None # os.environ.get( 'GALAXY_TEST_TOOL_DEPENDENCY_DIR' )
self.shed_tools_dict = {}
self.home()
+ def add_repository_review_component( self, **kwd ):
+ url = '/repository_review/create_component?operation=create'
+ self.visit_url( url )
+ self.submit_form( 1, 'create_component_button', **kwd )
def browse_category( self, category, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/browse_valid_categories?sort=name&operation=valid_repositories_by_category&id=%s' % \
self.security.encode_id( category.id )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def browse_component_review( self, review, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/repository_review/browse_review?id=%s' % self.security.encode_id( review.id )
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def browse_repository( self, repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/browse_repository?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
@@ -173,6 +181,23 @@
self.submit_form( form_no=1, button="create_category_button", **kwd )
return test_db_util.get_category_by_name( kwd[ 'name' ] )
def create_checkbox_query_string( self, field_name, value ):
+ '''
+ From galaxy.web.form_builder.CheckboxField:
+ The hidden field is necessary because if the check box is not checked on the form, it will
+ not be included in the request params. The hidden field ensure that this will happen. When
+ parsing the request, the value 'true' in the hidden field actually means it is NOT checked.
+ See the is_checked() method below. The prefix is necessary in each case to ensure functional
+ correctness when the param is inside a conditional.
+
+ This may look strange upon initial inspection, but see the comments in the get_html() method
+ above for clarification. Basically, if value is not True, then it will always be a list with
+ 2 input fields ( a checkbox and a hidden field ) if the checkbox is checked. If it is not
+ checked, then value will be only the hidden field.
+
+ The create_checkbox_query_string method emulates the described behavior with URL query parameters.
+ This is currently necessary because twill does not correctly parse certain forms, so the test
+ method has to visit the intended form target "manually".
+ '''
field_value = str( value ).lower()
if value:
return '%s=%s&%s=%s' % ( field_name, field_value, field_name, field_value )
@@ -187,6 +212,27 @@
'repository_dependencies.xml',
filepath=filepath,
commit_message='Uploaded dependency on %s.' % ', '.join( repo.name for repo in depends_on ) )
+ def create_repository_review( self, repository, review_contents_dict, changeset_revision=None, copy_from=None):
+ strings_displayed = []
+ if not copy_from:
+ strings_displayed.append( 'Begin your review' )
+ strings_not_displayed = []
+ kwd = dict()
+ if not changeset_revision:
+ changeset_revision = self.get_repository_tip( repository )
+ url = '/repository_review/create_review?changeset_revision=%s&id=%s' % ( changeset_revision, self.security.encode_id( repository.id ) )
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+ strings_displayed = []
+ if copy_from:
+ old_changeset_revision, review_id = copy_from
+ strings_displayed = [ 'You have elected to create a new review', 'Select previous revision', changeset_revision ]
+ self.check_for_strings( strings_displayed )
+ strings_displayed = []
+ url = '/repository_review/create_review?changeset_revision=%s&id=%s&previous_review_id=%s' % \
+ ( self.get_repository_tip( repository ), self.security.encode_id( repository.id ), self.security.encode_id( review_id ) )
+ self.visit_url( url )
+ self.fill_review_form( review_contents_dict, strings_displayed, strings_not_displayed )
def create_user_in_galaxy( self, cntrller='user', email='test(a)bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
self.visit_galaxy_url( "/user/create?cntrller=%s&use_panels=False" % cntrller )
tc.fv( '1', 'email', email )
@@ -293,6 +339,10 @@
url = '/repository/get_file_contents?file_path=%s' % os.path.join( relative_path, filename )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def display_reviewed_repositories_owned_by_user( self, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/repository_review/reviewed_repositories_i_own'
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def edit_repository_categories( self, repository, categories_to_add=[], categories_to_remove=[], restore_original=True ):
url = '/repository/manage_repository?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
@@ -317,6 +367,10 @@
strings_not_displayed.append( "selected>%s" % category )
tc.submit( "manage_categories_button" )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def display_repository_reviews_by_user( self, user, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/repository_review/repository_reviews_by_user?id=%s' % self.security.encode_id( user.id )
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def edit_repository_information( self, repository, **kwd ):
url = '/repository/manage_repository?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
@@ -343,6 +397,21 @@
else:
string = string.replace( character, replacement )
return string
+ def fill_review_form( self, review_contents_dict, strings_displayed=[], strings_not_displayed=[] ):
+ kwd = dict()
+ for label, contents in review_contents_dict.items():
+ strings_displayed.append( label )
+ if contents:
+ kwd[ '%s__ESEP__comment' % label ] = contents[ 'comment' ]
+ kwd[ '%s__ESEP__rating' % label ] = contents[ 'rating' ]
+ if 'private' in contents:
+ kwd[ '%s__ESEP__private' % label ] = contents[ 'private' ]
+ kwd[ '%s__ESEP__approved' % label ] = contents[ 'approved' ]
+ else:
+ kwd[ '%s__ESEP__approved' % label ] = 'not_applicable'
+ self.submit_form( 1, 'Workflows__ESEP__review_button', **kwd )
+ strings_displayed.append( 'Reviews were saved' )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def galaxy_login( self, email='test(a)bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
previously_created, username_taken, invalid_username = \
self.create_user_in_galaxy( email=email, password=password, username=username, redirect=redirect )
@@ -409,7 +478,7 @@
raise Exception( "Entry for repository %s missing in hgweb config file %s." % ( lhs, self.hgweb_config_manager.hgweb_config ) )
def get_repository_changelog( self, repository ):
repo = hg.repository( ui.ui(), self.get_repo_path( repository ) )
- return [repo.changectx( changeset ) for changeset in repo.changelog ]
+ return [ ( repo.changectx( changeset ), changeset ) for changeset in repo.changelog ]
def get_repository_datatypes_count( self, repository ):
metadata = self.get_repository_metadata( repository )[0].metadata
if 'datatypes' not in metadata:
@@ -469,6 +538,22 @@
if include_invalid and 'invalid_tools' in repository_metadata.metadata:
invalid_tools.append( dict( tools=repository_metadata.metadata[ 'invalid_tools' ], changeset_revision=repository_metadata.changeset_revision ) )
return valid_tools, invalid_tools
+ def grant_role_to_user( self, user, role ):
+ strings_displayed = [ self.security.encode_id( role.id ), role.name ]
+ strings_not_displayed = []
+ self.visit_url( '/admin/roles' )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+ url = '/admin/roles?operation=manage+users+and+groups&id=%s' % self.security.encode_id( role.id )
+ self.visit_url( url )
+ strings_displayed = [ common.test_user_1_email, common.test_user_2_email ]
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+ # As elsewhere, twill limits the possibility of submitting the form, this time due to not executing the javascript
+ # attached to the role selection form. Visit the action url directly with the necessary parameters.
+ url = '/admin/manage_users_and_groups_for_role?id=%s&in_users=%d&operation=manage+users+and+groups&role_members_edit_button=Save' % \
+ ( self.security.encode_id( role.id ), user.id )
+ self.visit_url( url )
+ strings_displayed = [ "Role '%s' has been updated" % role.name ]
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def grant_write_access( self, repository, usernames=[], strings_displayed=[], strings_not_displayed=[] ):
self.display_manage_repository_page( repository )
tc.fv( "3", "allow_push", '-Select one' )
@@ -508,10 +593,38 @@
( ','.join( util.listify( repository_ids ) ), encoded_kwd, reinstalling )
self.visit_galaxy_url( url )
return util.listify( repository_ids )
+ def install_repositories_from_search_results( self, repositories, strings_displayed=[], strings_not_displayed=[], **kwd ):
+ '''
+ Normally, it would be possible to check the appropriate boxes in the search results, and click the install button. This works
+ in a browser, but Twill manages to lose the 'toolshedgalaxyurl' cookie between one page and the next, so it's necessary to work
+ around this by explicitly visiting the prepare_for_install method on the Galaxy side.
+ '''
+ url = '/admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
+ ( self.url, ','.join( self.security.encode_id( repository.id ) for repository in repositories ), \
+ ','.join( self.get_repository_tip( repository ) for repository in repositories ) )
+ self.visit_galaxy_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+ if 'install_tool_dependencies' in self.last_page():
+ form = tc.browser.get_form( 'select_tool_panel_section' )
+ checkbox = form.find_control( id="install_tool_dependencies" )
+ checkbox.disabled = False
+ if 'install_tool_dependencies' in kwd:
+ install_tool_dependencies = kwd[ 'install_tool_dependencies' ]
+ del kwd[ 'install_tool_dependencies' ]
+ else:
+ install_tool_dependencies = False
+ if install_tool_dependencies:
+ checkbox.selected = True
+ else:
+ checkbox.selected = False
+ self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
+ repository_ids = self.initiate_installation_process()
+ self.wait_for_repository_installation( repository_ids )
def install_repository( self, name, owner, category_name, install_tool_dependencies=False,
install_repository_dependencies=True, changeset_revision=None,
strings_displayed=[], strings_not_displayed=[], preview_strings_displayed=[],
- post_submit_strings_displayed=[], new_tool_panel_section=None, **kwd ):
+ post_submit_strings_displayed=[], new_tool_panel_section=None, includes_tools=True,
+ **kwd ):
self.browse_tool_shed( url=self.url )
self.browse_category( test_db_util.get_category_by_name( category_name ) )
self.preview_repository_in_tool_shed( name, owner, strings_displayed=preview_strings_displayed )
@@ -539,8 +652,9 @@
kwd[ 'shed_tool_conf' ] = self.shed_tool_conf
if new_tool_panel_section:
kwd[ 'new_tool_panel_section' ] = new_tool_panel_section
- self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
- self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
+ if includes_tools:
+ self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
+ self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
repository_ids = self.initiate_installation_process( new_tool_panel_section=new_tool_panel_section )
self.wait_for_repository_installation( repository_ids )
def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
@@ -563,6 +677,10 @@
( self.security.encode_id( metadata.id ), tool_shed_encode( workflow_name ) )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def manage_review_components( self, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/repository_review/manage_components'
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def preview_repository_in_tool_shed( self, name, owner, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
repository = test_db_util.get_repository_by_name_and_owner( name, owner )
if not changeset_revision:
@@ -587,31 +705,19 @@
install_repository_dependencies=True,
install_tool_dependencies=False,
no_changes=True,
- new_tool_panel_section='' ):
+ new_tool_panel_section='',
+ strings_displayed=[],
+ strings_not_displayed=[] ):
url = '/admin_toolshed/reselect_tool_panel_section?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
- # From galaxy.web.form_builder.CheckboxField:
- # The hidden field is necessary because if the check box is not checked on the form, it will
- # not be included in the request params. The hidden field ensure that this will happen. When
- # parsing the request, the value 'true' in the hidden field actually means it is NOT checked.
- # See the is_checked() method below. The prefix is necessary in each case to ensure functional
- # correctness when the param is inside a conditional.
- #
- # This may look strange upon initial inspection, but see the comments in the get_html() method
- # above for clarification. Basically, if value is not True, then it will always be a list with
- # 2 input fields ( a checkbox and a hidden field ) if the checkbox is checked. If it is not
- # checked, then value will be only the hidden field.
- #
- # The create_checkbox_query_string method emulates the described behavior with URL query parameters.
- # This is currently necessary because twill does not correctly parse the reselect tool panel section
- # form, so the test method has to visit the intended form target "manually".
+ self.check_for_strings( strings_displayed, strings_not_displayed=[] )
repo_dependencies = self.create_checkbox_query_string( field_name='install_repository_dependencies', value=install_repository_dependencies )
tool_dependencies = self.create_checkbox_query_string( field_name='install_tool_dependencies', value=install_tool_dependencies )
encoded_repository_id = self.security.encode_id( installed_repository.id )
url = '/admin_toolshed/reinstall_repository?id=%s&%s&%s&no_changes=%s&new_tool_panel_section=%s' % \
( encoded_repository_id, repo_dependencies, tool_dependencies, str( no_changes ), new_tool_panel_section )
self.visit_galaxy_url( url )
- # Then manually initiate the install process, as with installing a repository. See comments in the
+ # Manually initiate the install process, as with installing a repository. See comments in the
# initiate_installation_process method for details.
repository_ids = self.initiate_installation_process( install_tool_dependencies,
install_repository_dependencies,
@@ -639,13 +745,28 @@
url = '/repository/reset_all_metadata?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
self.check_for_strings( [ 'All repository metadata has been reset.' ] )
+ def review_repository( self, repository, review_contents_dict, user=None, changeset_revision=None ):
+ strings_displayed = []
+ strings_not_displayed = []
+ kwd = dict()
+ if not changeset_revision:
+ changeset_revision = self.get_repository_tip( repository )
+ if user:
+ review = test_db_util.get_repository_review_by_user_id_changeset_revision( user.id, repository.id, changeset_revision )
+ url = '/repository_review/edit_review?id=%s' % self.security.encode_id( review.id )
+ self.visit_url( url )
+ self.fill_review_form( review_contents_dict, strings_displayed, strings_not_displayed )
def revoke_write_access( self, repository, username ):
url = '/repository/manage_repository?user_access_button=Remove&id=%s&remove_auth=%s' % \
( self.security.encode_id( repository.id ), username )
self.visit_url( url )
- def search_for_valid_tools( self, search_fields={}, exact_matches=False, strings_displayed=[], strings_not_displayed=[] ):
+ def search_for_valid_tools( self, search_fields={}, exact_matches=False, strings_displayed=[], strings_not_displayed=[], from_galaxy=False ):
+ if from_galaxy:
+ galaxy_url = '?galaxy_url=%s' % self.galaxy_url
+ else:
+ galaxy_url = ''
for field_name, search_string in search_fields.items():
- url = '/repository/find_tools'
+ url = '/repository/find_tools%s' % galaxy_url
self.visit_url( url )
tc.fv( "1", "exact_matches", exact_matches )
tc.fv( "1", field_name, search_string )
@@ -746,6 +867,17 @@
found = True
break
assert found, 'No entry for %s in %s.' % ( data_table, self.shed_tool_data_table_conf )
+ def verify_repository_reviews( self, repository, reviewer=None, strings_displayed=[], strings_not_displayed=[] ):
+ changeset_revision = self.get_repository_tip( repository )
+ # Verify that the currently logged in user has a repository review for the specified repository, reviewer, and changeset revision.
+ strings_displayed=[ repository.name, reviewer.username, changeset_revision ]
+ self.display_reviewed_repositories_owned_by_user( strings_displayed=strings_displayed )
+ # Verify that the reviewer has reviewed the specified repository's changeset revision.
+ strings_displayed=[ repository.name, repository.description, changeset_revision ]
+ self.display_repository_reviews_by_user( reviewer, strings_displayed=strings_displayed )
+ # Load the review and check for the components passed in strings_displayed.
+ review = test_db_util.get_repository_review_by_user_id_changeset_revision( reviewer.id, repository.id, changeset_revision )
+ self.browse_component_review( review, strings_displayed=strings_displayed )
def verify_tool_metadata_for_installed_repository( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
repository_id = self.security.encode_id( installed_repository.id )
for tool in installed_repository.metadata[ 'tools' ]:
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/functional/test_0000_basic_repository_features.py
--- a/test/tool_shed/functional/test_0000_basic_repository_features.py
+++ b/test/tool_shed/functional/test_0000_basic_repository_features.py
@@ -6,7 +6,7 @@
repository_long_description = "Long description of Galaxy's filtering tool for test 0000"
class TestBasicRepositoryFeatures( ShedTwillTestCase ):
-
+ '''Test core repository features.'''
def test_0000_initiate_users( self ):
"""Create necessary user accounts and login as an admin user."""
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- a/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -10,7 +10,7 @@
filtering_repository_long_description = "Long description of Galaxy's filtering tool"
class TestRepositoryCircularDependencies( ShedTwillTestCase ):
- '''Verify that the code correctly handles circular dependencies.'''
+ '''Verify that the code correctly displays repositories with circular repository dependencies.'''
def test_0000_initiate_users( self ):
"""Create necessary user accounts."""
self.logout()
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/functional/test_0050_circular_dependencies_4_levels.py
--- a/test/tool_shed/functional/test_0050_circular_dependencies_4_levels.py
+++ b/test/tool_shed/functional/test_0050_circular_dependencies_4_levels.py
@@ -169,6 +169,12 @@
freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
bismark_repository = test_db_util.get_repository_by_name_and_owner( bismark_repository_name, common.test_user_1_name )
dependency_xml_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'freebayes' ] )
+ # convert_chars depends on column_maker
+ # column_maker depends on convert_chars
+ # emboss depends on emboss_datatypes
+ # emboss_datatypes depends on bismark
+ # freebayes depends on freebayes, emboss, emboss_datatypes, and column_maker
+ # filtering depends on emboss
self.create_repository_dependency( convert_repository, depends_on=[ column_repository ], filepath=dependency_xml_path )
self.create_repository_dependency( column_repository, depends_on=[ convert_repository ], filepath=dependency_xml_path )
self.create_repository_dependency( emboss_datatypes_repository, depends_on=[ bismark_repository ], filepath=dependency_xml_path )
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/functional/test_0060_workflows.py
--- a/test/tool_shed/functional/test_0060_workflows.py
+++ b/test/tool_shed/functional/test_0060_workflows.py
@@ -8,6 +8,7 @@
workflow_name = 'Workflow for 0060_filter_workflow_repository'
class TestToolShedWorkflowFeatures( ShedTwillTestCase ):
+ '''Test valid and invalid workflows.'''
def test_0000_initiate_users( self ):
"""Create necessary user accounts and login as an admin user."""
self.logout()
@@ -39,7 +40,8 @@
workflow = file( self.get_filename( 'filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga' ), 'r' ).read()
workflow = workflow.replace( '__TEST_TOOL_SHED_URL__', self.url.replace( 'http://', '' ) )
workflow_filepath = self.generate_temp_path( 'test_0060', additional_paths=[ 'filtering_workflow' ] )
- os.makedirs( workflow_filepath )
+ if not os.path.exists( workflow_filepath ):
+ os.makedirs( workflow_filepath )
file( os.path.join( workflow_filepath, workflow_filename ), 'w+' ).write( workflow )
self.upload_file( repository,
workflow_filename,
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/functional/test_0090_tool_search.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0090_tool_search.py
@@ -0,0 +1,171 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+emboss_datatypes_repository_name = 'emboss_datatypes_0090'
+emboss_datatypes_repository_description = "Datatypes for emboss"
+emboss_datatypes_repository_long_description = "Long description of Emboss' datatypes"
+
+emboss_repository_name = 'emboss_0090'
+emboss_repository_description = "Galaxy's emboss tool"
+emboss_repository_long_description = "Long description of Galaxy's emboss tool"
+
+filtering_repository_name = 'filtering_0090'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+freebayes_repository_name = 'freebayes_0090'
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
+
+bwa_base_repository_name = 'bwa_base_0090'
+bwa_base_repository_description = "BWA Base"
+bwa_base_repository_long_description = "NT space mapping with BWA"
+
+bwa_color_repository_name = 'bwa_color_0090'
+bwa_color_repository_description = "BWA Color"
+bwa_color_repository_long_description = "Color space mapping with BWA"
+
+category_name = 'Test 0090 Tool Search And Installation'
+category_description = 'Test 0090 Tool Search And Installation'
+
+class TestRepositoryCircularDependenciesAgain( ShedTwillTestCase ):
+ '''Test tool search and installation with a complex repository dependency structure.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_bwa_base_repository( self ):
+ '''Create and populate bwa_base_0090.'''
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=bwa_base_repository_name,
+ description=bwa_base_repository_description,
+ long_description=bwa_base_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository,
+ 'bwa/bwa_base.tar',
+ strings_displayed=[],
+ commit_message='Uploaded bwa_base.tar.' )
+ def test_0010_create_bwa_color_repository( self ):
+ '''Create and populate bwa_color_0090.'''
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=bwa_color_repository_name,
+ description=bwa_color_repository_description,
+ long_description=bwa_color_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository,
+ 'bwa/bwa_color.tar',
+ strings_displayed=[],
+ commit_message='Uploaded bwa_color.tar.' )
+ def test_0015_create_emboss_datatypes_repository( self ):
+ '''Create and populate emboss_datatypes_0090.'''
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=emboss_datatypes_repository_name,
+ description=emboss_datatypes_repository_description,
+ long_description=emboss_datatypes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository,
+ 'emboss/datatypes/datatypes_conf.xml',
+ strings_displayed=[],
+ commit_message='Uploaded datatypes_conf.xml.' )
+ def test_0020_create_emboss_repository( self ):
+ '''Create and populate emboss_0090.'''
+ category = self.create_category( name=category_name, description=category_description )
+ repository = self.get_or_create_repository( name=emboss_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository,
+ 'emboss/emboss.tar',
+ strings_displayed=[],
+ commit_message='Uploaded tool tarball.' )
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0090', additional_paths=[ 'emboss' ] )
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Emboss depends on the emboss_datatypes repository.' )
+ self.upload_file( repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on emboss_datatypes.' )
+ def test_0025_create_filtering_repository( self ):
+ '''Create and populate filtering_0090.'''
+ category = self.create_category( name=category_name, description=category_description )
+ filtering_repository = self.get_or_create_repository( name=filtering_repository_name,
+ description=filtering_repository_description,
+ long_description=filtering_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( filtering_repository,
+ 'filtering/filtering_1.1.0.tar',
+ strings_displayed=[],
+ commit_message='Uploaded filtering.tar.' )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0090', additional_paths=[ 'filtering' ] )
+ self.generate_repository_dependency_xml( [ emboss_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Filtering depends on the emboss repository.' )
+ self.upload_file( filtering_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on emboss.' )
+ def test_0030_create_freebayes_repository( self ):
+ '''Create and populate freebayes_0090.'''
+ category = self.create_category( name=category_name, description=category_description )
+ repository = self.get_or_create_repository( name=freebayes_repository_name,
+ description=freebayes_repository_description,
+ long_description=freebayes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository,
+ 'freebayes/freebayes.tar',
+ strings_displayed=[],
+ commit_message='Uploaded freebayes.tar.' )
+ def test_0035_create_and_upload_dependency_definitions( self ):
+ '''Create and upload repository dependency definitions.'''
+ bwa_color_repository = test_db_util.get_repository_by_name_and_owner( bwa_color_repository_name, common.test_user_1_name )
+ bwa_base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ emboss_datatypes_repository = test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ dependency_xml_path = self.generate_temp_path( 'test_0090', additional_paths=[ 'freebayes' ] )
+ self.create_repository_dependency( emboss_repository, depends_on=[ emboss_datatypes_repository ], filepath=dependency_xml_path )
+ self.create_repository_dependency( filtering_repository, depends_on=[ freebayes_repository ], filepath=dependency_xml_path )
+ self.create_repository_dependency( bwa_base_repository, depends_on=[ emboss_repository ], filepath=dependency_xml_path )
+ self.create_repository_dependency( bwa_color_repository, depends_on=[ filtering_repository ], filepath=dependency_xml_path )
+ def test_0040_verify_repository_dependencies( self ):
+ '''Verify the generated dependency structure.'''
+ bwa_color_repository = test_db_util.get_repository_by_name_and_owner( bwa_color_repository_name, common.test_user_1_name )
+ bwa_base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ emboss_datatypes_repository = test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ self.check_repository_dependency( emboss_repository, emboss_datatypes_repository )
+ self.check_repository_dependency( filtering_repository, freebayes_repository )
+ self.check_repository_dependency( bwa_base_repository, emboss_repository )
+ self.check_repository_dependency( bwa_color_repository, filtering_repository )
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/functional/test_0400_repository_component_reviews.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0400_repository_component_reviews.py
@@ -0,0 +1,279 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+repository_name = 'filtering_0400'
+repository_description = 'Galaxy filtering tool for test 0400'
+repository_long_description = 'Long description of Galaxy filtering tool for test 0400'
+
+class TestRepositoryComponentReviews( ShedTwillTestCase ):
+ '''Test repository component review features.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ test_user_2 = test_db_util.get_user( common.test_user_2_email )
+ assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+ test_user_2_private_role = test_db_util.get_private_role( test_user_2 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_grant_reviewer_role( self ):
+ '''Grant the repository reviewer role to test_user_2.'''
+ reviewer_role = test_db_util.get_role_by_name( 'Repository Reviewer' )
+ test_user_2 = test_db_util.get_user( common.test_user_2_email )
+ self.grant_role_to_user( test_user_2, reviewer_role )
+ def test_0010_verify_repository_review_components( self ):
+ '''Ensure that the required review components exist.'''
+ strings_not_displayed=[ 'Repository dependencies' ]
+ self.manage_review_components( strings_not_displayed=strings_not_displayed )
+ self.add_repository_review_component( name='Repository dependencies',
+ description='Repository dependencies defined in a file named repository_dependencies.xml included in the repository' )
+ strings_displayed=[ 'Data types', 'Functional tests', 'README', 'Repository dependencies', 'Tool dependencies', 'Tools', 'Workflows' ]
+ self.manage_review_components( strings_displayed=strings_displayed )
+ def test_0015_create_repository( self ):
+ """Create and populate the filtering repository"""
+ category = self.create_category( name='Test 0400 Repository Component Reviews', description='Test 0400 Repository Component Reviews' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ 'Repository %s' % "'%s'" % repository_name,
+ 'Repository %s has been created' % "'%s'" % repository_name ]
+ repository = self.get_or_create_repository( name=repository_name,
+ description=repository_description,
+ long_description=repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( repository, 'filtering/filtering_1.1.0.tar', commit_message="Uploaded filtering 1.1.0" )
+ def test_0020_review_initial_revision_data_types( self ):
+ '''Review the datatypes component for the current tip revision.'''
+ # Review this revision:
+ # Data types (N/A)
+ # Functional tests (One star, comment 'functional tests missing')
+ # README (N/A)
+ # Repository dependencies (N/A)
+ # Tool dependencies (N/A)
+ # Tools (5 stars, good review)
+ # Workflows (N/A)
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ review_contents_dict = { 'Data types': dict() }
+ self.create_repository_review( repository, review_contents_dict )
+ def test_0025_verify_datatype_review( self ):
+ '''Verify that the datatypes component review displays correctly.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_displayed = [ 'Data types', 'not_applicable' ]
+ strings_not_displayed = [ 'Functional tests', 'README', 'Repository dependencies', 'Tool dependencies', 'Tools', 'Workflows' ]
+ self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+ def test_0030_review_initial_revision_functional_tests( self ):
+ '''Review the functional tests component for the current tip revision.'''
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ review_contents_dict = { 'Functional tests': dict( rating=1, comment='Functional tests missing', approved='no', private='yes' ) }
+ self.review_repository( repository, review_contents_dict, user )
+# def test_0030_verify_review_display( self ):
+# '''Verify that private reviews are restricted to owner and reviewer, and non-private views are viewable by others.'''
+# # Currently not implemented because third parties cannot view reviews whether they are private or not.
+# self.logout()
+# self.login( email=common.test_user_3_email, username=common.test_user_3_name )
+ def test_0035_verify_functional_test_review( self ):
+ '''Verify that the functional tests component review displays correctly.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_displayed=[ 'Functional tests', 'Functional tests missing', 'no' ]
+ strings_not_displayed = [ 'README', 'Repository dependencies', 'Tool dependencies', 'Tools', 'Workflows' ]
+ self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+ def test_0040_review_readme( self ):
+ '''Review the readme component for the current tip revision.'''
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ review_contents_dict = { 'README': dict() }
+ self.review_repository( repository, review_contents_dict, user )
+ def test_0045_verify_readme_review( self ):
+ '''Verify that the readme component review displays correctly.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_displayed=[ 'README', 'not_applicable' ]
+ strings_not_displayed = [ 'Repository dependencies', 'Tool dependencies', 'Tools', 'Workflows' ]
+ self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+ def test_0050_review_repository_dependencies( self ):
+ '''Review the repository dependencies component for the current tip revision.'''
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ review_contents_dict = { 'Repository dependencies': dict() }
+ self.review_repository( repository, review_contents_dict, user )
+ def test_0055_verify_repository_dependency_review( self ):
+ '''Verify that the repository dependencies component review displays correctly.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_displayed=[ 'Repository dependencies', 'not_applicable' ]
+ strings_not_displayed = [ 'Tool dependencies', 'Tools', 'Workflows' ]
+ self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+ def test_0060_review_tool_dependencies( self ):
+ '''Review the tool dependencies component for the current tip revision.'''
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ review_contents_dict = { 'Tool dependencies': dict() }
+ self.review_repository( repository, review_contents_dict, user )
+ def test_0065_verify_tool_dependency_review( self ):
+ '''Verify that the tool dependencies component review displays correctly.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_displayed=[ 'Tool dependencies', 'not_applicable' ]
+ strings_not_displayed = [ 'Tools', 'Workflows' ]
+ self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+ def test_0070_review_tools( self ):
+ '''Review the tools component for the current tip revision.'''
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ review_contents_dict = { 'Tools': dict( rating=5, comment='Excellent tool, easy to use.', approved='yes', private='no' ) }
+ self.review_repository( repository, review_contents_dict, test_db_util.get_user( common.test_user_2_email ) )
+ def test_0075_verify_tools_review( self ):
+ '''Verify that the tools component review displays correctly.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_displayed=[ 'Tools', 'yes', 'Excellent tool, easy to use.' ]
+ strings_not_displayed = [ 'Workflows' ]
+ self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+ def test_0080_review_workflows( self ):
+ '''Review the workflows component for the current tip revision.'''
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ review_contents_dict = { 'Workflows': dict() }
+ self.review_repository( repository, review_contents_dict, user )
+ def test_0085_verify_workflows_review( self ):
+ '''Verify that the workflows component review displays correctly.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_displayed=[ 'Workflows', 'not_applicable' ]
+ self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+ def test_0090_upload_readme_file( self ):
+ '''Upload a readme file to the filtering repository.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ self.upload_file( repository, 'readme.txt', commit_message="Uploaded readme.txt" )
+ def test_0095_review_new_changeset_readme_component( self ):
+ '''Update the filtering repository's readme component review to reflect the presence of the readme file.'''
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ # Get the changeset immediately prior to the tip, and pass it to the create review method.
+ changelog = self.get_repository_changelog( repository )
+ changeset_revision, ctx_revision = changelog[-2]
+ previous_review = test_db_util.get_repository_review_by_user_id_changeset_revision( user.id, repository.id, str( changeset_revision ) )
+ review_contents_dict = { 'README': dict( rating=5, comment='Clear and concise readme file, a true pleasure to read.', approved='yes', private='no' ) }
+ self.create_repository_review( repository,
+ review_contents_dict,
+ changeset_revision=self.get_repository_tip( repository ),
+ copy_from=( str( changeset_revision ), previous_review.id ) )
+ def test_0100_verify_readme_review( self ):
+ '''Verify that the readme component review displays correctly.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_displayed = [ 'README', 'yes', 'Clear and concise readme file, a true pleasure to read.' ]
+ self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+ def test_0105_upload_test_data( self ):
+ '''Upload the missing test data to the filtering repository.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ self.upload_file( repository, 'filtering/filtering_test_data.tar', commit_message="Uploaded test data." )
+ def test_0110_review_new_changeset_functional_tests( self ):
+ '''Update the filtering repository's functional tests component review to reflect the presence of the test data.'''
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ # Get the changeset immediately prior to the tip, and pass it to the create review method.
+ changelog = self.get_repository_changelog( repository )
+ changeset_revision, ctx_revision = changelog[-2]
+ previous_review = test_db_util.get_repository_review_by_user_id_changeset_revision( user.id, repository.id, str( changeset_revision ) )
+ review_contents_dict = { 'Functional tests': dict( rating=5, comment='A good set of functional tests.', approved='yes', private='no' ) }
+ self.create_repository_review( repository,
+ review_contents_dict,
+ changeset_revision=self.get_repository_tip( repository ),
+ copy_from=( str( changeset_revision ), previous_review.id ) )
+ def test_0115_verify_functional_tests_review( self ):
+ '''Verify that the functional tests component review displays correctly.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_displayed=[ 'Functional tests', 'yes', 'A good set of functional tests.' ]
+ self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+ def test_0120_upload_new_tool_version( self ):
+ '''Upload filtering 2.2.0 to the filtering repository.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ self.upload_file( repository,
+ 'filtering/filtering_2.2.0.tar',
+ commit_message="Uploaded filtering 2.2.0",
+ remove_repo_files_not_in_tar='No' )
+ def test_0125_review_new_changeset_functional_tests( self ):
+ '''Update the filtering repository's review to apply to the new changeset with filtering 2.2.0.'''
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ # Get the changeset immediately prior to the tip, and pass it to the create review method.
+ changelog = self.get_repository_changelog( repository )
+ changeset_revision, ctx_revision = changelog[-2]
+ previous_review = test_db_util.get_repository_review_by_user_id_changeset_revision( user.id, repository.id, str( changeset_revision ) )
+ # Something needs to change so that the review will save.
+ review_contents_dict = { 'Tools': dict( rating=5, comment='Version 2.2.0 does the impossible and improves this tool.', approved='yes', private='yes' ) }
+ self.create_repository_review( repository,
+ review_contents_dict,
+ changeset_revision=self.get_repository_tip( repository ),
+ copy_from=( str( changeset_revision ), previous_review.id ) )
+ def test_0135_verify_review_for_new_version( self ):
+ '''Verify that the reviews display correctly for this changeset revision.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_displayed = [ 'Data types', 'Functional tests', 'yes', 'A good set of functional tests.', 'README', 'yes', 'Workflows', 'Tools' ]
+ strings_displayed.extend( [ 'Clear and concise readme file, a true pleasure to read.', 'Tool dependencies', 'not_applicable' ] )
+ strings_displayed.extend( [ 'Repository dependencies', 'Version 2.2.0 does the impossible and improves this tool.' ] )
+ self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/functional/test_1050_circular_dependencies_4_levels.py
--- a/test/tool_shed/functional/test_1050_circular_dependencies_4_levels.py
+++ b/test/tool_shed/functional/test_1050_circular_dependencies_4_levels.py
@@ -163,6 +163,12 @@
freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
bismark_repository = test_db_util.get_repository_by_name_and_owner( bismark_repository_name, common.test_user_1_name )
dependency_xml_path = self.generate_temp_path( 'test_1050', additional_paths=[ 'dependencies' ] )
+ # convert_chars depends on column_maker
+ # column_maker depends on convert_chars
+ # emboss depends on emboss_datatypes
+ # emboss_datatypes depends on bismark
+ # freebayes depends on freebayes, emboss, emboss_datatypes, and column_maker
+ # filtering depends on emboss
self.create_repository_dependency( convert_repository, depends_on=[ column_repository ], filepath=dependency_xml_path )
self.create_repository_dependency( column_repository, depends_on=[ convert_repository ], filepath=dependency_xml_path )
self.create_repository_dependency( emboss_datatypes_repository, depends_on=[ bismark_repository ], filepath=dependency_xml_path )
@@ -224,6 +230,7 @@
install_tool_dependencies=False,
install_repository_dependencies=True,
new_tool_panel_section='column_maker' )
+ # This should result in column_maker and convert_chars being installed, and the rest never installed.
installed_repositories = [ ( column_repository_name, common.test_user_1_name ),
( convert_repository_name, common.test_user_1_name ) ]
uninstalled_repositories = [ ( emboss_datatypes_repository_name, common.test_user_1_name ),
@@ -244,6 +251,7 @@
new_tool_panel_section='emboss_5_0050' )
if running_standalone:
assert original_datatypes < self.get_datatypes_count(), 'Installing a repository that depends on emboss_datatypes did not add datatypes.'
+ # Now we have emboss_datatypes, emboss, bismark, column_maker, and convert_chars installed, filtering and freebayes never installed.
installed_repositories = [ ( emboss_datatypes_repository_name, common.test_user_1_name ),
( column_repository_name, common.test_user_1_name ),
( emboss_repository_name, common.test_user_1_name ),
@@ -258,6 +266,7 @@
repository = test_db_util.get_installed_repository_by_name_owner( emboss_datatypes_repository_name, common.test_user_1_name )
self.uninstall_repository( repository, remove_from_disk=False )
assert original_datatypes > self.get_datatypes_count(), 'Deactivating emboss_datatypes did not remove datatypes.'
+ # Now we have emboss, bismark, column_maker, and convert_chars installed, filtering and freebayes never installed, and emboss_datatypes deactivated.
installed_repositories = [ ( column_repository_name, common.test_user_1_name ),
( emboss_repository_name, common.test_user_1_name ),
( convert_repository_name, common.test_user_1_name ),
@@ -276,6 +285,8 @@
self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
test_db_util.ga_refresh( repository )
self.check_galaxy_repository_tool_panel_section( repository, 'emboss_5_0050' )
+ # Now we have bismark, column_maker, and convert_chars installed, filtering and freebayes never installed, emboss_datatypes deactivated,
+ # and emboss uninstalled.
installed_repositories = [ ( column_repository_name, common.test_user_1_name ),
( convert_repository_name, common.test_user_1_name ),
( bismark_repository_name, common.test_user_1_name ) ]
@@ -303,6 +314,8 @@
datatypes_repository.name,
datatypes_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ # Installing freebayes should automatically reinstall emboss and reactivate emboss_datatypes.
+ # Now column_maker, convert_chars, emboss, emboss_datatypes, freebayes, and bismark should be installed.
installed_repositories = [ ( column_repository_name, common.test_user_1_name ),
( emboss_datatypes_repository_name, common.test_user_1_name ),
( emboss_repository_name, common.test_user_1_name ),
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/functional/test_1070_invalid_tool.py
--- a/test/tool_shed/functional/test_1070_invalid_tool.py
+++ b/test/tool_shed/functional/test_1070_invalid_tool.py
@@ -7,7 +7,7 @@
category_name = 'Test 0070 Invalid Tool Revisions'
class TestFreebayesRepository( ShedTwillTestCase ):
- '''Testing freebayes with tool data table entries, .loc files, and tool dependencies.'''
+ '''Test repository with multiple revisions with invalid tools.'''
def test_0000_create_or_login_admin_user( self ):
"""Create necessary user accounts and login as an admin user."""
self.galaxy_logout()
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
--- a/test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
+++ b/test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
@@ -17,7 +17,7 @@
running_standalone = False
class TestRepositoryDependencies( ShedTwillTestCase ):
- '''Testing freebayes with tool data table entries, .loc files, and tool dependencies.'''
+ '''Testing uninstalling and reinstalling repository dependencies, and setting tool panel sections.'''
def test_0000_create_or_login_admin_user( self ):
"""Create necessary user accounts and login as an admin user."""
self.galaxy_logout()
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/functional/test_1085_repository_dependency_handling.py
--- a/test/tool_shed/functional/test_1085_repository_dependency_handling.py
+++ b/test/tool_shed/functional/test_1085_repository_dependency_handling.py
@@ -15,7 +15,7 @@
log = logging.getLogger( __name__ )
class TestRepositoryDependencies( ShedTwillTestCase ):
- '''Testing freebayes with tool data table entries, .loc files, and tool dependencies.'''
+ '''Testing the behavior of repository dependencies with tool panel sections.'''
def test_0000_create_or_login_admin_user( self ):
"""Create necessary user accounts and login as an admin user."""
self.galaxy_logout()
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/functional/test_1090_install_tool_from_tool_search.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1090_install_tool_from_tool_search.py
@@ -0,0 +1,266 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+emboss_datatypes_repository_name = 'emboss_datatypes_0090'
+emboss_datatypes_repository_description = "Datatypes for emboss"
+emboss_datatypes_repository_long_description = "Long description of Emboss' datatypes"
+
+emboss_repository_name = 'emboss_0090'
+emboss_repository_description = "Galaxy's emboss tool"
+emboss_repository_long_description = "Long description of Galaxy's emboss tool"
+
+filtering_repository_name = 'filtering_0090'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+freebayes_repository_name = 'freebayes_0090'
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
+
+bwa_base_repository_name = 'bwa_base_0090'
+bwa_base_repository_description = "BWA Base"
+bwa_base_repository_long_description = "NT space mapping with BWA"
+
+bwa_color_repository_name = 'bwa_color_0090'
+bwa_color_repository_description = "BWA Color"
+bwa_color_repository_long_description = "Color space mapping with BWA"
+
+category_name = 'Test 0090 Tool Search And Installation'
+category_description = 'Test 0090 Tool Search And Installation'
+
+running_standalone = False
+
+class TestToolSearchAndInstall( ShedTwillTestCase ):
+ '''Verify that the code correctly handles circular dependencies down to n levels.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_bwa_base_repository( self ):
+ '''Create and populate bwa_base_0090.'''
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ global running_standalone
+ repository = self.get_or_create_repository( name=bwa_base_repository_name,
+ description=bwa_base_repository_description,
+ long_description=bwa_base_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository,
+ 'bwa/bwa_base.tar',
+ strings_displayed=[],
+ commit_message='Uploaded bwa_base.tar.' )
+ def test_0010_create_bwa_color_repository( self ):
+ '''Create and populate bwa_color_0090.'''
+ category = self.create_category( name=category_name, description=category_description )
+ global running_standalone
+ repository = self.get_or_create_repository( name=bwa_color_repository_name,
+ description=bwa_color_repository_description,
+ long_description=bwa_color_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository,
+ 'bwa/bwa_color.tar',
+ strings_displayed=[],
+ commit_message='Uploaded bwa_color.tar.' )
+ def test_0015_create_emboss_datatypes_repository( self ):
+ '''Create and populate emboss_datatypes_0090.'''
+ category = self.create_category( name=category_name, description=category_description )
+ global running_standalone
+ repository = self.get_or_create_repository( name=emboss_datatypes_repository_name,
+ description=emboss_datatypes_repository_description,
+ long_description=emboss_datatypes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository,
+ 'emboss/datatypes/datatypes_conf.xml',
+ strings_displayed=[],
+ commit_message='Uploaded datatypes_conf.xml.' )
+ def test_0020_create_emboss_repository( self ):
+ '''Create and populate emboss_0090.'''
+ category = self.create_category( name=category_name, description=category_description )
+ global running_standalone
+ repository = self.get_or_create_repository( name=emboss_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository,
+ 'emboss/emboss.tar',
+ strings_displayed=[],
+ commit_message='Uploaded tool tarball.' )
+ def test_0025_create_filtering_repository( self ):
+ '''Create and populate filtering_0090.'''
+ category = self.create_category( name=category_name, description=category_description )
+ global running_standalone
+ repository = self.get_or_create_repository( name=filtering_repository_name,
+ description=filtering_repository_description,
+ long_description=filtering_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository,
+ 'filtering/filtering_1.1.0.tar',
+ strings_displayed=[],
+ commit_message='Uploaded filtering.tar.' )
+ def test_0030_create_freebayes_repository( self ):
+ '''Create and populate freebayes_0090.'''
+ category = self.create_category( name=category_name, description=category_description )
+ global running_standalone
+ repository = self.get_or_create_repository( name=freebayes_repository_name,
+ description=freebayes_repository_description,
+ long_description=freebayes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository,
+ 'freebayes/freebayes.tar',
+ strings_displayed=[],
+ commit_message='Uploaded freebayes.tar.' )
+ def test_0035_create_and_upload_dependency_definitions( self ):
+ '''Create and upload repository dependency definitions.'''
+ global running_standalone
+ if running_standalone:
+ bwa_color_repository = test_db_util.get_repository_by_name_and_owner( bwa_color_repository_name, common.test_user_1_name )
+ bwa_base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ emboss_datatypes_repository = test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ dependency_xml_path = self.generate_temp_path( 'test_0090', additional_paths=[ 'freebayes' ] )
+ self.create_repository_dependency( emboss_repository, depends_on=[ emboss_datatypes_repository ], filepath=dependency_xml_path )
+ self.create_repository_dependency( filtering_repository, depends_on=[ freebayes_repository ], filepath=dependency_xml_path )
+ self.create_repository_dependency( bwa_base_repository, depends_on=[ emboss_repository ], filepath=dependency_xml_path )
+ self.create_repository_dependency( bwa_color_repository, depends_on=[ filtering_repository ], filepath=dependency_xml_path )
+ def test_0040_verify_repository_dependencies( self ):
+ '''Verify the generated dependency structure.'''
+ bwa_color_repository = test_db_util.get_repository_by_name_and_owner( bwa_color_repository_name, common.test_user_1_name )
+ bwa_base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ emboss_datatypes_repository = test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ self.check_repository_dependency( emboss_repository, emboss_datatypes_repository )
+ self.check_repository_dependency( filtering_repository, freebayes_repository )
+ self.check_repository_dependency( bwa_base_repository, emboss_repository )
+ self.check_repository_dependency( bwa_color_repository, filtering_repository )
+ def test_0045_install_freebayes_repository( self ):
+ '''Install freebayes without repository dependencies.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ # After this test, the repositories should be in the following states:
+ # Installed: freebayes
+ # Never installed: filtering, emboss, emboss_datatypes, bwa_color, bwa_base
+ self.install_repository( freebayes_repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_tool_dependencies=False,
+ install_repository_dependencies=False,
+ new_tool_panel_section='freebayes_1090' )
+ installed_repositories = [ ( freebayes_repository_name, common.test_user_1_name ) ]
+ uninstalled_repositories = [ ( filtering_repository_name, common.test_user_1_name ),
+ ( emboss_repository_name, common.test_user_1_name ),
+ ( emboss_datatypes_repository_name, common.test_user_1_name ),
+ ( bwa_color_repository_name, common.test_user_1_name ),
+ ( bwa_base_repository_name, common.test_user_1_name ) ]
+ self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
+ def test_0050_install_deactivate_filtering_repository( self ):
+ '''Install and deactivate filtering.'''
+ global running_standalone
+ original_datatypes = self.get_datatypes_count()
+ # After this test, the repositories should be in the following states:
+ # Installed: freebayes
+ # Deactivated: filtering
+ # Never installed: emboss, emboss_datatypes, bwa_color, bwa_base
+ self.install_repository( filtering_repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_tool_dependencies=False,
+ install_repository_dependencies=False,
+ new_tool_panel_section='filtering_1090' )
+ installed_repositories = [ ( filtering_repository_name, common.test_user_1_name ),
+ ( freebayes_repository_name, common.test_user_1_name ) ]
+ uninstalled_repositories = [ ( emboss_repository_name, common.test_user_1_name ),
+ ( emboss_datatypes_repository_name, common.test_user_1_name ),
+ ( bwa_color_repository_name, common.test_user_1_name ),
+ ( bwa_base_repository_name, common.test_user_1_name ) ]
+ self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
+ filtering_repository = test_db_util.get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name )
+ self.uninstall_repository( filtering_repository, remove_from_disk=False )
+ def test_0055_install_uninstall_datatypes_repository( self ):
+ '''Install and uninstall emboss_datatypes.'''
+ # After this test, the repositories should be in the following states:
+ # Installed: freebayes
+ # Deactivated: filtering
+ # Uninstalled: emboss_datatypes
+ # Never installed: emboss, bwa_color, bwa_base
+ self.install_repository( emboss_datatypes_repository_name,
+ common.test_user_1_name,
+ category_name,
+ includes_tools=False )
+ installed_repositories = [ ( emboss_datatypes_repository_name, common.test_user_1_name ),
+ ( freebayes_repository_name, common.test_user_1_name ) ]
+ uninstalled_repositories = [ ( emboss_repository_name, common.test_user_1_name ),
+ ( filtering_repository_name, common.test_user_1_name ),
+ ( bwa_color_repository_name, common.test_user_1_name ),
+ ( bwa_base_repository_name, common.test_user_1_name ) ]
+ self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
+ datatypes_repository = test_db_util.get_installed_repository_by_name_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ self.uninstall_repository( datatypes_repository, remove_from_disk=True )
+ def test_0060_search_for_bwa_tools( self ):
+ '''Search for and install the repositories with BWA tools, and verify that this reinstalls emboss_datatypes and reactivates filtering.'''
+ bwa_color_repository = test_db_util.get_repository_by_name_and_owner( bwa_color_repository_name, common.test_user_1_name )
+ bwa_base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ bwa_base_revision = self.get_repository_tip( bwa_base_repository )
+ bwa_color_revision = self.get_repository_tip( bwa_color_repository )
+ self.search_for_valid_tools( search_fields={ 'tool_id': 'bwa' },
+ exact_matches=False, from_galaxy=True,
+ strings_displayed=[ bwa_color_repository_name, bwa_base_repository_name, bwa_base_revision, bwa_color_revision ] )
+ strings_displayed=[ freebayes_repository_name, emboss_repository_name, filtering_repository_name ]
+ strings_displayed.extend( [ bwa_color_repository_name, bwa_base_repository_name, emboss_datatypes_repository_name ] )
+ strings_displayed.extend( [ 'bwa', 'Handle', 'tool dependencies' ] )
+ repositories_to_install = [ bwa_color_repository, bwa_base_repository ]
+ # BWA is a good candidate for testing the installation of tool dependencies, but it is a core requirement of functional
+ # tests that they be able to run independently of any network connection or remote data.
+ #
+ # After this test, the repositories should be in the following state:
+ # Installed: bwa_color, bwa_base, emboss_datatypes, emboss, filtering, freebayes
+ self.install_repositories_from_search_results( repositories_to_install,
+ install_repository_dependencies='True',
+ install_tool_dependencies=False,
+ new_tool_panel_section='bwa_1090',
+ strings_displayed=strings_displayed )
+
+ installed_repositories = [ ( emboss_repository_name, common.test_user_1_name ),
+ ( filtering_repository_name, common.test_user_1_name ),
+ ( bwa_color_repository_name, common.test_user_1_name ),
+ ( bwa_base_repository_name, common.test_user_1_name ),
+ ( emboss_datatypes_repository_name, common.test_user_1_name ),
+ ( freebayes_repository_name, common.test_user_1_name ) ]
+ uninstalled_repositories = []
+ self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/test_data/bwa/bwa_base.tar
Binary file test/tool_shed/test_data/bwa/bwa_base.tar has changed
diff -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 -r 3a2211dc42f15e0bd357de6abb7dca73221854c7 test/tool_shed/test_data/bwa/bwa_color.tar
Binary file test/tool_shed/test_data/bwa/bwa_color.tar has changed
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5e710a6f1c99/
changeset: 5e710a6f1c99
user: inithello
date: 2013-01-16 18:35:36
summary: Fix for removing roles from users in the tool shed.
affected #: 1 file
diff -r 4f01c6a8693e855da6abad43112afe7447da6e82 -r 5e710a6f1c99e56abb5eb7964cd1c673526866b2 lib/galaxy/web/base/controllers/admin.py
--- a/lib/galaxy/web/base/controllers/admin.py
+++ b/lib/galaxy/web/base/controllers/admin.py
@@ -224,19 +224,20 @@
role = get_role( trans, id )
if params.get( 'role_members_edit_button', False ):
in_users = [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in util.listify( params.in_users ) ]
- for ura in role.users:
- user = trans.sa_session.query( trans.app.model.User ).get( ura.user_id )
- if user not in in_users:
- # Delete DefaultUserPermissions for previously associated users that have been removed from the role
- for dup in user.default_permissions:
- if role == dup.role:
- trans.sa_session.delete( dup )
- # Delete DefaultHistoryPermissions for previously associated users that have been removed from the role
- for history in user.histories:
- for dhp in history.default_permissions:
- if role == dhp.role:
- trans.sa_session.delete( dhp )
- trans.sa_session.flush()
+ if trans.webapp.name == 'galaxy':
+ for ura in role.users:
+ user = trans.sa_session.query( trans.app.model.User ).get( ura.user_id )
+ if user not in in_users:
+ # Delete DefaultUserPermissions for previously associated users that have been removed from the role
+ for dup in user.default_permissions:
+ if role == dup.role:
+ trans.sa_session.delete( dup )
+ # Delete DefaultHistoryPermissions for previously associated users that have been removed from the role
+ for history in user.histories:
+ for dhp in history.default_permissions:
+ if role == dhp.role:
+ trans.sa_session.delete( dhp )
+ trans.sa_session.flush()
in_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( params.in_groups ) ]
trans.app.security_agent.set_entity_role_associations( roles=[ role ], users=in_users, groups=in_groups )
trans.sa_session.refresh( role )
https://bitbucket.org/galaxy/galaxy-central/commits/974d7c41b8df/
changeset: 974d7c41b8df
user: inithello
date: 2013-01-16 18:36:19
summary: Fix server error when trying to create a component review for a repository when one already exists for that changeset revision.
affected #: 1 file
diff -r 5e710a6f1c99e56abb5eb7964cd1c673526866b2 -r 974d7c41b8dfb7e77c6dfa24da05b31d684fd2e6 lib/galaxy/webapps/community/controllers/repository_review.py
--- a/lib/galaxy/webapps/community/controllers/repository_review.py
+++ b/lib/galaxy/webapps/community/controllers/repository_review.py
@@ -377,6 +377,7 @@
if repository_id:
if changeset_revision:
# Make sure there is not already a review of the revision by the user.
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
if common.get_review_by_repository_id_changeset_revision_user_id( trans=trans,
repository_id=repository_id,
changeset_revision=changeset_revision,
@@ -384,7 +385,6 @@
message = "You have already created a review for revision <b>%s</b> of repository <b>%s</b>." % ( changeset_revision, repository.name )
status = "error"
else:
- repository = suc.get_repository_in_tool_shed( trans, repository_id )
# See if there are any reviews for previous changeset revisions that the user can copy.
if not create_without_copying and not previous_review_id and common.has_previous_repository_reviews( trans, repository, changeset_revision ):
return trans.response.send_redirect( web.url_for( controller='repository_review',
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
16 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4f01c6a8693e/
changeset: 4f01c6a8693e
user: natefoo
date: 2013-01-16 18:30:24
summary: Remove spurious debugging statement.
affected #: 1 file
diff -r ee6ddbd3fcb35845239ea82c9062c4893e88c37e -r 4f01c6a8693e855da6abad43112afe7447da6e82 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -274,7 +274,6 @@
from galaxy.web.base.controller import ControllerUnavailable
package = import_module( package_name )
controller_dir = package.__path__[0]
- print ">>>", controller_dir, package.__path__
for fname in os.listdir( controller_dir ):
if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
name = fname[:-3]
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Update history functional tests for changes in f93113b
by Bitbucket 15 Jan '13
by Bitbucket 15 Jan '13
15 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ee6ddbd3fcb3/
changeset: ee6ddbd3fcb3
user: jgoecks
date: 2013-01-15 23:17:32
summary: Update history functional tests for changes in f93113b
affected #: 2 files
diff -r 06cdf8b0b582c40fa89fc7e398fecacb3943dbfc -r ee6ddbd3fcb35845239ea82c9062c4893e88c37e test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -417,7 +417,7 @@
if active_datasets:
self.check_page_for_string( 'Create</a> a new empty history' )
self.check_page_for_string( 'Construct workflow</a> from current history' )
- self.check_page_for_string( 'Clone</a> current history' )
+ self.check_page_for_string( 'Copy</a> current history' )
self.check_page_for_string( 'Share</a> current history' )
self.check_page_for_string( 'Change default permissions</a> for current history' )
if histories_shared_by_others:
@@ -534,13 +534,13 @@
for check_str in strings_displayed:
self.check_page_for_string( check_str )
self.home()
- def clone_history( self, history_id, clone_choice, strings_displayed=[], strings_displayed_after_submit=[] ):
+ def copy_history( self, history_id, copy_choice, strings_displayed=[], strings_displayed_after_submit=[] ):
self.home()
- self.visit_page( "history/clone?id=%s" % history_id )
+ self.visit_page( "history/copy?id=%s" % history_id )
for check_str in strings_displayed:
self.check_page_for_string( check_str )
- tc.fv( '1', 'clone_choice', clone_choice )
- tc.submit( 'clone_choice_button' )
+ tc.fv( '1', 'copy_choice', copy_choice )
+ tc.submit( 'copy_choice_button' )
for check_str in strings_displayed_after_submit:
self.check_page_for_string( check_str )
self.home()
diff -r 06cdf8b0b582c40fa89fc7e398fecacb3943dbfc -r ee6ddbd3fcb35845239ea82c9062c4893e88c37e test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py
+++ b/test/functional/test_history_functions.py
@@ -215,7 +215,7 @@
# Logged in as admin_user
self.delete_current_history( strings_displayed=[ "History (%s) has been shared with others, unshare it before deleting it." % history3.name ] )
def test_030_clone_shared_history( self ):
- """Testing cloning a shared history"""
+ """Testing copying a shared history"""
# logged in as admin user
self.logout()
self.login( email=regular_user1.email )
@@ -223,9 +223,9 @@
self.history_options( user=True, histories_shared_by_others=True )
# Shared history3 should be in regular_user1's list of shared histories
self.view_shared_histories( strings_displayed=[ history3.name, admin_user.email ] )
- self.clone_history( self.security.encode_id( history3.id ),
+ self.copy_history( self.security.encode_id( history3.id ),
'activatable',
- strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
+ strings_displayed_after_submit=[ 'has been created.' ] )
global history3_clone1
history3_clone1 = sa_session.query( galaxy.model.History ) \
.filter( and_( galaxy.model.History.table.c.deleted==False,
@@ -234,11 +234,11 @@
.first()
assert history3_clone1 is not None, "Problem retrieving history3_clone1 from database"
# Check list of histories to make sure shared history3 was cloned
- strings_displayed=[ "Clone of '%s' shared by '%s'" % ( history3.name, admin_user.email ) ]
+ strings_displayed=[ "Copy of '%s' shared by '%s'" % ( history3.name, admin_user.email ) ]
self.view_stored_active_histories( strings_displayed=strings_displayed )
def test_035_clone_current_history( self ):
- """Testing cloning the current history"""
+ """Testing copying the current history"""
# logged in as regular_user1
self.logout()
self.login( email=admin_user.email )
@@ -264,9 +264,9 @@
self.delete_history_item( str( hda_3_bed.id ) )
# Test cloning activatable datasets
- self.clone_history( self.security.encode_id( history3.id ),
+ self.copy_history( self.security.encode_id( history3.id ),
'activatable',
- strings_displayed_after_submit=['is now included in your previously stored histories.' ] )
+ strings_displayed_after_submit=['has been created.' ] )
global history3_clone2
history3_clone2 = sa_session.query( galaxy.model.History ) \
.filter( and_( galaxy.model.History.table.c.deleted==False,
@@ -276,7 +276,7 @@
assert history3_clone2 is not None, "Problem retrieving history3_clone2 from database"
# Check list of histories to make sure shared history3 was cloned
- self.view_stored_active_histories( strings_displayed=[ "Clone of '%s'" % history3.name ] )
+ self.view_stored_active_histories( strings_displayed=[ "Copy of '%s'" % history3.name ] )
# Switch to the cloned history to make sure activatable datasets were cloned
self.switch_history( id=self.security.encode_id( history3_clone2.id ), name=history3_clone2.name )
hda_2_bed = (
@@ -301,10 +301,10 @@
self.check_history_for_exact_string( self.security.encode_id( hda_3_bed.id ), show_deleted=True )
# Test cloning only active datasets
- self.clone_history(
+ self.copy_history(
self.security.encode_id( history3.id ),
'active',
- strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
+ strings_displayed_after_submit=[ 'has been created.' ] )
global history3_clone3
history3_clone3 = (
sa_session.query( galaxy.model.History )
@@ -316,7 +316,7 @@
assert history3_clone3 is not None, "Problem retrieving history3_clone3 from database"
# Check list of histories to make sure shared history3 was cloned
- self.view_stored_active_histories( strings_displayed = ["Clone of '%s'" % history3.name ] )
+ self.view_stored_active_histories( strings_displayed = ["Copy of '%s'" % history3.name ] )
# Switch to the cloned history to make sure ONLY activatable datasets were cloned
self.switch_history( id=self.security.encode_id( history3_clone3.id ) )
@@ -428,9 +428,9 @@
# Shared history5 should be in regular_user1's list of shared histories
self.view_shared_histories( strings_displayed=[ history5.name, admin_user.email ] )
# Clone restricted history5
- self.clone_history( self.security.encode_id( history5.id ),
+ self.copy_history( self.security.encode_id( history5.id ),
'activatable',
- strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
+ strings_displayed_after_submit=[ 'has been created.' ] )
global history5_clone1
history5_clone1 = sa_session.query( galaxy.model.History ) \
.filter( and_( galaxy.model.History.table.c.deleted==False,
@@ -439,7 +439,7 @@
.first()
assert history5_clone1 is not None, "Problem retrieving history5_clone1 from database"
# Check list of histories to make sure shared history5 was cloned
- self.view_stored_active_histories( strings_displayed=[ "Clone of '%s'" % history5.name ] )
+ self.view_stored_active_histories( strings_displayed=[ "Copy of '%s'" % history5.name ] )
# Make sure the dataset is accessible
self.switch_history( id=self.security.encode_id( history5_clone1.id ), name=history5_clone1.name )
self.check_history_for_string( 'chr1' )
@@ -486,9 +486,9 @@
# Shared history5 should be in regular_user2's list of shared histories
self.view_shared_histories( strings_displayed=[ history5.name, admin_user.email ] )
# Clone restricted history5
- self.clone_history( self.security.encode_id( history5.id ),
+ self.copy_history( self.security.encode_id( history5.id ),
'activatable',
- strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
+ strings_displayed_after_submit=[ 'has been created.' ] )
global history5_clone2
history5_clone2 = sa_session.query( galaxy.model.History ) \
.filter( and_( galaxy.model.History.table.c.deleted==False,
@@ -497,7 +497,7 @@
.first()
assert history5_clone2 is not None, "Problem retrieving history5_clone2 from database"
# Check list of histories to make sure shared history3 was cloned
- self.view_stored_active_histories( strings_displayed=[ "Clone of '%s'" % history5.name ] )
+ self.view_stored_active_histories( strings_displayed=[ "Copy of '%s'" % history5.name ] )
# Make sure the dataset is accessible
self.switch_history( id=self.security.encode_id( history5_clone2.id ), name=history5_clone2.name )
# Make sure both datasets are in the history
@@ -557,15 +557,15 @@
self.share_current_history( email,
strings_displayed_after_submit=strings_displayed_after_submit,
action='share_anyway' )
- # Check security on clone of history5 for regular_user2
+ # Check security on copy of history5 for regular_user2
self.logout()
self.login( email=regular_user2.email )
# Shared history5 should be in regular_user2's list of shared histories
self.view_shared_histories( strings_displayed=[ history5.name, admin_user.email ] )
# Clone restricted history5
- self.clone_history( self.security.encode_id( history5.id ),
+ self.copy_history( self.security.encode_id( history5.id ),
'activatable',
- strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
+ strings_displayed_after_submit=[ 'has been created.' ] )
global history5_clone3
history5_clone3 = (
sa_session.query( galaxy.model.History )
@@ -576,7 +576,7 @@
assert history5_clone3 is not None, "Problem retrieving history5_clone3 from database"
# Check list of histories to make sure shared history3 was cloned
- self.view_stored_active_histories( strings_displayed=[ "Clone of '%s'" % history5.name ] )
+ self.view_stored_active_histories( strings_displayed=[ "Copy of '%s'" % history5.name ] )
# Make sure the dataset is accessible
self.switch_history( id=self.security.encode_id( history5_clone3.id ), name=history5_clone3.name )
# Make sure both datasets are in the history
@@ -602,16 +602,16 @@
self.display_history_item( str( hda_2_bed.id ), strings_displayed=[ 'chr1' ] )
# Delete the clone so the next test will be valid
self.delete_history( id=self.security.encode_id( history5_clone3.id ) )
- # Check security on clone of history5 for regular_user3
+ # Check security on copy of history5 for regular_user3
self.logout()
self.login( email=regular_user3.email )
# Shared history5 should be in regular_user2's list of shared histories
self.view_shared_histories( strings_displayed=[ history5.name, admin_user.email ] )
# Clone restricted history5
- self.clone_history( self.security.encode_id( history5.id ),
+ self.copy_history( self.security.encode_id( history5.id ),
'activatable',
- strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
+ strings_displayed_after_submit=[ 'has been created.' ] )
global history5_clone4
history5_clone4 = (
sa_session.query( galaxy.model.History )
@@ -622,7 +622,7 @@
assert history5_clone4 is not None, "Problem retrieving history5_clone4 from database"
# Check list of histories to make sure shared history3 was cloned
- self.view_stored_active_histories( strings_displayed=[ "Clone of '%s'" % history5.name ] )
+ self.view_stored_active_histories( strings_displayed=[ "Copy of '%s'" % history5.name ] )
# Make sure the dataset is accessible
self.switch_history( id=self.security.encode_id( history5_clone4.id ), name=history5_clone4.name )
# Make sure both datasets are in the history
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b45675d0e019/
changeset: b45675d0e019
user: jmchilton
date: 2012-12-18 23:28:09
summary: Add more commonly generated Galaxy runtime files to .hgignore.
affected #: 1 file
diff -r eae248415389203907b5b951f139a200024ae069 -r b45675d0e0192952e9466d843fe0f17a9a6038d3 .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -16,6 +16,7 @@
database/community_files
database/compiled_templates
database/files
+database/job_working_directory
database/pbs
database/tmp
database/*.sqlite
@@ -23,6 +24,11 @@
# Python bytecode
*.pyc
+# Tool Shed Runtime Files
+community_webapp.log
+community_webapp.pid
+hgweb.config*
+
# Config files
universe_wsgi.ini
reports_wsgi.ini
https://bitbucket.org/galaxy/galaxy-central/commits/06cdf8b0b582/
changeset: 06cdf8b0b582
user: dannon
date: 2013-01-15 23:09:33
summary: Merged in jmchilton/galaxy-central-hgignore-fixes (pull request #100: Add more commonly generated Galaxy runtime files to .hgignore.)
affected #: 1 file
diff -r 4bd419751ed3e8cc54913fa37389111a0e7faaa9 -r 06cdf8b0b582c40fa89fc7e398fecacb3943dbfc .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -16,6 +16,7 @@
database/community_files
database/compiled_templates
database/files
+database/job_working_directory
database/pbs
database/tmp
database/*.sqlite
@@ -23,6 +24,11 @@
# Python bytecode
*.pyc
+# Tool Shed Runtime Files
+community_webapp.log
+community_webapp.pid
+hgweb.config*
+
# Config files
universe_wsgi.ini
reports_wsgi.ini
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
15 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4bd419751ed3/
changeset: 4bd419751ed3
user: dan
date: 2013-01-15 22:46:47
summary: Make galaxy.tools.Tool a new-style class.
affected #: 1 file
diff -r 52395842f8e65a6484268d3db1e3dbac09d60089 -r 4bd419751ed3e8cc54913fa37389111a0e7faaa9 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -813,7 +813,7 @@
self.attributes['split_size'] = 20
self.attributes['split_mode'] = 'number_of_parts'
-class Tool:
+class Tool( object ):
"""
Represents a computational tool that can be executed through Galaxy.
"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Change 'clone workflow/visualization' to 'copy workflow/visualization' in user interface and associated code.
by Bitbucket 15 Jan '13
by Bitbucket 15 Jan '13
15 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/52395842f8e6/
changeset: 52395842f8e6
user: jgoecks
date: 2013-01-15 22:16:55
summary: Change 'clone workflow/visualization' to 'copy workflow/visualization' in user interface and associated code.
affected #: 4 files
diff -r f93113b8e4771c31aed6dd39aba38d09e11ffb31 -r 52395842f8e65a6484268d3db1e3dbac09d60089 lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -190,7 +190,7 @@
grids.GridOperation( "Open", allow_multiple=False, url_args=get_url_args ),
grids.GridOperation( "Open in Circster", allow_multiple=False, condition=( lambda item: item.type == 'trackster' ), url_args=dict( action='circster' ) ),
grids.GridOperation( "Edit Attributes", allow_multiple=False, url_args=dict( action='edit') ),
- grids.GridOperation( "Copy", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False, url_args=dict( action='clone') ),
+ grids.GridOperation( "Copy", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False, url_args=dict( action='copy') ),
grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ), async_compatible=True, confirm="Are you sure you want to delete this visualization?" ),
]
@@ -352,7 +352,7 @@
@web.expose
@web.require_login()
- def clone(self, trans, id, *args, **kwargs):
+ def copy(self, trans, id, *args, **kwargs):
visualization = self.get_visualization( trans, id, check_ownership=False )
user = trans.get_user()
owner = ( visualization.user == user )
@@ -360,15 +360,15 @@
if not owner:
new_title += " shared by %s" % visualization.user.email
- cloned_visualization = visualization.copy( user=trans.user, title=new_title )
+ copied_viz = visualization.copy( user=trans.user, title=new_title )
# Persist
session = trans.sa_session
- session.add( cloned_visualization )
+ session.add( copied_viz )
session.flush()
# Display the management page
- trans.set_message( 'Copy created with name "%s"' % cloned_visualization.title )
+ trans.set_message( 'Created new visualization with name "%s"' % copied_viz.title )
return self.list( trans )
@web.expose
diff -r f93113b8e4771c31aed6dd39aba38d09e11ffb31 -r 52395842f8e65a6484268d3db1e3dbac09d60089 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -51,7 +51,7 @@
operations = [
grids.GridOperation( "Edit", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
grids.GridOperation( "Run", condition=( lambda item: not item.deleted ), async_compatible=False ),
- grids.GridOperation( "Clone", condition=( lambda item: not item.deleted ), async_compatible=False ),
+ grids.GridOperation( "Copy", condition=( lambda item: not item.deleted ), async_compatible=False ),
grids.GridOperation( "Rename", condition=( lambda item: not item.deleted ), async_compatible=False ),
grids.GridOperation( "Sharing", condition=( lambda item: not item.deleted ), async_compatible=False ),
grids.GridOperation( "Delete", condition=( lambda item: item.deleted ), async_compatible=True ),
@@ -499,8 +499,8 @@
@web.expose
@web.require_login( "use Galaxy workflows" )
- def clone( self, trans, id ):
- # Get workflow to clone.
+ def copy( self, trans, id ):
+ # Get workflow to copy.
stored = self.get_stored_workflow( trans, id, check_ownership=False )
user = trans.get_user()
if stored.user == user:
@@ -511,11 +511,11 @@
error( "Workflow is not owned by or shared with current user" )
owner = False
- # Clone.
+ # Copy.
new_stored = model.StoredWorkflow()
- new_stored.name = "Clone of '%s'" % stored.name
+ new_stored.name = "Copy of '%s'" % stored.name
new_stored.latest_workflow = stored.latest_workflow
- # Clone annotation.
+ # Copy annotation.
annotation_obj = self.get_item_annotation_obj( trans.sa_session, stored.user, stored )
if annotation_obj:
self.add_item_annotation( trans.sa_session, trans.get_user(), new_stored, annotation_obj.annotation )
@@ -528,7 +528,7 @@
session.add( new_stored )
session.flush()
# Display the management page
- trans.set_message( 'Clone created with name "%s"' % new_stored.name )
+ trans.set_message( 'Created new workflow with name "%s"' % new_stored.name )
return self.list( trans )
@web.expose
diff -r f93113b8e4771c31aed6dd39aba38d09e11ffb31 -r 52395842f8e65a6484268d3db1e3dbac09d60089 templates/visualization/list.mako
--- a/templates/visualization/list.mako
+++ b/templates/visualization/list.mako
@@ -48,7 +48,7 @@
<td><div popupmenu="shared-${i}-popup"><a class="action-button" href="${h.url_for( action='display_by_username_and_slug', username=visualization.user.username, slug=visualization.slug)}" target="_top">View</a>
- <a class="action-button" href="${h.url_for( action='clone', id=trans.security.encode_id(visualization.id) )}">Copy</a>
+ <a class="action-button" href="${h.url_for( action='copy', id=trans.security.encode_id(visualization.id) )}">Copy</a></div></td></tr>
diff -r f93113b8e4771c31aed6dd39aba38d09e11ffb31 -r 52395842f8e65a6484268d3db1e3dbac09d60089 templates/workflow/list.mako
--- a/templates/workflow/list.mako
+++ b/templates/workflow/list.mako
@@ -67,7 +67,7 @@
<a class="action-button" href="${h.url_for( controller='root', action='index', workflow_id=trans.security.encode_id( workflow.id ) )}" target="_parent">Run</a><a class="action-button" href="${h.url_for( controller='workflow', action='sharing', id=trans.security.encode_id( workflow.id ) )}">Share or Publish</a><a class="action-button" href="${h.url_for( controller='workflow', action='export', id=trans.security.encode_id( workflow.id ) )}">Download or Export</a>
- <a class="action-button" href="${h.url_for( controller='workflow', action='clone', id=trans.security.encode_id( workflow.id ) )}">Clone</a>
+ <a class="action-button" href="${h.url_for( controller='workflow', action='copy', id=trans.security.encode_id( workflow.id ) )}">Copy</a><a class="action-button" href="${h.url_for( controller='workflow', action='rename', id=trans.security.encode_id( workflow.id ) )}">Rename</a><a class="action-button" href="${h.url_for( controller='workflow', action='display_by_id', id=trans.security.encode_id( workflow.id ) )}" target="_top">View</a><a class="action-button" confirm="Are you sure you want to delete workflow '${h.to_unicode( workflow.name ) | h}'?" href="${h.url_for( controller='workflow', action='delete', id=trans.security.encode_id( workflow.id ) )}">Delete</a>
@@ -102,7 +102,7 @@
<div popupmenu="shared-${i}-popup"><a class="action-button" href="${h.url_for( controller='workflow', action='display_by_username_and_slug', username=workflow.user.username, slug=workflow.slug )}" target="_top">View</a><a class="action-button" href="${h.url_for( controller='workflow', action='run', id=trans.security.encode_id( workflow.id ) )}">Run</a>
- <a class="action-button" href="${h.url_for( controller='workflow', action='clone', id=trans.security.encode_id( workflow.id ) )}">Clone</a>
+ <a class="action-button" href="${h.url_for( controller='workflow', action='copy', id=trans.security.encode_id( workflow.id ) )}">Copy</a><a class="action-button" confirm="Are you sure you want to remove the shared workflow '${h.to_unicode( workflow.name ) | h}'?" href="${h.url_for( controller='workflow', action='sharing', unshare_me=True, id=trans.security.encode_id( workflow.id ))}">Remove</a></div></td>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Change 'clone history' to 'copy history' in user interface and associated code.
by Bitbucket 15 Jan '13
by Bitbucket 15 Jan '13
15 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f93113b8e477/
changeset: f93113b8e477
user: jgoecks
date: 2013-01-15 16:48:35
summary: Change 'clone history' to 'copy history' in user interface and associated code.
affected #: 5 files
diff -r ec543816a2a2b6fea98e3353e65f9e9c30d62cbe -r f93113b8e4771c31aed6dd39aba38d09e11ffb31 lib/galaxy/webapps/galaxy/controllers/history.py
--- a/lib/galaxy/webapps/galaxy/controllers/history.py
+++ b/lib/galaxy/webapps/galaxy/controllers/history.py
@@ -150,7 +150,7 @@
]
operations = [
grids.GridOperation( "View", allow_multiple=False, target="_top" ),
- grids.GridOperation( "Clone" ),
+ grids.GridOperation( "Copy" ),
grids.GridOperation( "Unshare" )
]
standard_filters = []
@@ -400,13 +400,13 @@
# Display history.
history = self.get_history( trans, ids[0], False)
return self.display_by_username_and_slug( trans, history.user.username, history.slug )
- elif operation == "clone":
+ elif operation == "copy":
if not ids:
- message = "Select a history to clone"
+ message = "Select a history to copy"
return self.shared_list_grid( trans, status='error', message=message, **kwargs )
- # When cloning shared histories, only copy active datasets
- new_kwargs = { 'clone_choice' : 'active' }
- return self.clone( trans, ids, **new_kwargs )
+ # When copying shared histories, only copy active datasets
+ new_kwargs = { 'copy_choice' : 'active' }
+ return self.copy( trans, ids, **new_kwargs )
elif operation == 'unshare':
if not ids:
message = "Select a history to unshare"
@@ -957,7 +957,7 @@
def share( self, trans, id=None, email="", **kwd ):
# If a history contains both datasets that can be shared and others that cannot be shared with the desired user,
# then the entire history is shared, and the protected datasets will be visible, but inaccessible ( greyed out )
- # in the cloned history
# in the copied history
params = util.Params( kwd )
user = trans.get_user()
# TODO: we have too many error messages floating around in here - we need
@@ -1270,15 +1270,16 @@
return trans.show_message( "<p>%s" % change_msg, refresh_frames=['history'] )
@web.expose
- @web.require_login( "clone shared Galaxy history" )
- def clone( self, trans, id=None, **kwd ):
- """Clone a list of histories"""
+ @web.require_login( "copy shared Galaxy history" )
+ def copy( self, trans, id=None, **kwd ):
+ """Copy one or more histories"""
params = util.Params( kwd )
- # If clone_choice was not specified, display form passing along id
+ # If copy_choice was not specified, display form passing along id
# argument
- clone_choice = params.get( 'clone_choice', None )
- if not clone_choice:
- return trans.fill_template( "/history/clone.mako", id_argument=id )
+ copy_choice = params.get( 'copy_choice', None )
+ if not copy_choice:
+ return trans.fill_template( "/history/copy.mako", id_argument=id )
+
# Extract histories for id argument, defaulting to current
if id is None:
histories = [ trans.history ]
@@ -1296,20 +1297,20 @@
if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
.filter_by( user=user, history=history ) \
.count() == 0:
- return trans.show_error_message( "The history you are attempting to clone is not owned by you or shared with you. " )
+ return trans.show_error_message( "The history you are attempting to copy is not owned by you or shared with you. " )
owner = False
- name = "Clone of '%s'" % history.name
+ name = "Copy of '%s'" % history.name
if not owner:
name += " shared by '%s'" % history.user.email
- if clone_choice == 'activatable':
+ if copy_choice == 'activatable':
new_history = history.copy( name=name, target_user=user, activatable=True )
- elif clone_choice == 'active':
+ elif copy_choice == 'active':
name += " (active items only)"
new_history = history.copy( name=name, target_user=user )
if len( histories ) == 1:
- msg = 'Clone with name "<a href="%s" target="_top">%s</a>" is now included in your previously stored histories.' % ( url_for( controller="history", action="switch_to_history", hist_id=trans.security.encode_id( new_history.id ) ) , new_history.name )
+ msg = 'New history "<a href="%s" target="_top">%s</a>" has been created.' % ( url_for( controller="history", action="switch_to_history", hist_id=trans.security.encode_id( new_history.id ) ) , new_history.name )
else:
- msg = '%d cloned histories are now included in your previously stored histories.' % len( histories )
+ msg = 'Copied and created %d new histories.' % len( histories )
return trans.show_ok_message( msg )
@web.expose
diff -r ec543816a2a2b6fea98e3353e65f9e9c30d62cbe -r f93113b8e4771c31aed6dd39aba38d09e11ffb31 templates/history/clone.mako
--- a/templates/history/clone.mako
+++ /dev/null
@@ -1,27 +0,0 @@
-<% _=n_ %>
-<%inherit file="/base.mako"/>
-<%def name="title()">Clone History</%def>
-
-<div class="toolForm">
- <div class="toolFormTitle">Clone History</div>
- <div class="toolFormBody">
- <form action="${h.url_for( controller='history', action='clone' )}" method="post" >
- <div class="form-row">
- %if id_argument is not None:
- <input type="hidden" name="id" value="${id_argument}">
- %endif
- You can clone the history such that the clone will include all items in the original
- history, or you can eliminate the original history's deleted items from the clone.
- </div>
- <div class="form-row">
- <input type="radio" name="clone_choice" value="activatable"> Clone all history items, including deleted items
- </div>
- <div class="form-row">
- <input type="radio" name="clone_choice" value="active"> Clone only items that are not deleted
- </div>
- <div class="form-row">
- <input type="submit" name="clone_choice_button" value="Clone">
- </div>
- </form>
- </div>
-</div>
diff -r ec543816a2a2b6fea98e3353e65f9e9c30d62cbe -r f93113b8e4771c31aed6dd39aba38d09e11ffb31 templates/history/copy.mako
--- /dev/null
+++ b/templates/history/copy.mako
@@ -0,0 +1,27 @@
+<% _=n_ %>
+<%inherit file="/base.mako"/>
+<%def name="title()">Copy History</%def>
+
+<div class="toolForm">
+ <div class="toolFormTitle">Copy History</div>
+ <div class="toolFormBody">
+ <form action="${h.url_for( controller='history', action='copy' )}" method="post" >
+ <div class="form-row">
+ %if id_argument is not None:
+ <input type="hidden" name="id" value="${id_argument}">
+ %endif
+ You can make a copy of the history that includes all datasets in the original history or just the active
+ (not deleted) datasets.
+ </div>
+ <div class="form-row">
+ <input type="radio" name="copy_choice" value="activatable"> Copy all datasets, including deleted ones
+ </div>
+ <div class="form-row">
+ <input type="radio" name="copy_choice" value="active"> Copy only active (not deleted) datasets
+ </div>
+ <div class="form-row">
+ <input type="submit" name="copy_choice_button" value="Copy">
+ </div>
+ </form>
+ </div>
+</div>
diff -r ec543816a2a2b6fea98e3353e65f9e9c30d62cbe -r f93113b8e4771c31aed6dd39aba38d09e11ffb31 templates/history/options.mako
--- a/templates/history/options.mako
+++ b/templates/history/options.mako
@@ -16,7 +16,7 @@
%if len( history.active_datasets ) > 0:
<li><a href="${h.url_for( controller='root', action='history_new' )}">Create</a> a new empty history</li><li><a href="${h.url_for( controller='workflow', action='build_from_current_history' )}">Construct workflow</a> from current history</li>
- <li><a href="${h.url_for( controller='history', action='clone', id=trans.security.encode_id( history.id ) )}">Clone</a> current history</li>
+ <li><a href="${h.url_for( controller='history', action='copy', id=trans.security.encode_id( history.id ) )}">Copy</a> current history</li>
%endif
<li><a href="${h.url_for( controller='history', action='share' )}" target="galaxy_main">Share</a> current history</div><li><a href="${h.url_for( controller='root', action='history_set_default_permissions' )}">Change default permissions</a> for current history</li>
diff -r ec543816a2a2b6fea98e3353e65f9e9c30d62cbe -r f93113b8e4771c31aed6dd39aba38d09e11ffb31 templates/root/index.mako
--- a/templates/root/index.mako
+++ b/templates/root/index.mako
@@ -22,8 +22,8 @@
"${_("Create New")}": function() {
galaxy_history.location = "${h.url_for( controller='root', action='history_new' )}";
},
- "${_("Clone")}": function() {
- galaxy_main.location = "${h.url_for( controller='history', action='clone')}";
+ "${_("Copy History")}": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='copy')}";
},
"${_("Copy Datasets")}": function() {
galaxy_main.location = "${h.url_for( controller='dataset', action='copy_datasets' )}";
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0