1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/598d633c9caa/
Changeset: 598d633c9caa
User: jmchilton
Date: 2013-09-03 19:32:02
Summary: Add ucsc_tools requirement for extract_genomic_dna.xml tool.
Affected #: 1 file
diff -r fd380581d0073c6a08cca74e34434f058804de3d -r 598d633c9caa9cccbd28e8fc57f650c1b8564371 tools/extract/extract_genomic_dna.xml
--- a/tools/extract/extract_genomic_dna.xml
+++ b/tools/extract/extract_genomic_dna.xml
@@ -53,6 +53,7 @@
</outputs><requirements><requirement type="binary">faToTwoBit</requirement>
+ <requirement type="package">ucsc_tools</requirement></requirements><tests><test>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/fd380581d007/
Changeset: fd380581d007
User: Dave Bouvier
Date: 2013-09-10 22:35:48
Summary: Fix for displaying repository dependencies that are not filtered out.
Affected #: 1 file
diff -r cd91ec8f14059167cdfda2ab606035c97ead5469 -r fd380581d0073c6a08cca74e34434f058804de3d lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -625,10 +625,7 @@
tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
common_util.parse_repository_dependency_tuple( required_rd_tup )
if not asbool( only_if_compiling_contained_td ):
- if rd_key in filtered_key_rd_dict:
- filtered_key_rd_dict[ rd_key ].append( required_rd_tup )
- else:
- filtered_key_rd_dict[ rd_key ] = [ required_rd_tup ]
+ filtered_key_rd_dict[ rd_key ] = required_rd_tup
return filtered_key_rd_dict
def merge_missing_repository_dependencies_to_installed_container( containers_dict ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/cd91ec8f1405/
Changeset: cd91ec8f1405
User: Dave Bouvier
Date: 2013-09-10 20:35:06
Summary: Fix for filtering out repository dependencies that are only required when compiling a tool dependency because the precompiled binary was not found or failed to install.
Affected #: 1 file
diff -r 595c30bc8df5dfd626d78cc4e030ac1910b54f4e -r cd91ec8f14059167cdfda2ab606035c97ead5469 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -678,7 +678,7 @@
current_repository_key_rd_dicts = get_updated_changeset_revisions_for_repository_dependencies( trans, current_repository_key_rd_dicts )
for key_rd_dict in current_repository_key_rd_dicts:
# Filter out repository dependencies that are required only if compiling the dependent repository's tool dependency.
- all_repository_dependencieskey_rd_dict = filter_only_if_compiling_contained_td( key_rd_dict )
+ key_rd_dict = filter_only_if_compiling_contained_td( key_rd_dict )
if key_rd_dict:
is_circular = False
if not in_key_rd_dicts( key_rd_dict, handled_key_rd_dicts ) and not in_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/595c30bc8df5/
Changeset: 595c30bc8df5
User: greg
Date: 2013-09-10 20:19:46
Summary: Slight refactoring of message generation in the tool shed.
Affected #: 5 files
diff -r 04a3137539d245fa294f73f8b6467a5b13e6d028 -r 595c30bc8df5dfd626d78cc4e030ac1910b54f4e lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -2159,11 +2159,17 @@
all_repository_dependencies=None,
handled_key_rd_dicts=None )
if str( repository.type ) != rt_util.TOOL_DEPENDENCY_DEFINITION:
- # Handle messaging for orphan tool dependencies.
- orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( trans, repository, metadata )
- if orphan_message:
- message += orphan_message
+ # Handle messaging for resetting repository type to the optimal value.
+ change_repository_type_message = tool_dependency_util.generate_message_for_repository_type_change( trans, repository )
+ if change_repository_type_message:
+ message += change_repository_type_message
status = 'warning'
+ else:
+ # Handle messaging for orphan tool dependency definitions.
+ orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( trans, repository, metadata )
+ if orphan_message:
+ message += orphan_message
+ status = 'warning'
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
message += malicious_error_can_push
diff -r 04a3137539d245fa294f73f8b6467a5b13e6d028 -r 595c30bc8df5dfd626d78cc4e030ac1910b54f4e lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -207,15 +207,20 @@
else:
metadata_dict = {}
if str( repository.type ) != rt_util.TOOL_DEPENDENCY_DEFINITION:
- # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to a requirement tag mismatch
- # or some other problem. Tool dependency definitions can define orphan tool dependencies (no relationship to any tools contained in the repository),
- # so warning messages are important because orphans are always valid. The repository owner must be warned in case they did not intend to define an
- # orphan dependency, but simply provided incorrect information (tool shed, name owner, changeset_revision) for the definition.
- # Handle messaging for orphan tool dependencies.
- orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( trans, repository, metadata_dict )
- if orphan_message:
- message += orphan_message
+ change_repository_type_message = tool_dependency_util.generate_message_for_repository_type_change( trans, repository )
+ if change_repository_type_message:
+ message += change_repository_type_message
status = 'warning'
+ else:
+ # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to a requirement tag mismatch
+ # or some other problem. Tool dependency definitions can define orphan tool dependencies (no relationship to any tools contained in the repository),
+ # so warning messages are important because orphans are always valid. The repository owner must be warned in case they did not intend to define an
+ # orphan dependency, but simply provided incorrect information (tool shed, name owner, changeset_revision) for the definition.
+ # Handle messaging for orphan tool dependencies.
+ orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( trans, repository, metadata_dict )
+ if orphan_message:
+ message += orphan_message
+ status = 'warning'
# Handle messaging for invalid tool dependencies.
invalid_tool_dependencies_message = tool_dependency_util.generate_message_for_invalid_tool_dependencies( metadata_dict )
if invalid_tool_dependencies_message:
diff -r 04a3137539d245fa294f73f8b6467a5b13e6d028 -r 595c30bc8df5dfd626d78cc4e030ac1910b54f4e lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -906,13 +906,13 @@
repository_dependency_tups=invalid_repository_dependency_tups,
is_valid=False,
description=description )
- # We need to continue to restrict the behavior of orphan tool dependencies, possibly eliminating them altoghether at some point.
+ # We need to continue to restrict the behavior for defining orphan tool dependencies, possibly eliminating them altogether at some point.
check_for_orphan_tool_dependencies = False
if app.name == 'tool_shed':
- if repository.type == rt_util.UNRESTRICTED and 'tools' not in metadata_dict:
+ if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and not repository.can_change_type_to( app, rt_util.TOOL_DEPENDENCY_DEFINITION ):
check_for_orphan_tool_dependencies = True
- elif 'tools' in metadata_dict:
- check_for_orphan_tool_dependencies = True
+ elif 'tools' in metadata_dict:
+ check_for_orphan_tool_dependencies = True
if check_for_orphan_tool_dependencies:
# Determine and store orphan tool dependencies.
orphan_tool_dependencies = get_orphan_tool_dependencies( metadata_dict )
diff -r 04a3137539d245fa294f73f8b6467a5b13e6d028 -r 595c30bc8df5dfd626d78cc4e030ac1910b54f4e lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -150,14 +150,17 @@
version = requirements_dict[ 'version' ]
message += "<b>* name:</b> %s, <b>type:</b> %s, <b>version:</b> %s<br/>" % ( str( name ), str( type ), str( version ) )
message += "<br/>"
- elif repository.can_change_type_to( trans.app, rt_util.TOOL_DEPENDENCY_DEFINITION ):
- tool_dependency_definition_type_class = trans.app.repository_types_registry.get_class_by_label( rt_util.TOOL_DEPENDENCY_DEFINITION )
- message += "This repository currently contains a single file named <b>%s</b>. If additional files will " % suc.TOOL_DEPENDENCY_DEFINITION_FILENAME
- message += "not be added to this repository, then it's type should be set to <b>%s</b>.<br/>" % tool_dependency_definition_type_class.label
- else:
- message += "This repository contains no tools, so it's defined tool dependencies are considered orphans within this repository.<br/>"
return message
+def generate_message_for_repository_type_change( trans, repository ):
+ message = ''
+ if repository.can_change_type_to( trans.app, rt_util.TOOL_DEPENDENCY_DEFINITION ):
+ tool_dependency_definition_type_class = trans.app.repository_types_registry.get_class_by_label( rt_util.TOOL_DEPENDENCY_DEFINITION )
+ message += "This repository currently contains a single file named <b>%s</b>. If additional files will " % suc.TOOL_DEPENDENCY_DEFINITION_FILENAME
+ message += "not be added to this repository, then its type should be set to <b>%s</b>.<br/>" % tool_dependency_definition_type_class.label
+ return message
+
+
def get_download_url_for_platform( url_templates, platform_info_dict ):
'''
Compare the dict returned by get_platform_info() with the values specified in the url_template element. Return
diff -r 04a3137539d245fa294f73f8b6467a5b13e6d028 -r 595c30bc8df5dfd626d78cc4e030ac1910b54f4e test/tool_shed/functional/test_0100_complex_repository_dependencies.py
--- a/test/tool_shed/functional/test_0100_complex_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0100_complex_repository_dependencies.py
@@ -54,7 +54,7 @@
strings_displayed=[ 'This repository currently contains a single file named <b>tool_dependencies.xml</b>' ],
strings_not_displayed=[] )
# Visit the manage repository page for package_bwa_0_5_9_0100.
- self.display_manage_repository_page( repository, strings_displayed=[ 'Tool dependencies', 'may not be', 'in this repository' ] )
+ self.display_manage_repository_page( repository, strings_displayed=[ 'Tool dependencies', 'will not be', 'to this repository' ] )
def test_0010_create_bwa_base_repository( self ):
'''Create and populate bwa_base_0100.'''
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/04a3137539d2/
Changeset: 04a3137539d2
User: greg
Date: 2013-09-10 19:29:18
Summary: Don't generate error messages when resetting metadata on installed tool shed repositories that have repository dependencies that are not installed because they were not needed for compiling the dependent repository's tool dependency.
Affected #: 1 file
diff -r 1e30cdb6d3b82a68da8daddc86411eecc33be444 -r 04a3137539d245fa294f73f8b6467a5b13e6d028 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -1195,14 +1195,17 @@
repository = suc.get_repository_for_dependency_relationship( app, cleaned_toolshed, name, owner, updated_changeset_revision )
if repository:
return repository_dependency_tup, is_valid, error_message
- # We'll currently default to setting the repository dependency definition as invalid if an installed repository cannot be found.
- # This may not be ideal because the tool shed may have simply been inaccessible when metadata was being generated for the installed
- # tool shed repository.
- error_message = "Ignoring invalid repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
- ( toolshed, name, owner, changeset_revision )
- log.debug( error_message )
- is_valid = False
- return repository_dependency_tup, is_valid, error_message
+ # Don't generate an error message for missing repository dependencies that are required only if compiling the dependent repository's
+ # tool dependency.
+ if not only_if_compiling_contained_td:
+ # We'll currently default to setting the repository dependency definition as invalid if an installed repository cannot be found.
+ # This may not be ideal because the tool shed may have simply been inaccessible when metadata was being generated for the installed
+ # tool shed repository.
+ error_message = "Ignoring invalid repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
+ ( toolshed, name, owner, changeset_revision )
+ log.debug( error_message )
+ is_valid = False
+ return repository_dependency_tup, is_valid, error_message
else:
# We're in the tool shed.
if suc.tool_shed_is_this_tool_shed( toolshed ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0bc38956530a/
Changeset: 0bc38956530a
Branch: dataset-cleanup
User: lance_parsons
Date: 2013-04-18 17:24:09
Summary: Basic administrative dataset cleanup script
Affected #: 3 files
diff -r 18b23ed8de5b384f142db349e7379c03567758df -r 0bc38956530a382faea4b42e771037672c2a6b0c scripts/cleanup_datasets/admin_cleanup_datasets.py
--- /dev/null
+++ b/scripts/cleanup_datasets/admin_cleanup_datasets.py
@@ -0,0 +1,256 @@
+#!/usr/bin/env python
+"""
+Mark datasets as deleted that are older than specified cutoff
+and (optionally) with a tool_id that matches the specified search
+string.
+
+This script is useful for administrators to cleanup after users who
+leave many old datasets around. It was modeled after the cleanup_datasets.py
+script originally distributed with Galaxy.
+
+Basic Usage:
+ admin_cleanup_datasets.py universe_wsgi.ini -d 60 \
+ --template=email_template.txt
+
+Required Arguments:
+ config_file - the Galaxy configuration file (universe_wsgi.ini)
+
+Optional Arguments:
+ -d --days - number of days old the dataset must be (default: 60)
+ --tool_id - string to search for in dataset tool_id
+ --template - Mako template file to use for email notification
+ -i --info_only - Print results, but don't email or delete anything
+ -e --email_only - Email notifications, but don't delete anything
+ Useful for notifying users of pending deletion
+
+ --smtp - Specify smtp server
+ If not specified, use smtp settings specified in config file
+ --fromaddr - Specify from address
+ If not specified, use error_email_to specified in config file
+
+Email Template Variables:
+ cutoff - the cutoff in days
+ email - the users email address
+ datasets - a list of tuples containing 'dataset' and 'history' names
+
+
+Author: Lance Parsons (lparsons(a)princeton.edu)
+"""
+import os
+import sys
+import shutil
+import logging
+from collections import defaultdict
+
+log = logging.getLogger()
+log.setLevel(10)
+log.addHandler(logging.StreamHandler(sys.stdout))
+
+from cleanup_datasets import CleanupDatasetsApplication
+import pkg_resources
+pkg_resources.require("SQLAlchemy >= 0.4")
+
+#pkg_resources.require("Mako")
+from mako.template import Template
+
+import time
+import ConfigParser
+from datetime import datetime, timedelta
+from time import strftime
+from optparse import OptionParser
+
+import galaxy.config
+import galaxy.model.mapping
+import sqlalchemy as sa
+from galaxy.model.orm import and_
+import galaxy.util
+
+assert sys.version_info[:2] >= (2, 4)
+
+
+def main():
+ """
+ Datasets that are older than the specified cutoff and for which the tool_id
+ contains the specified text will be marked as deleted in user's history and
+ the user will be notified by email using the specified template file.
+ """
+ parser = OptionParser()
+ parser.add_option("-d", "--days", dest="days", action="store",
+ type="int", help="number of days (60)", default=60)
+ parser.add_option("--tool_id", default="",
+ help="Text to match against tool_id")
+ parser.add_option("--template", default=None,
+ help="Mako Template file to use as email "
+ "Variables are 'cutoff' for the cutoff in days, "
+ "'email' for users email and "
+ "'datasets' which is a list of tuples "
+ "containing 'dataset' and 'history' names. "
+ "Default: admin_cleanup_deletion_template.txt")
+ parser.add_option("-i", "--info_only", action="store_true",
+ dest="info_only", help="info about the requested action",
+ default=False)
+ parser.add_option("-e", "--email_only", action="store_true",
+ dest="email_only", help="Send emails only, don't delete",
+ default=False)
+ parser.add_option("--smtp", default=None,
+ help="SMTP Server to use to send email. "
+ "Default: [read from galaxy ini file]")
+ parser.add_option("--fromaddr", default=None,
+ help="From address to use to send email. "
+ "Default: [read from galaxy ini file]")
+ (options, args) = parser.parse_args()
+ ini_file = args[0]
+
+ config_parser = ConfigParser.ConfigParser({'here': os.getcwd()})
+ config_parser.read(ini_file)
+ config_dict = {}
+ for key, value in config_parser.items("app:main"):
+ config_dict[key] = value
+
+ if options.smtp is not None:
+ config_dict['smtp_server'] = options.smtp
+ if config_dict.get('smtp_server') is None:
+ parser.error("SMTP Server must be specified as an option (--smtp) "
+ "or in the config file (smtp_server)")
+
+ if options.fromaddr is not None:
+ config_dict['error_email_to'] = options.fromaddr
+ if config_dict.get('error_email_to') is None:
+ parser.error("From address must be specified as an option "
+ "(--fromaddr) or in the config file "
+ "(error_email_to)")
+
+ scriptdir = os.path.dirname(os.path.abspath(__file__))
+ template_file = options.template
+ if template_file is None:
+ default_template = os.path.join(scriptdir,
+ 'admin_cleanup_deletion_template.txt')
+ sample_template_file = "%s.sample" % default_template
+ if os.path.exists(default_template):
+ template_file = default_template
+ elif os.path.exists(sample_template_file):
+ print "Copying %s to %s" % (sample_template_file, default_template)
+ shutil.copyfile(sample_template_file, default_template)
+ template_file = default_template
+ else:
+ parser.error("Default template (%s) or sample template (%s) not "
+ "found, please specify template as an option "
+ "(--template)." % default_template,
+ sample_template_file)
+ elif not os.path.exists(template_file):
+ parser.error("Specified template file (%s) not found." % template_file)
+
+ config = galaxy.config.Configuration(**config_dict)
+
+ app = CleanupDatasetsApplication(config)
+ cutoff_time = datetime.utcnow() - timedelta(days=options.days)
+ now = strftime("%Y-%m-%d %H:%M:%S")
+
+ print "##########################################"
+ print "\n# %s - Handling stuff older than %i days" % (now, options.days)
+
+ if options.info_only:
+ print "# Displaying info only ( --info_only )\n"
+ elif options.email_only:
+ print "# Sending emails only, not deleting ( --email_only )\n"
+
+ administrative_delete_datasets(
+ app, cutoff_time, options.days, tool_id=options.tool_id,
+ template_file=template_file, config=config,
+ email_only=options.email_only, info_only=options.info_only)
+ app.shutdown()
+ sys.exit(0)
+
+
+def administrative_delete_datasets(app, cutoff_time, cutoff_days,
+ tool_id, template_file,
+ config, email_only=False,
+ info_only=False):
+ # Marks dataset history association deleted and email users
+ start = time.time()
+ # We really only need the id column here, but sqlalchemy barfs when
+ # trying to select only 1 column
+ hda_ids_query = sa.select(
+ (app.model.HistoryDatasetAssociation.table.c.id,
+ app.model.HistoryDatasetAssociation.table.c.deleted),
+ whereclause=and_(
+ app.model.Dataset.table.c.deleted == False,
+ app.model.HistoryDatasetAssociation.table.c.update_time
+ < cutoff_time,
+ app.model.Job.table.c.tool_id.like("%%%s%%" % tool_id),
+ app.model.HistoryDatasetAssociation.table.c.deleted == False),
+ from_obj=[sa.outerjoin(
+ app.model.Dataset.table,
+ app.model.HistoryDatasetAssociation.table)
+ .outerjoin(app.model.JobToOutputDatasetAssociation.table)
+ .outerjoin(app.model.Job.table)])
+ deleted_instance_count = 0
+ # skip = []
+ user_notifications = defaultdict(list)
+ # Add all datasets associated with Histories to our list
+ hda_ids = []
+ hda_ids.extend(
+ [row.id for row in hda_ids_query.execute()])
+ # Process each of the Dataset objects
+ for hda_id in hda_ids:
+ user_query = sa.select(
+ [app.model.HistoryDatasetAssociation.table,
+ app.model.History.table,
+ app.model.User.table],
+ whereclause=and_(
+ app.model.HistoryDatasetAssociation.table.c.id == hda_id),
+ from_obj=[sa.join(app.model.User.table,
+ app.model.History.table)
+ .join(app.model.HistoryDatasetAssociation.table)],
+ use_labels=True)
+ for result in user_query.execute():
+ user_notifications[result[app.model.User.table.c.email]].append(
+ (result[app.model.HistoryDatasetAssociation.table.c.name],
+ result[app.model.History.table.c.name]))
+ deleted_instance_count += 1
+ if not info_only and not email_only:
+ # Get the HistoryDatasetAssociation objects
+ hda = app.sa_session.query(
+ app.model.HistoryDatasetAssociation).get(hda_id)
+ if not hda.deleted:
+ # Mark the HistoryDatasetAssociation as deleted
+ hda.deleted = True
+ app.sa_session.add(hda)
+ print ("Marked HistoryDatasetAssociation id %d as "
+ "deleted" % hda.id)
+ app.sa_session.flush()
+
+ emailtemplate = Template(filename=template_file)
+ for (email, dataset_list) in user_notifications.iteritems():
+ msgtext = emailtemplate.render(email=email,
+ datasets=dataset_list,
+ cutoff=cutoff_days)
+ subject = "Galaxy Server Cleanup " \
+ "- %d datasets DELETED" % len(dataset_list)
+ fromaddr = config.error_email_to
+ print ""
+ print "From: %s" % fromaddr
+ print "To: %s" % email
+ print "Subject: %s" % subject
+ print "----------"
+ print msgtext
+ if not info_only:
+ #msg = MIMEText(msgtext)
+ #msg['Subject'] = subject
+ #msg['From'] = 'noone(a)nowhere.com'
+ #msg['To'] = email
+ galaxy.util.send_mail(fromaddr, email, subject,
+ msgtext, config)
+ #s = smtplib.SMTP(smtp_server)
+ #s.sendmail(['lparsons(a)princeton.edu'], email, msg.as_string())
+ #s.quit()
+
+ stop = time.time()
+ print ""
+ print "Marked %d dataset instances as deleted" % deleted_instance_count
+ print "Total elapsed time: ", stop - start
+ print "##########################################"
+
+
+if __name__ == "__main__":
+ main()
diff -r 18b23ed8de5b384f142db349e7379c03567758df -r 0bc38956530a382faea4b42e771037672c2a6b0c scripts/cleanup_datasets/admin_cleanup_deletion_template.txt.sample
--- /dev/null
+++ b/scripts/cleanup_datasets/admin_cleanup_deletion_template.txt.sample
@@ -0,0 +1,11 @@
+Galaxy Server Cleanup
+---------------------
+The following datasets you own on Galaxy are older than ${cutoff} days and have been DELETED:
+
+% for dataset, history in datasets:
+ "${dataset}" in history "${history}"
+% endfor
+
+You may be able to undelete them by logging into Galaxy, navigating to the appropriate history, selecting "Include Deleted Datasets" from the history options menu, and clicking on the link to undelete each dataset that you want to keep. You can then download the datasets. Thank you for your understanding and cooperation in this necessary cleanup in order to keep the Galaxy resource available. Please don't hesitate to contact us if you have any questions.
+
+ -- Galaxy Administrators
diff -r 18b23ed8de5b384f142db349e7379c03567758df -r 0bc38956530a382faea4b42e771037672c2a6b0c scripts/cleanup_datasets/admin_cleanup_warning_template.txt.sample
--- /dev/null
+++ b/scripts/cleanup_datasets/admin_cleanup_warning_template.txt.sample
@@ -0,0 +1,11 @@
+Galaxy Server Cleanup
+---------------------
+The following datasets you own on Galaxy are older than ${cutoff} days and will be deleted soon. Be sure to download any datasets you need to keep.
+
+% for dataset, history in datasets:
+ "${dataset}" in history "${history}"
+% endfor
+
+Please contact us if you have any questions.
+
+ -- Galaxy Administrators
https://bitbucket.org/galaxy/galaxy-central/commits/e4734c99812a/
Changeset: e4734c99812a
Branch: dataset-cleanup
User: lance_parsons
Date: 2013-04-18 21:43:00
Summary: Fix admin_cleanup_datasets.py for copied datasets
Affected #: 1 file
diff -r 0bc38956530a382faea4b42e771037672c2a6b0c -r e4734c99812ad2f240b2fa963ea9af3fa710bd74 scripts/cleanup_datasets/admin_cleanup_datasets.py
--- a/scripts/cleanup_datasets/admin_cleanup_datasets.py
+++ b/scripts/cleanup_datasets/admin_cleanup_datasets.py
@@ -17,7 +17,7 @@
Optional Arguments:
-d --days - number of days old the dataset must be (default: 60)
- --tool_id - string to search for in dataset tool_id
+ --tool_id - string to search for in dataset tool_id (default: all)
--template - Mako template file to use for email notification
-i --info_only - Print results, but don't email or delete anything
-e --email_only - Email notifications, but don't delete anything
@@ -47,8 +47,8 @@
log.addHandler(logging.StreamHandler(sys.stdout))
from cleanup_datasets import CleanupDatasetsApplication
-import pkg_resources
-pkg_resources.require("SQLAlchemy >= 0.4")
+#import pkg_resources
+#pkg_resources.require("SQLAlchemy >= 0.4")
#pkg_resources.require("Mako")
from mako.template import Template
@@ -77,8 +77,9 @@
parser = OptionParser()
parser.add_option("-d", "--days", dest="days", action="store",
type="int", help="number of days (60)", default=60)
- parser.add_option("--tool_id", default="",
- help="Text to match against tool_id")
+ parser.add_option("--tool_id", default=None,
+ help="Text to match against tool_id"
+ "Default: match all")
parser.add_option("--template", default=None,
help="Mako Template file to use as email "
"Variables are 'cutoff' for the cutoff in days, "
@@ -168,6 +169,7 @@
info_only=False):
# Marks dataset history association deleted and email users
start = time.time()
+ # Get HDAs older than cutoff time (ignore tool_id at this point)
# We really only need the id column here, but sqlalchemy barfs when
# trying to select only 1 column
hda_ids_query = sa.select(
@@ -177,20 +179,28 @@
app.model.Dataset.table.c.deleted == False,
app.model.HistoryDatasetAssociation.table.c.update_time
< cutoff_time,
- app.model.Job.table.c.tool_id.like("%%%s%%" % tool_id),
app.model.HistoryDatasetAssociation.table.c.deleted == False),
from_obj=[sa.outerjoin(
app.model.Dataset.table,
- app.model.HistoryDatasetAssociation.table)
- .outerjoin(app.model.JobToOutputDatasetAssociation.table)
- .outerjoin(app.model.Job.table)])
- deleted_instance_count = 0
- # skip = []
- user_notifications = defaultdict(list)
- # Add all datasets associated with Histories to our list
+ app.model.HistoryDatasetAssociation.table)])
+
+ # Add all datasets associated with Histories to our list
hda_ids = []
hda_ids.extend(
[row.id for row in hda_ids_query.execute()])
+
+ # Now find the tool_id that generated the dataset (even if it was copied)
+ tool_matched_ids = []
+ if tool_id is not None:
+ for hda_id in hda_ids:
+ this_tool_id = _get_tool_id_for_hda(app, hda_id)
+ if this_tool_id is not None and tool_id in this_tool_id:
+ tool_matched_ids.append(hda_id)
+ hda_ids = tool_matched_ids
+
+ deleted_instance_count = 0
+ user_notifications = defaultdict(list)
+
# Process each of the Dataset objects
for hda_id in hda_ids:
user_query = sa.select(
@@ -252,5 +262,22 @@
print "##########################################"
+def _get_tool_id_for_hda(app, hda_id):
+ # TODO Some datasets don't seem to have an entry in jtod or a copied_from
+ if hda_id is None:
+ return None
+ job = app.sa_session.query(app.model.Job).\
+ join(app.model.JobToOutputDatasetAssociation).\
+ filter(app.model.JobToOutputDatasetAssociation.table.c.dataset_id ==
+ hda_id).first()
+ if job is not None:
+ return job.tool_id
+ else:
+ hda = app.sa_session.query(app.model.HistoryDatasetAssociation).\
+ get(hda_id)
+ return _get_tool_id_for_hda(app, hda.
+ copied_from_history_dataset_association_id)
+
+
if __name__ == "__main__":
main()
https://bitbucket.org/galaxy/galaxy-central/commits/1e30cdb6d3b8/
Changeset: 1e30cdb6d3b8
User: dannon
Date: 2013-09-10 19:14:10
Summary: Merged in lance_parsons/galaxy-central-pull-requests/dataset-cleanup (pull request #158)
Basic administrative dataset cleanup script
Affected #: 3 files
diff -r 69bec229bdf85900f7d581e735aa69ee2cc2aeb9 -r 1e30cdb6d3b82a68da8daddc86411eecc33be444 scripts/cleanup_datasets/admin_cleanup_datasets.py
--- /dev/null
+++ b/scripts/cleanup_datasets/admin_cleanup_datasets.py
@@ -0,0 +1,283 @@
+#!/usr/bin/env python
+"""
+Mark datasets as deleted that are older than specified cutoff
+and (optionally) with a tool_id that matches the specified search
+string.
+
+This script is useful for administrators to cleanup after users who
+leave many old datasets around. It was modeled after the cleanup_datasets.py
+script originally distributed with Galaxy.
+
+Basic Usage:
+ admin_cleanup_datasets.py universe_wsgi.ini -d 60 \
+ --template=email_template.txt
+
+Required Arguments:
+ config_file - the Galaxy configuration file (universe_wsgi.ini)
+
+Optional Arguments:
+ -d --days - number of days old the dataset must be (default: 60)
+ --tool_id - string to search for in dataset tool_id (default: all)
+ --template - Mako template file to use for email notification
+ -i --info_only - Print results, but don't email or delete anything
+ -e --email_only - Email notifications, but don't delete anything
+ Useful for notifying users of pending deletion
+
+ --smtp - Specify smtp server
+ If not specified, use smtp settings specified in config file
+ --fromaddr - Specify from address
+ If not specified, use error_email_to specified in config file
+
+Email Template Variables:
+ cutoff - the cutoff in days
+ email - the users email address
+ datasets - a list of tuples containing 'dataset' and 'history' names
+
+
+Author: Lance Parsons (lparsons(a)princeton.edu)
+"""
+import os
+import sys
+import shutil
+import logging
+from collections import defaultdict
+
+log = logging.getLogger()
+log.setLevel(10)
+log.addHandler(logging.StreamHandler(sys.stdout))
+
+from cleanup_datasets import CleanupDatasetsApplication
+#import pkg_resources
+#pkg_resources.require("SQLAlchemy >= 0.4")
+
+#pkg_resources.require("Mako")
+from mako.template import Template
+
+import time
+import ConfigParser
+from datetime import datetime, timedelta
+from time import strftime
+from optparse import OptionParser
+
+import galaxy.config
+import galaxy.model.mapping
+import sqlalchemy as sa
+from galaxy.model.orm import and_
+import galaxy.util
+
+assert sys.version_info[:2] >= (2, 4)
+
+
+def main():
+ """
+ Datasets that are older than the specified cutoff and for which the tool_id
+ contains the specified text will be marked as deleted in user's history and
+ the user will be notified by email using the specified template file.
+ """
+ parser = OptionParser()
+ parser.add_option("-d", "--days", dest="days", action="store",
+ type="int", help="number of days (60)", default=60)
+ parser.add_option("--tool_id", default=None,
+ help="Text to match against tool_id"
+ "Default: match all")
+ parser.add_option("--template", default=None,
+ help="Mako Template file to use as email "
+ "Variables are 'cutoff' for the cutoff in days, "
+ "'email' for users email and "
+ "'datasets' which is a list of tuples "
+ "containing 'dataset' and 'history' names. "
+ "Default: admin_cleanup_deletion_template.txt")
+ parser.add_option("-i", "--info_only", action="store_true",
+ dest="info_only", help="info about the requested action",
+ default=False)
+ parser.add_option("-e", "--email_only", action="store_true",
+ dest="email_only", help="Send emails only, don't delete",
+ default=False)
+ parser.add_option("--smtp", default=None,
+ help="SMTP Server to use to send email. "
+ "Default: [read from galaxy ini file]")
+ parser.add_option("--fromaddr", default=None,
+ help="From address to use to send email. "
+ "Default: [read from galaxy ini file]")
+ (options, args) = parser.parse_args()
+ ini_file = args[0]
+
+ config_parser = ConfigParser.ConfigParser({'here': os.getcwd()})
+ config_parser.read(ini_file)
+ config_dict = {}
+ for key, value in config_parser.items("app:main"):
+ config_dict[key] = value
+
+ if options.smtp is not None:
+ config_dict['smtp_server'] = options.smtp
+ if config_dict.get('smtp_server') is None:
+ parser.error("SMTP Server must be specified as an option (--smtp) "
+ "or in the config file (smtp_server)")
+
+ if options.fromaddr is not None:
+ config_dict['error_email_to'] = options.fromaddr
+ if config_dict.get('error_email_to') is None:
+ parser.error("From address must be specified as an option "
+ "(--fromaddr) or in the config file "
+ "(error_email_to)")
+
+ scriptdir = os.path.dirname(os.path.abspath(__file__))
+ template_file = options.template
+ if template_file is None:
+ default_template = os.path.join(scriptdir,
+ 'admin_cleanup_deletion_template.txt')
+ sample_template_file = "%s.sample" % default_template
+ if os.path.exists(default_template):
+ template_file = default_template
+ elif os.path.exists(sample_template_file):
+ print "Copying %s to %s" % (sample_template_file, default_template)
+ shutil.copyfile(sample_template_file, default_template)
+ template_file = default_template
+ else:
+ parser.error("Default template (%s) or sample template (%s) not "
+ "found, please specify template as an option "
+ "(--template)." % default_template,
+ sample_template_file)
+ elif not os.path.exists(template_file):
+ parser.error("Specified template file (%s) not found." % template_file)
+
+ config = galaxy.config.Configuration(**config_dict)
+
+ app = CleanupDatasetsApplication(config)
+ cutoff_time = datetime.utcnow() - timedelta(days=options.days)
+ now = strftime("%Y-%m-%d %H:%M:%S")
+
+ print "##########################################"
+ print "\n# %s - Handling stuff older than %i days" % (now, options.days)
+
+ if options.info_only:
+ print "# Displaying info only ( --info_only )\n"
+ elif options.email_only:
+ print "# Sending emails only, not deleting ( --email_only )\n"
+
+ administrative_delete_datasets(
+ app, cutoff_time, options.days, tool_id=options.tool_id,
+ template_file=template_file, config=config,
+ email_only=options.email_only, info_only=options.info_only)
+ app.shutdown()
+ sys.exit(0)
+
+
+def administrative_delete_datasets(app, cutoff_time, cutoff_days,
+ tool_id, template_file,
+ config, email_only=False,
+ info_only=False):
+ # Marks dataset history association deleted and email users
+ start = time.time()
+ # Get HDAs older than cutoff time (ignore tool_id at this point)
+ # We really only need the id column here, but sqlalchemy barfs when
+ # trying to select only 1 column
+ hda_ids_query = sa.select(
+ (app.model.HistoryDatasetAssociation.table.c.id,
+ app.model.HistoryDatasetAssociation.table.c.deleted),
+ whereclause=and_(
+ app.model.Dataset.table.c.deleted == False,
+ app.model.HistoryDatasetAssociation.table.c.update_time
+ < cutoff_time,
+ app.model.HistoryDatasetAssociation.table.c.deleted == False),
+ from_obj=[sa.outerjoin(
+ app.model.Dataset.table,
+ app.model.HistoryDatasetAssociation.table)])
+
+ # Add all datasets associated with Histories to our list
+ hda_ids = []
+ hda_ids.extend(
+ [row.id for row in hda_ids_query.execute()])
+
+ # Now find the tool_id that generated the dataset (even if it was copied)
+ tool_matched_ids = []
+ if tool_id is not None:
+ for hda_id in hda_ids:
+ this_tool_id = _get_tool_id_for_hda(app, hda_id)
+ if this_tool_id is not None and tool_id in this_tool_id:
+ tool_matched_ids.append(hda_id)
+ hda_ids = tool_matched_ids
+
+ deleted_instance_count = 0
+ user_notifications = defaultdict(list)
+
+ # Process each of the Dataset objects
+ for hda_id in hda_ids:
+ user_query = sa.select(
+ [app.model.HistoryDatasetAssociation.table,
+ app.model.History.table,
+ app.model.User.table],
+ whereclause=and_(
+ app.model.HistoryDatasetAssociation.table.c.id == hda_id),
+ from_obj=[sa.join(app.model.User.table,
+ app.model.History.table)
+ .join(app.model.HistoryDatasetAssociation.table)],
+ use_labels=True)
+ for result in user_query.execute():
+ user_notifications[result[app.model.User.table.c.email]].append(
+ (result[app.model.HistoryDatasetAssociation.table.c.name],
+ result[app.model.History.table.c.name]))
+ deleted_instance_count += 1
+ if not info_only and not email_only:
+ # Get the HistoryDatasetAssociation objects
+ hda = app.sa_session.query(
+ app.model.HistoryDatasetAssociation).get(hda_id)
+ if not hda.deleted:
+ # Mark the HistoryDatasetAssociation as deleted
+ hda.deleted = True
+ app.sa_session.add(hda)
+ print ("Marked HistoryDatasetAssociation id %d as "
+ "deleted" % hda.id)
+ app.sa_session.flush()
+
+ emailtemplate = Template(filename=template_file)
+ for (email, dataset_list) in user_notifications.iteritems():
+ msgtext = emailtemplate.render(email=email,
+ datasets=dataset_list,
+ cutoff=cutoff_days)
+ subject = "Galaxy Server Cleanup " \
+ "- %d datasets DELETED" % len(dataset_list)
+ fromaddr = config.error_email_to
+ print ""
+ print "From: %s" % fromaddr
+ print "To: %s" % email
+ print "Subject: %s" % subject
+ print "----------"
+ print msgtext
+ if not info_only:
+ #msg = MIMEText(msgtext)
+ #msg['Subject'] = subject
+ #msg['From'] = 'noone(a)nowhere.com'
+ #msg['To'] = email
+ galaxy.util.send_mail(fromaddr, email, subject,
+ msgtext, config)
+ #s = smtplib.SMTP(smtp_server)
+ #s.sendmail(['lparsons(a)princeton.edu'], email, msg.as_string())
+ #s.quit()
+
+ stop = time.time()
+ print ""
+ print "Marked %d dataset instances as deleted" % deleted_instance_count
+ print "Total elapsed time: ", stop - start
+ print "##########################################"
+
+
+def _get_tool_id_for_hda(app, hda_id):
+ # TODO Some datasets don't seem to have an entry in jtod or a copied_from
+ if hda_id is None:
+ return None
+ job = app.sa_session.query(app.model.Job).\
+ join(app.model.JobToOutputDatasetAssociation).\
+ filter(app.model.JobToOutputDatasetAssociation.table.c.dataset_id ==
+ hda_id).first()
+ if job is not None:
+ return job.tool_id
+ else:
+ hda = app.sa_session.query(app.model.HistoryDatasetAssociation).\
+ get(hda_id)
+ return _get_tool_id_for_hda(app, hda.
+ copied_from_history_dataset_association_id)
+
+
+if __name__ == "__main__":
+ main()
diff -r 69bec229bdf85900f7d581e735aa69ee2cc2aeb9 -r 1e30cdb6d3b82a68da8daddc86411eecc33be444 scripts/cleanup_datasets/admin_cleanup_deletion_template.txt.sample
--- /dev/null
+++ b/scripts/cleanup_datasets/admin_cleanup_deletion_template.txt.sample
@@ -0,0 +1,11 @@
+Galaxy Server Cleanup
+---------------------
+The following datasets you own on Galaxy are older than ${cutoff} days and have been DELETED:
+
+% for dataset, history in datasets:
+ "${dataset}" in history "${history}"
+% endfor
+
+You may be able to undelete them by logging into Galaxy, navigating to the appropriate history, selecting "Include Deleted Datasets" from the history options menu, and clicking on the link to undelete each dataset that you want to keep. You can then download the datasets. Thank you for your understanding and cooperation in this necessary cleanup in order to keep the Galaxy resource available. Please don't hesitate to contact us if you have any questions.
+
+ -- Galaxy Administrators
diff -r 69bec229bdf85900f7d581e735aa69ee2cc2aeb9 -r 1e30cdb6d3b82a68da8daddc86411eecc33be444 scripts/cleanup_datasets/admin_cleanup_warning_template.txt.sample
--- /dev/null
+++ b/scripts/cleanup_datasets/admin_cleanup_warning_template.txt.sample
@@ -0,0 +1,11 @@
+Galaxy Server Cleanup
+---------------------
+The following datasets you own on Galaxy are older than ${cutoff} days and will be deleted soon. Be sure to download any datasets you need to keep.
+
+% for dataset, history in datasets:
+ "${dataset}" in history "${history}"
+% endfor
+
+Please contact us if you have any questions.
+
+ -- Galaxy Administrators
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/82833b9b6f4f/
Changeset: 82833b9b6f4f
User: Dave Bouvier
Date: 2013-09-09 20:48:35
Summary: Make the "keep tool dependencies" feature for the install and test framework optional.
Affected #: 3 files
diff -r ed3c786622741814a07589a1e642cca75c51c822 -r 82833b9b6f4fbdf1408864bbf7781a02200ca633 test/install_and_test_tool_shed_repositories/base/twilltestcase.py
--- a/test/install_and_test_tool_shed_repositories/base/twilltestcase.py
+++ b/test/install_and_test_tool_shed_repositories/base/twilltestcase.py
@@ -136,11 +136,17 @@
break
time.sleep( 1 )
- def uninstall_repository( self, installed_repository ):
+ def uninstall_repository( self, installed_repository, deactivate_only=False ):
url = '/admin_toolshed/deactivate_or_uninstall_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_url( url )
- tc.fv ( 1, "remove_from_disk", 'false' )
+ if deactivate_only:
+ tc.fv ( 1, "remove_from_disk", 'false' )
+ else:
+ tc.fv ( 1, "remove_from_disk", 'true' )
tc.submit( 'deactivate_or_uninstall_repository_button' )
strings_displayed = [ 'The repository named' ]
- strings_displayed.append( 'has been deactivated' )
- self.check_for_strings( strings_displayed, strings_not_displayed=[] )
+ if deactivate_only:
+ strings_displayed.append( 'has been deactivated' )
+ else:
+ strings_displayed.append( 'has been uninstalled' )
+ self.check_for_strings( strings_displayed, strings_not_displayed=[] )
\ No newline at end of file
diff -r ed3c786622741814a07589a1e642cca75c51c822 -r 82833b9b6f4fbdf1408864bbf7781a02200ca633 test/install_and_test_tool_shed_repositories/functional/test_install_repositories.py
--- a/test/install_and_test_tool_shed_repositories/functional/test_install_repositories.py
+++ b/test/install_and_test_tool_shed_repositories/functional/test_install_repositories.py
@@ -16,7 +16,7 @@
# Install the repository through the web interface using twill.
self.install_repository( repository_info_dict )
- def do_uninstallation( self, repository_info_dict ):
+ def do_uninstallation( self, repository_info_dict, deactivate_only=False ):
self.logout()
self.login( email='test(a)bx.psu.edu', username='test' )
admin_user = test_db_util.get_user( 'test(a)bx.psu.edu' )
@@ -27,7 +27,7 @@
repository_info_dict[ 'changeset_revision' ] )
admin_user_private_role = test_db_util.get_private_role( admin_user )
# Uninstall the repository through the web interface using twill.
- self.uninstall_repository( repository )
+ self.uninstall_repository( repository, deactivate_only )
def generate_install_method( repository_dict=None ):
"""Generate abstract test cases for the defined list of repositories."""
@@ -55,7 +55,7 @@
new_class_obj = new.classobj( name, baseclasses, namespace )
G[ name ] = new_class_obj
-def generate_uninstall_method( repository_dict=None ):
+def generate_uninstall_method( repository_dict=None, deactivate_only=False ):
"""Generate abstract test cases for the defined list of repositories."""
if repository_dict is None:
return
@@ -63,20 +63,20 @@
G = globals()
# Eliminate all previous tests from G.
for key, val in G.items():
- if key.startswith( 'TestInstallRepository_' ) or key.startswith( 'TestUninstallRepository_' ) or key.startswith( 'TestForTool_' ):
+ if key.startswith( 'TestInstallRepository_' ) or key.startswith( 'TestForTool_' ):
del G[ key ]
# Create a new subclass with a method named install_repository_XXX that installs the repository specified by the provided dict.
- name = "TestUninstallRepository_" + repository_dict[ 'name' ]
+ name = "TestUninstallRepository_%s_%s" % ( repository_dict[ 'name' ], repository_dict[ 'changeset_revision' ] )
baseclasses = ( InstallTestRepositories, )
namespace = dict()
def make_uninstall_method( repository_dict ):
def test_install_repository( self ):
- self.do_uninstallation( repository_dict )
+ self.do_uninstallation( repository_dict, deactivate_only )
return test_install_repository
test_method = make_uninstall_method( repository_dict )
test_method.__doc__ = "Uninstall the repository %s." % repository_dict[ 'name' ]
- namespace[ 'uninstall_repository_%s' % repository_dict[ 'name' ] ] = test_method
+ namespace[ 'uninstall_repository_%s_%s' % ( repository_dict[ 'name' ], repository_dict[ 'changeset_revision' ] ) ] = test_method
# The new.classobj function returns a new class object, with name name, derived
# from baseclasses (which should be a tuple of classes) and with namespace dict.
new_class_obj = new.classobj( name, baseclasses, namespace )
- G[ name ] = new_class_obj
+ G[ name ] = new_class_obj
\ No newline at end of file
diff -r ed3c786622741814a07589a1e642cca75c51c822 -r 82833b9b6f4fbdf1408864bbf7781a02200ca633 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-# NOTE: This script cannot be run directly, because it needs to have test/functional/test_toolbox.py in sys.argv in
+# NOTE: This script cannot be run directly, because it needs to have test/functional/test_toolbox.py in sys.argv in
# order to run functional tests on repository tools after installation. The install_and_test_tool_shed_repositories.sh
# will execute this script with the appropriate parameters.
@@ -157,7 +157,7 @@
<tables></tables>
'''
-
+
# The tool shed url and api key must be set for this script to work correctly. Additionally, if the tool shed url does not
# point to one of the defaults, the GALAXY_INSTALL_TEST_TOOL_SHEDS_CONF needs to point to a tool sheds configuration file
# that contains a definition for that tool shed.
@@ -169,7 +169,7 @@
if tool_shed_api_key is None:
print "This script requires the GALAXY_INSTALL_TEST_TOOL_SHED_API_KEY environment variable to be set and non-empty."
exit( 1 )
-
+
if galaxy_tool_shed_url is None:
print "This script requires the GALAXY_INSTALL_TEST_TOOL_SHED_URL environment variable to be set and non-empty."
exit( 1 )
@@ -188,12 +188,12 @@
testing_single_repository[ 'changeset_revision' ] = os.environ[ 'repository_revision' ]
else:
testing_single_repository[ 'changeset_revision' ] = None
-
+
class ReportResults( Plugin ):
'''Simple Nose plugin to record the IDs of all tests run, regardless of success.'''
name = "reportresults"
passed = dict()
-
+
def options( self, parser, env=os.environ ):
super( ReportResults, self ).options( parser, env=env )
@@ -224,7 +224,7 @@
return passed_tests
return []
-def execute_uninstall_method( app ):
+def execute_uninstall_method( app, deactivate_only=False ):
# Clean out any generated tests.
remove_generated_tests( app )
sa_session = app.model.context.current
@@ -244,11 +244,11 @@
log.debug( 'Changeset revision %s of repository %s queued for uninstallation.', changeset_revision, name )
repository_dict = dict( name=name, owner=owner, changeset_revision=changeset_revision )
# Generate a test method to uninstall this repository through the embedded Galaxy application's web interface.
- test_install_repositories.generate_uninstall_method( repository_dict )
+ test_install_repositories.generate_uninstall_method( repository_dict, deactivate_only )
# Set up nose to run the generated uninstall method as a functional test.
test_config = nose.config.Config( env=os.environ, plugins=nose.plugins.manager.DefaultPluginManager() )
test_config.configure( sys.argv )
- # Run the uninstall method. This method uses the Galaxy web interface to uninstall the previously installed
+ # Run the uninstall method. This method uses the Galaxy web interface to uninstall the previously installed
# repository and delete it from disk.
result, _ = run_tests( test_config )
success = result.wasSuccessful()
@@ -258,7 +258,7 @@
if 'api' in parts and parts.index( 'api' ) != 0:
parts.pop( parts.index( 'api' ) )
parts.insert( 0, 'api' )
- elif 'api' not in parts:
+ elif 'api' not in parts:
parts.insert( 0, 'api' )
url = url_join( base, *parts )
if key:
@@ -318,8 +318,8 @@
NOTE: If the tool shed URL specified in any dict is not present in the tool_sheds_conf.xml, the installation will fail.
'''
assert tool_shed_api_key is not None, 'Cannot proceed without tool shed API key.'
- params = urllib.urlencode( dict( do_not_test='false',
- downloadable='true',
+ params = urllib.urlencode( dict( do_not_test='false',
+ downloadable='true',
malicious='false',
includes_tools='true',
skip_tool_test='false' ) )
@@ -334,7 +334,7 @@
repository_info_dict = get_repository_info_from_api( galaxy_tool_shed_url, repository_to_install_dict )
if repository_info_dict[ 'latest_revision' ] == '000000000000':
continue
- owner = repository_info_dict[ 'owner' ]
+ owner = repository_info_dict[ 'owner' ]
name = repository_info_dict[ 'name' ]
changeset_revision = repository_to_install_dict[ 'changeset_revision' ]
repository_id = repository_to_install_dict[ 'repository_id' ]
@@ -343,9 +343,9 @@
# and therefore do not need to be checked. If they are undeleted, this script will then test them the next time it runs.
if repository_info_dict[ 'deleted' ]:
log.info( "Skipping revision %s of repository id %s (%s/%s) since the repository is deleted...",
- changeset_revision,
- repository_id,
- name,
+ changeset_revision,
+ repository_id,
+ name,
owner )
continue
# Now merge the dict returned from /api/repository_revisions with the detailed dict we just retrieved.
@@ -360,8 +360,8 @@
else:
skipped_previous = ''
if testing_single_repository:
- log.info( 'Testing single repository with name %s and owner %s.',
- testing_single_repository[ 'name' ],
+ log.info( 'Testing single repository with name %s and owner %s.',
+ testing_single_repository[ 'name' ],
testing_single_repository[ 'owner' ])
for repository_to_install in detailed_repository_list:
if repository_to_install[ 'name' ] == testing_single_repository[ 'name' ] \
@@ -415,7 +415,7 @@
[
{
'reason': The default reason or the reason specified in this section,
- 'repositories':
+ 'repositories':
[
( name, owner, changeset revision if changeset revision else None ),
( name, owner, changeset revision if changeset revision else None )
@@ -423,7 +423,7 @@
},
{
'reason': The default reason or the reason specified in this section,
- 'repositories':
+ 'repositories':
[
( name, owner, changeset revision if changeset revision else None ),
( name, owner, changeset revision if changeset revision else None )
@@ -473,8 +473,8 @@
return update( tool_shed_api_key, '%s' % ( url_join( galaxy_tool_shed_url, 'api', 'repository_revisions', metadata_id ) ), params, return_formatted=False )
def remove_generated_tests( app ):
- # Delete any configured tool functional tests from the test_toolbox.__dict__, otherwise nose will find them
- # and try to re-run the tests after uninstalling the repository, which will cause false failure reports,
+ # Delete any configured tool functional tests from the test_toolbox.__dict__, otherwise nose will find them
+ # and try to re-run the tests after uninstalling the repository, which will cause false failure reports,
# since the test data has been deleted from disk by now.
tests_to_delete = []
tools_to_delete = []
@@ -525,7 +525,7 @@
# ---- Configuration ------------------------------------------------------
galaxy_test_host = os.environ.get( 'GALAXY_INSTALL_TEST_HOST', default_galaxy_test_host )
galaxy_test_port = os.environ.get( 'GALAXY_INSTALL_TEST_PORT', str( default_galaxy_test_port_max ) )
-
+
tool_path = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_PATH', 'tools' )
if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_galaxy_locales
@@ -536,6 +536,11 @@
if not os.path.isdir( galaxy_test_tmp_dir ):
os.mkdir( galaxy_test_tmp_dir )
galaxy_test_proxy_port = None
+ # Allow the option to keep or delete tool dependencies when a repository has been tested.
+ if 'GALAXY_INSTALL_TEST_KEEP_TOOL_DEPENDENCIES' in os.environ:
+ deactivate_only = True
+ else:
+ deactivate_only = False
# Set up the configuration files for the Galaxy instance.
shed_tool_data_table_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_DATA_TABLE_CONF', os.path.join( galaxy_test_tmp_dir, 'test_shed_tool_data_table_conf.xml' ) )
galaxy_tool_data_table_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DATA_TABLE_CONF', tool_data_table_conf )
@@ -553,19 +558,19 @@
# Configure the database connection and path.
if 'GALAXY_INSTALL_TEST_DBPATH' in os.environ:
galaxy_db_path = os.environ[ 'GALAXY_INSTALL_TEST_DBPATH' ]
- else:
+ else:
tempdir = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
galaxy_db_path = os.path.join( tempdir, 'database' )
# Configure the paths Galaxy needs to install and test tools.
galaxy_file_path = os.path.join( galaxy_db_path, 'files' )
new_repos_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
galaxy_tempfiles = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
- galaxy_shed_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir, prefix='shed_tools' )
+ galaxy_shed_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir, prefix='shed_tools' )
galaxy_migrated_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
# Set up the tool dependency path for the Galaxy instance.
tool_dependency_dir = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR', None )
if tool_dependency_dir is None:
- tool_dependency_dir = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
+ tool_dependency_dir = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
os.environ[ 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR' ] = tool_dependency_dir
os.environ[ 'GALAXY_TOOL_DEPENDENCY_DIR' ] = tool_dependency_dir
if 'GALAXY_INSTALL_TEST_DBURI' in os.environ:
@@ -639,13 +644,13 @@
# Set the global_conf[ '__file__' ] option to the location of the temporary .ini file, which gets passed to set_metadata.sh.
kwargs[ 'global_conf' ] = get_webapp_global_conf()
kwargs[ 'global_conf' ][ '__file__' ] = galaxy_config_file
- # ---- Build Galaxy Application --------------------------------------------------
+ # ---- Build Galaxy Application --------------------------------------------------
if not database_connection.startswith( 'sqlite://' ):
kwargs[ 'database_engine_option_max_overflow' ] = '20'
kwargs[ 'database_engine_option_pool_size' ] = '10'
kwargs[ 'config_file' ] = galaxy_config_file
app = UniverseApplication( **kwargs )
-
+
log.info( "Embedded Galaxy application started" )
# ---- Run galaxy webserver ------------------------------------------------------
@@ -703,7 +708,7 @@
if additional_tool_data_tables:
app.tool_data_tables.add_new_entries_from_config_file( config_filename=additional_tool_data_tables,
tool_data_path=additional_tool_data_path,
- shed_tool_data_table_config=None,
+ shed_tool_data_table_config=None,
persist=False )
# Initialize some variables for the summary that will be printed to stdout.
repositories_passed = []
@@ -784,14 +789,14 @@
# Iterate through the list of repositories defined not to be installed. This should be a list of dicts in the following format:
# {
# 'reason': The default reason or the reason specified in this section,
- # 'repositories':
+ # 'repositories':
# [
# ( name, owner, changeset revision if changeset revision else None ),
# ( name, owner, changeset revision if changeset revision else None )
# ]
# },
# If changeset revision is None, that means the entire repository is excluded from testing, otherwise only the specified
- # revision should be skipped.
+ # revision should be skipped.
# TODO: When a repository is selected to be skipped, use the API to update the tool shed with the defined skip reason.
skip_this_repository = False
skip_because = None
@@ -831,11 +836,11 @@
log.exception( 'Error getting installed repository.' )
success = False
pass
- # If the installation succeeds, configure and run functional tests for this repository. This is equivalent to
+ # If the installation succeeds, configure and run functional tests for this repository. This is equivalent to
# sh run_functional_tests.sh -installed
if success:
log.debug( 'Installation of %s succeeded, running all defined functional tests.', name )
- # Generate the shed_tools_dict that specifies the location of test data contained within this repository. If the repository
+ # Generate the shed_tools_dict that specifies the location of test data contained within this repository. If the repository
# does not have a test-data directory, this will return has_test_data = False, and we will set the do_not_test flag to True,
# and the tools_functionally_correct flag to False, as well as updating tool_test_results.
file( galaxy_shed_tools_dict, 'w' ).write( to_json_string( dict() ) )
@@ -876,8 +881,8 @@
# 'tool_dependencies':
# [
# {
- # 'type': 'Type of tool dependency, e.g. package, set_environment, etc.',
- # 'name': 'Name of the tool dependency.',
+ # 'type': 'Type of tool dependency, e.g. package, set_environment, etc.',
+ # 'name': 'Name of the tool dependency.',
# 'version': 'Version if this is a package, otherwise blank.',
# 'error_message': 'The error message returned when installation was attempted.',
# },
@@ -885,8 +890,8 @@
# 'repository_dependencies':
# [
# {
- # 'tool_shed': 'The tool shed that this repository was installed from.',
- # 'name': 'The name of the repository that failed to install.',
+ # 'tool_shed': 'The tool shed that this repository was installed from.',
+ # 'name': 'The name of the repository that failed to install.',
# 'owner': 'Owner of the failed repository.',
# 'changeset_revision': 'Changeset revision of the failed repository.',
# 'error_message': 'The error message that was returned when the repository failed to install.',
@@ -895,8 +900,8 @@
# 'current_repository':
# [
# {
- # 'tool_shed': 'The tool shed that this repository was installed from.',
- # 'name': 'The name of the repository that failed to install.',
+ # 'tool_shed': 'The tool shed that this repository was installed from.',
+ # 'name': 'The name of the repository that failed to install.',
# 'owner': 'Owner of the failed repository.',
# 'changeset_revision': 'Changeset revision of the failed repository.',
# 'error_message': 'The error message that was returned when the repository failed to install.',
@@ -919,7 +924,7 @@
# },
# ]
# "not_tested":
- # {
+ # {
# "reason": "The Galaxy development team has determined that this repository should not be installed and tested by the automated framework."
# }
# }
@@ -949,14 +954,14 @@
params[ 'tools_functionally_correct' ] = False
params[ 'missing_test_components' ] = True
params[ 'do_not_test' ] = str( set_do_not_test )
- register_test_result( galaxy_tool_shed_url,
- metadata_revision_id,
- repository_status,
- repository_info_dict,
+ register_test_result( galaxy_tool_shed_url,
+ metadata_revision_id,
+ repository_status,
+ repository_info_dict,
params )
# Run the cleanup method. This removes tool functional test methods from the test_toolbox module and uninstalls the
# repository using Twill.
- execute_uninstall_method( app )
+ execute_uninstall_method( app, deactivate_only )
# Set the test_toolbox.toolbox module-level variable to the new app.toolbox.
test_toolbox.toolbox = app.toolbox
repositories_failed.append( dict( name=name, owner=owner, changeset_revision=changeset_revision ) )
@@ -968,15 +973,15 @@
log.error( 'Updating repository and skipping functional tests.' )
# In keeping with the standard display layout, add the error message to the dict for each tool individually.
for dependency in failed_tool_dependencies:
- test_result = dict( type=dependency.type,
- name=dependency.name,
+ test_result = dict( type=dependency.type,
+ name=dependency.name,
version=dependency.version,
error_message=dependency.error_message )
repository_status[ 'installation_errors' ][ 'tool_dependencies' ].append( test_result )
for dependency in repository.repository_dependencies_with_installation_errors:
- test_result = dict( tool_shed=dependency.tool_shed,
- name=dependency.name,
- owner=dependency.owner,
+ test_result = dict( tool_shed=dependency.tool_shed,
+ name=dependency.name,
+ owner=dependency.owner,
changeset_revision=dependency.changeset_revision,
error_message=dependency.error_message )
repository_status[ 'installation_errors' ][ 'repository_dependencies' ].append( test_result )
@@ -984,14 +989,19 @@
params[ 'tools_functionally_correct' ] = False
params[ 'do_not_test' ] = False
params[ 'test_install_error' ] = True
- register_test_result( galaxy_tool_shed_url,
- metadata_revision_id,
- repository_status,
- repository_info_dict,
+ register_test_result( galaxy_tool_shed_url,
+ metadata_revision_id,
+ repository_status,
+ repository_info_dict,
params )
# Run the cleanup method. This removes tool functional test methods from the test_toolbox module and uninstalls the
- # repository using Twill.
- execute_uninstall_method( app )
+    # repository using Twill. If tool dependencies failed installation, select to uninstall instead of deactivate,
+ # to make way for the next attempt. Otherwise, default to the value determined by the environment variable
+ # GALAXY_INSTALL_TEST_KEEP_TOOL_DEPENDENCIES.
+ if failed_tool_dependencies:
+ execute_uninstall_method( app, deactivate_only=False )
+ else:
+ execute_uninstall_method( app, deactivate_only=deactivate_only )
# Set the test_toolbox.toolbox module-level variable to the new app.toolbox.
test_toolbox.toolbox = app.toolbox
repositories_failed_install.append( dict( name=name, owner=owner, changeset_revision=changeset_revision ) )
@@ -1007,7 +1017,7 @@
os.environ[ 'GALAXY_TEST_PORT' ] = galaxy_test_port
# Set the module-level variable 'toolbox', so that test.functional.test_toolbox will generate the appropriate test methods.
test_toolbox.toolbox = app.toolbox
- # Generate the test methods for this installed repository. We need to pass in True here, or it will look
+ # Generate the test methods for this installed repository. We need to pass in True here, or it will look
# in $GALAXY_HOME/test-data for test data, which may result in missing or invalid test files.
test_toolbox.build_tests( testing_shed_tools=True )
# Set up nose to run the generated functional tests.
@@ -1037,10 +1047,10 @@
params[ 'tools_functionally_correct' ] = True
params[ 'do_not_test' ] = False
params[ 'test_install_error' ] = False
- register_test_result( galaxy_tool_shed_url,
- metadata_revision_id,
- repository_status,
- repository_info_dict,
+ register_test_result( galaxy_tool_shed_url,
+ metadata_revision_id,
+ repository_status,
+ repository_info_dict,
params )
log.debug( 'Revision %s of repository %s installed and passed functional tests.', changeset_revision, name )
else:
@@ -1081,7 +1091,7 @@
if output_type in tmp_output:
test_status[ output_type ] = '\n'.join( tmp_output[ output_type ] )
repository_status[ 'failed_tests' ].append( test_status )
- # Call the register_test_result method, which executes a PUT request to the repository_revisions API controller with the outcome
+ # Call the register_test_result method, which executes a PUT request to the repository_revisions API controller with the outcome
# of the tests, and updates tool_test_results with the relevant log data.
# This also sets the do_not_test and tools_functionally correct flags to the appropriate values, and updates the time_last_tested
# field to today's date.
@@ -1090,19 +1100,19 @@
params[ 'tools_functionally_correct' ] = False
params[ 'test_install_error' ] = False
params[ 'do_not_test' ] = str( set_do_not_test )
- register_test_result( galaxy_tool_shed_url,
- metadata_revision_id,
- repository_status,
- repository_info_dict,
+ register_test_result( galaxy_tool_shed_url,
+ metadata_revision_id,
+ repository_status,
+ repository_info_dict,
params )
log.debug( 'Revision %s of repository %s installed successfully, but did not pass functional tests.',
- changeset_revision, name )
+ changeset_revision, name )
# Run the uninstall method. This removes tool functional test methods from the test_toolbox module and uninstalls the
# repository using Twill.
log.debug( 'Uninstalling changeset revision %s of repository %s',
- repository_info_dict[ 'changeset_revision' ],
+ repository_info_dict[ 'changeset_revision' ],
repository_info_dict[ 'name' ] )
- success = execute_uninstall_method( app )
+ success = execute_uninstall_method( app, deactivate_only )
if not success:
log.error( 'Repository %s failed to uninstall.', repository_info_dict[ 'name' ] )
# Set the test_toolbox.toolbox module-level variable to the new app.toolbox.
@@ -1115,28 +1125,28 @@
except:
log.exception( 'Unable to uninstall, no installed repository found.' )
continue
- test_result = dict( tool_shed=repository.tool_shed,
- name=repository.name,
- owner=repository.owner,
+ test_result = dict( tool_shed=repository.tool_shed,
+ name=repository.name,
+ owner=repository.owner,
changeset_revision=repository.changeset_revision,
error_message=repository.error_message )
repository_status[ 'installation_errors' ][ 'repository_dependencies' ].append( test_result )
params[ 'tools_functionally_correct' ] = False
params[ 'test_install_error' ] = True
params[ 'do_not_test' ] = False
- register_test_result( galaxy_tool_shed_url,
- metadata_revision_id,
- repository_status,
- repository_info_dict,
+ register_test_result( galaxy_tool_shed_url,
+ metadata_revision_id,
+ repository_status,
+ repository_info_dict,
params )
- success = execute_uninstall_method( app )
+ success = execute_uninstall_method( app, deactivate_only )
if not success:
log.error( 'Repository %s failed to uninstall.', repository_info_dict[ 'name' ] )
repositories_failed_install.append( dict( name=name, owner=owner, changeset_revision=changeset_revision ) )
log.debug( 'Repository %s failed to install correctly.', repository_info_dict[ 'name' ] )
except:
log.exception( "Failure running tests" )
-
+
log.info( "Shutting down" )
# ---- Tear down -----------------------------------------------------------
# Gracefully shut down the embedded web server and UniverseApplication.
@@ -1191,7 +1201,7 @@
# Normally, the value of 'success' would determine whether this test suite is marked as passed or failed
# in the automated buildbot framework. However, due to the procedure used here, we only want to report
    # failure if a repository fails to install correctly. Therefore, we have overridden the value of 'success'
- # here based on what actions the script has executed.
+ # here based on what actions the script has executed.
if success:
return 0
else:
@@ -1202,4 +1212,4 @@
print "####################################################################################"
print "# %s - running repository installation and testing script." % now
print "####################################################################################"
- sys.exit( main() )
+ sys.exit( main() )
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.