7 new commits in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/be5107c0321d/ Changeset: be5107c0321d Branch: search User: kellrott Date: 2013-01-15 07:39:31 Summary: Adding in the starting work for a unified search system. Affected #: 7 files diff -r 8d2fe975c13c33439167903653c6c06217cab764 -r be5107c0321d1a7e7ab0221e7feb711932488bb0 lib/galaxy/model/search.py --- /dev/null +++ b/lib/galaxy/model/search.py @@ -0,0 +1,242 @@ +""" +Classes for implementing search methods for various data models +""" + +import logging +from galaxy.model.orm import * +from galaxy.model import * + +from sqlalchemy import or_, and_ + +log = logging.getLogger( __name__ ) + + +""" + +The search model is a simplified view of the SQL schema. A search domain +typically covers a base data table, but its fields can be constructs of multiple +tables that are joined together. For example, a History dataset may have tags associated with it. +The Search class simplifies the joining process between the history dataset and the tags table, +and presents the 'tag' field as a single concept that can be filtered against. + + +""" + +class SearchField(object): + + def __init__(self, name): + self.name = name + self.other = None + self.mode = None + + def __eq__(self, other ): + self.other = other + self.mode = "==" + return self + + def like(self, other): + self.other = other + self.mode = "like" + return self + +class QueryBaseClass(object): + OUTPUT_COLUMNS = [] + + def __init__(self, base_query): + self.query = base_query + self.do_query = False + + def get_results(self, force_query=False): + if self.query is not None and (force_query or self.do_query): + print self.query + for row in self.query.distinct().all(): + out = {} + for col in self.OUTPUT_COLUMNS: + out[col] = getattr(row, col) + yield out + + +class SearchBaseClass(object): + + def get_field(self, name): + for f_obj in self.FIELDS: + if isinstance(f_obj, SearchField): + if f_obj.name == name: + return f_obj + else: + print "Not SearchField", f_obj + return None + + @staticmethod + def search(trans): + return None + + +################## +#Library Searching +################## + +class LibraryQuery(QueryBaseClass): + OUTPUT_COLUMNS = [ 'extended_metadata', 'name', 'id' ] + def filter(self, arg): + if arg.name == 'extended_metadata': + self.do_query = True + self.query = self.query.join( ExtendedMetadata ) + ex_meta = arg.other + for f in ex_meta: + alias = aliased( ExtendedMetadataIndex ) + self.query = self.query.filter( + and_( + ExtendedMetadata.id == alias.extended_metadata_id, + alias.path == "/" + f, + alias.value == str(ex_meta[f]) + ) + ) + + +class LibrarySearch(SearchBaseClass): + FIELDS = [ + SearchField("name"), + SearchField("id"), + SearchField("extended_metadata") + ] + + @staticmethod + def search(trans): + query = trans.sa_session.query( LibraryDatasetDatasetAssociation ) + return LibraryQuery(query) + +################## +#History Dataset Searching +################## + + +class HistoryDatasetQuery(QueryBaseClass): + OUTPUT_COLUMNS = ['name', 'id'] + + def filter(self, arg): + if arg.name == 'name': + if arg.mode == "==": + self.do_query = True + self.query = self.query.filter( HistoryDatasetAssociation.name == arg.other ) + if arg.mode == "like": + self.do_query = True + self.query = self.query.filter( HistoryDatasetAssociation.name.like(arg.other) ) + +class HistoryDatasetSearch(SearchBaseClass): + FIELDS = [ + SearchField("name") + ] + @staticmethod + def search(trans): + query = trans.sa_session.query( 
HistoryDatasetAssociation ) + return HistoryDatasetQuery(query) + + +################## +#History Searching +################## + + +class HistoryQuery(QueryBaseClass): + OUTPUT_COLUMNS = ['name', 'id'] + + def filter(self, arg): + if arg.name == 'name': + if arg.mode == "==": + self.do_query = True + self.query = self.query.filter( History.name == arg.other ) + if arg.mode == "like": + self.do_query = True + self.query = self.query.filter( History.name.like(arg.other) ) + + if arg.name == 'tag': + self.do_query = True + self.query = self.query.filter( + History.id == HistoryTagAssociation.history_id + ) + tmp = arg.other.split(":") + self.query = self.query.filter( HistoryTagAssociation.user_tname == tmp[0] ) + if len(tmp) > 1: + self.query = self.query.filter( HistoryTagAssociation.user_value == tmp[1] ) + + if arg.name == 'annotation': + if arg.mode == "==": + self.do_query = True + self.query = self.query.filter( and_( + HistoryAnnotationAssociation.history_id == History.id, + HistoryAnnotationAssociation.annotation == arg.other + ) + ) + + if arg.mode == "like": + self.do_query = True + self.query = self.query.filter( and_( + HistoryAnnotationAssociation.history_id == History.id, + HistoryAnnotationAssociation.annotation.like( arg.other ) + ) + ) + + +class HistorySearch(SearchBaseClass): + FIELDS = [ + SearchField("name"), + SearchField("tag"), + SearchField("annotation") + ] + + @staticmethod + def search(trans): + query = trans.sa_session.query( History ) + return HistoryQuery(query) + +################## +#Workflow Searching +################## + + + +class WorkflowQuery(QueryBaseClass): + OUTPUT_COLUMNS = ['name', 'id'] + + def filter(self, arg): + if arg.name == 'name': + self.do_query = True + self.query = self.query.filter( StoredWorkflow.name == arg.other ) + if arg.name == 'tag': + self.do_query = True + self.query = self.query.filter( and_( + Tag.name == arg.other, + Tag.id == StoredWorkflowTagAssociation.tag_id, + StoredWorkflowTagAssociation.stored_workflow_id == StoredWorkflow.id ) + ) + self.query = self.query.filter( + Workflow.id == StoredWorkflowTagAssociation.workflow_id + ) + tmp = arg.other.split(":") + self.query = self.query.filter( StoredWorkflowTagAssociation.user_tname == tmp[0] ) + if len(tmp) > 1: + self.query = self.query.filter( StoredWorkflowTagAssociation.user_value == tmp[1] ) + + +class WorkflowSearch(SearchBaseClass): + + FIELDS = [ + SearchField("name"), + SearchField("tag") + ] + + @staticmethod + def search(trans): + query = trans.sa_session.query( StoredWorkflow ) + return WorkflowQuery(query) + + +search_mapping = { + 'library' : LibrarySearch, + 'history_dataset' : HistoryDatasetSearch, + 'history' : HistorySearch, + 'workflow' : WorkflowSearch +} + + diff -r 8d2fe975c13c33439167903653c6c06217cab764 -r be5107c0321d1a7e7ab0221e7feb711932488bb0 lib/galaxy/webapps/galaxy/api/search.py --- /dev/null +++ b/lib/galaxy/webapps/galaxy/api/search.py @@ -0,0 +1,113 @@ +""" +API for searching Galaxy Datasets +""" +import logging, os, string, shutil, urllib, re, socket +from cgi import escape, FieldStorage +from galaxy import util, datatypes, jobs, web, util +from galaxy.web.base.controller import * +from galaxy.util.sanitize_html import sanitize_html +from galaxy.model.orm import * +from galaxy.model.search import * + + +from galaxy.model import * + +from sqlalchemy import or_, and_ + +log = logging.getLogger( __name__ ) + +class SearchController( BaseAPIController ): + """ + The search API accepts a POST with a payload of: + - search_type : The type of 
data to be searched, "library", "history_dataset", "history", "workflow" + - result_fields : The fields in the return structure that should be populated, includes: + "name", "id", "extended_metadata", "annotation", "tags" + - query : The query structure to be used + + Example query data structures: + + Find elements such that name == "test" + { + "name" : "test" + } + + + Find elements such that annotation contains the word 'world' + + { + "annotation" : { "$like" : "%world%" } + } + + Find elements such that the extended metadata field dataSubType/@id == 'geneExp' + + { + "extended_metadata" : { + "dataSubType/@id" : "geneExp" + } + } + + """ + + FIELD_NAME = "name" + FIELD_ID = "id" + FIELD_EXTENDED_METADATA = "extended_metadata" + FIELD_ANNOTATION = "annotation" + FIELD_TAGS = "tags" + + @web.expose_api + def create( self, trans, payload, **kwd ): + """ + POST /api/search + Do a search of the various elements of Galaxy. + """ + domains = payload.get("domain", []) + result_fields = payload.get("result_fields", [self.FIELD_EXTENDED_METADATA]) + query = payload.get("query", {}) + print payload + out = [] + for domain in domains: + if domain in search_mapping: + search_base = search_mapping[domain]() + search_query = search_base.search(trans) + for field in query: + search_field = search_base.get_field(field) + if search_field is not None: + if isinstance(query[field], dict) and '$like' in query[field]: + search_query.filter( search_field.like(query[field]['$like']) ) + else: + search_query.filter( search_field == query[field] ) + out += list(search_query.get_results()) + + return self._create_response(trans, out, result_fields) + + def _create_response(self, trans, rows, result_fields): + out = [] + for row in rows: + o = {} + o[ self.FIELD_ID ] = trans.security.encode_id(row['id']) + o[ self.FIELD_NAME ] = row['name'] + + if self.FIELD_EXTENDED_METADATA in result_fields and 'extended_metadata' in row: + o[ self.FIELD_EXTENDED_METADATA ] = row['extended_metadata.data'] + + if self.FIELD_ANNOTATION in result_fields: + try: + o[self.FIELD_ANNOTATION] = [] + for a in row['annotations']: + o[self.FIELD_ANNOTATION].append(a.annotation) + except AttributeError: + del o[self.FIELD_ANNOTATION] + + if self.FIELD_TAGS in result_fields: + try: + o[self.FIELD_TAGS] = [] + for t in row['tags']: + s = t.user_tname + if t.user_value is not None: + s += ":" + t.user_value + o[self.FIELD_TAGS].append(s) + except AttributeError: + del o[self.FIELD_TAGS] + + out.append(o) + return out diff -r 8d2fe975c13c33439167903653c6c06217cab764 -r be5107c0321d1a7e7ab0221e7feb711932488bb0 lib/galaxy/webapps/galaxy/buildapp.py --- a/lib/galaxy/webapps/galaxy/buildapp.py +++ b/lib/galaxy/webapps/galaxy/buildapp.py @@ -59,6 +59,7 @@ webapp.add_route( '/u/:username/h/:slug', controller='history', action='display_by_username_and_slug' ) webapp.add_route( '/u/:username/w/:slug', controller='workflow', action='display_by_username_and_slug' ) webapp.add_route( '/u/:username/v/:slug', controller='visualization', action='display_by_username_and_slug' ) + webapp.add_route( '/search', controller='search', action='index' ) # Add the web API webapp.add_api_controllers( 'galaxy.webapps.galaxy.api', app ) @@ -122,6 +123,7 @@ webapp.api_mapper.resource( 'visualization', 'visualizations', path_prefix='/api' ) webapp.api_mapper.resource( 'workflow', 'workflows', path_prefix='/api' ) webapp.api_mapper.resource_with_deleted( 'history', 'histories', path_prefix='/api' ) + webapp.api_mapper.resource( 'search', 'search', path_prefix='/api' ) 
webapp.api_mapper.resource( 'configuration', 'configuration', path_prefix='/api' ) #webapp.api_mapper.connect( 'run_workflow', '/api/workflow/{workflow_id}/library/{library_id}', controller='workflows', action='run', workflow_id=None, library_id=None, conditions=dict(method=["GET"]) ) diff -r 8d2fe975c13c33439167903653c6c06217cab764 -r be5107c0321d1a7e7ab0221e7feb711932488bb0 lib/galaxy/webapps/galaxy/controllers/search.py --- /dev/null +++ b/lib/galaxy/webapps/galaxy/controllers/search.py @@ -0,0 +1,18 @@ + +""" +Contains the main interface in the Universe class +""" +import logging, os, string, shutil, urllib, re, socket +from cgi import escape, FieldStorage +from galaxy import util, datatypes, jobs, web, util +from galaxy.web.base.controller import * +from galaxy.util.sanitize_html import sanitize_html +from galaxy.model.orm import * +from galaxy.model.item_attrs import UsesAnnotations + +log = logging.getLogger( __name__ ) + +class SearchController( BaseUIController ): + @web.expose + def index(self, trans): + return trans.fill_template( "search/index.mako") \ No newline at end of file diff -r 8d2fe975c13c33439167903653c6c06217cab764 -r be5107c0321d1a7e7ab0221e7feb711932488bb0 templates/search/index.mako --- /dev/null +++ b/templates/search/index.mako @@ -0,0 +1,21 @@ + +<%inherit file="/webapps/galaxy/base_panels.mako"/> +<%namespace file="/search/search.mako" import="search_init" /> +<%namespace file="/search/search.mako" import="search_dialog" /> + +<%def name="init()"> +<% + self.has_left_panel = False + self.has_right_panel = False + self.active_view = "profile" +%> +</%def> + +<%def name="center_panel()"> + +${search_init()} + +${search_dialog()} + +</%def> + diff -r 8d2fe975c13c33439167903653c6c06217cab764 -r be5107c0321d1a7e7ab0221e7feb711932488bb0 templates/search/search.mako --- /dev/null +++ b/templates/search/search.mako @@ -0,0 +1,146 @@ + +<%def name="search_init()"> + + ${h.js( + 'libs/jquery/jquery', + 'libs/json2' +)} + +</%def> + +<%def name="search_dialog()"> + +<script type="text/javascript"> + +function doSearch(query) { + if (query.length > 1) { + var url = "/api/search"; + var query_struct = {}; + var domain_array = []; + var field = $("#field_type").val(); + if (field == "name") { + query_struct["name"] = { "$like" : "%" + $("#search_text").val() + "%" }; + } + + if ($("#history_search").val()) { + domain_array[domain_array.length] = "history" + } + if ($("#history_dataset_search").val()) { + domain_array[domain_array.length] = "history_dataset" + } + if ($("#library_dataset_search").val()) { + domain_array[domain_array.length] = "library_dataset" + } + + $.ajax({ + type : 'POST', + url: url, + data: JSON.stringify({ "domain" : domain_array, "query" : query_struct }), + contentType : 'application/json', + dataType : 'json', + success : function(data) { + $("#output").empty(); + var p = $("#output"); + p.empty(); + for(var i in data) { + p.append( $("<div style='margin: 20px;'>").append( + $("<a href='/file/" + data[i]['id'] + "'>" + data[i]['name'] + "</a>") + )).append( + $("<div class='quote'>").append( + data[i]['name'] + ) + ); + } + } + }); + } +}; + +$(document).ready( function() { + $("#search_button").click(function() { + doSearch($("#search_text").val()); + }); + $('#search_text').keyup(function(e){ + if(e.keyCode == 13) { + doSearch($("#search_text").val()); + } + }); + doSearch($("#search_text").val()); +}); + +var queryURL = function (query) { + var url = "/api/search" + encodeURIComponent(query); + url = url + "&field=" + $("#searchFields").val(); + 
if ($("#fileType").val() != "All") { + url = url + "&type=" + $("#fileType").val() + } + return url; +} + +var suggestURL = function (query) { + var url = "/api/search" + encodeURIComponent(query); + url = url + "&field=" + $("#searchFields").val(); + if ($("#fileType").val() != "All") { + url = url + "&type=" + $("#fileType").val() + } + return url; +} + +var postClick = function(data) { + $("#name").attr("value", data.data[0]); + $("#type").attr("value", data.data[1]); + $("#uuid").attr("value", data.data[2]); + document.getElementById('galaxy_form').submit(); +} + +var formatLink = function(doc) { + return $("<a href='/file/" + doc['_id'] + "'>" + doc['@id'] + "</a>"); +} + +var formatDesc = function(doc) { + var out = "<div>" + doc['md5'] +"</div>"; + if (doc['description'] != null) { + return out + doc['description'].substring(0, 350) + } else { + return out + "<div>No Description</div>"; + } +} + + +</script> + + +<div style="margin: 20px; position: absolute; width: 100%; height: 100%"> + +<div id="search_box"> +<input type="text" id="search_text" size="90"/> + +<div align="left"> +<h3>Domain</h3> +<input type="checkbox" checked="true" id="history_search">History</input><br/> +<input type="checkbox" checked="true" id="history_dataset_search">History Dataset</input><br/> +<input type="checkbox" checked="true" id="library_dataset_search">Library Dataset</input><br/> +</div> + + +<div> +<h3>Fields</h3> +<select id="field_type"> +<option value="name">Name</a> +<option value="datatype">Data Type</a> +<option value="tag">Tags</a> +<option value="annotations">Annotations</a> +<option value="extended_metadata">Extended Metadata</a> +</select> +</div> +<div style="margin: 20px;"> +<input type="button" id="search_button" value="Search"/> +</div> + +<div id="output"></div> + +</div> + +</div> + +</%def> \ No newline at end of file diff -r 8d2fe975c13c33439167903653c6c06217cab764 -r be5107c0321d1a7e7ab0221e7feb711932488bb0 templates/webapps/galaxy/base_panels.mako --- a/templates/webapps/galaxy/base_panels.mako +++ b/templates/webapps/galaxy/base_panels.mako @@ -80,7 +80,9 @@ [ _('Published Histories'), h.url_for( controller='/history', action='list_published' ) ], [ _('Published Workflows'), h.url_for( controller='/workflow', action='list_published' ) ], [ _('Published Visualizations'), h.url_for( controller='/visualization', action='list_published' ) ], - [ _('Published Pages'), h.url_for( controller='/page', action='list_published' ) ] + [ _('Published Pages'), h.url_for( controller='/page', action='list_published' ) ], + None, + [ _('Search'), h.url_for( controller='/search', action='index' ) ] ] tab( "shared", _("Shared Data"), h.url_for( controller='/library', action='index'), menu_options=menu_options ) %> https://bitbucket.org/galaxy/galaxy-central/commits/bd6d97447ef8/ Changeset: bd6d97447ef8 Branch: search User: Kyle Ellrott Date: 2013-01-25 00:59:19 Summary: default merge Affected #: 84 files diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 .hgignore --- a/.hgignore +++ b/.hgignore @@ -16,6 +16,7 @@ database/community_files database/compiled_templates database/files +database/job_working_directory database/pbs database/tmp database/*.sqlite @@ -23,6 +24,11 @@ # Python bytecode *.pyc +# Tool Shed Runtime Files +community_webapp.log +community_webapp.pid +hgweb.config* + # Config files universe_wsgi.ini reports_wsgi.ini diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 
doc/source/lib/galaxy.webapps.galaxy.api.rst --- a/doc/source/lib/galaxy.webapps.galaxy.api.rst +++ b/doc/source/lib/galaxy.webapps.galaxy.api.rst @@ -23,10 +23,6 @@ Quickstart ========== -Set the following option in universe_wsgi.ini and start the server:: - - enable_api = True - Log in as your user, navigate to the API Keys page in the User menu, and generate a new API key. Make a note of the API key, and then pull up a terminal. Now we'll use the display.py script in your galaxy/scripts/api diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/datatypes/tabular.py --- a/lib/galaxy/datatypes/tabular.py +++ b/lib/galaxy/datatypes/tabular.py @@ -13,7 +13,7 @@ from galaxy.datatypes import metadata from galaxy.datatypes.checkers import is_gzip from galaxy.datatypes.metadata import MetadataElement -from galaxy.datatypes.sniff import get_headers +from galaxy.datatypes.sniff import get_headers, get_test_fname from galaxy.util.json import to_json_string log = logging.getLogger(__name__) diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -346,7 +346,7 @@ Return an id tag suitable for identifying the task. This combines the task's job id and the task's own id. """ - return "%s:%s" % ( self.job.get_id(), self.get_id() ) + return "%s_%s" % ( self.job.get_id(), self.get_id() ) def get_command_line( self ): return self.command_line def get_parameters( self ): diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/model/item_attrs.py --- a/lib/galaxy/model/item_attrs.py +++ b/lib/galaxy/model/item_attrs.py @@ -137,6 +137,12 @@ # Set annotation. annotation_assoc.annotation = annotation return annotation_assoc + + def delete_item_annotation( self, db_session, user, item): + annotation_assoc = self.get_item_annotation_obj( db_session, user, item ) + if annotation_assoc: + db_session.delete(annotation_assoc) + db_session.flush() def copy_item_annotation( self, db_session, source_user, source_item, target_user, target_item ): """ Copy an annotation from a user/item source to a user/item target. """ diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py --- a/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py +++ b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py @@ -61,13 +61,20 @@ def downgrade(): metadata.reflect() - ExtendedMetadata_table.drop() - ExtendedMetadataIndex_table.drop() + try: + ExtendedMetadataIndex_table.drop() + except Exception, e: + log.debug( "Dropping 'extended_metadata_index' table failed: %s" % ( str( e ) ) ) + + try: + ExtendedMetadata_table.drop() + except Exception, e: + log.debug( "Dropping 'extended_metadata' table failed: %s" % ( str( e ) ) ) - # Drop the Job table's exit_code column. + # Drop the LDDA table's extended metadata ID column. 
try: - job_table = Table( "library_dataset_dataset_association", metadata, autoload=True ) - extended_metadata_id = job_table.c.extended_metadata_id + ldda_table = Table( "library_dataset_dataset_association", metadata, autoload=True ) + extended_metadata_id = ldda_table.c.extended_metadata_id extended_metadata_id.drop() except Exception, e: log.debug( "Dropping 'extended_metadata_id' column from library_dataset_dataset_association table failed: %s" % ( str( e ) ) ) diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/tool_shed/update_manager.py --- a/lib/galaxy/tool_shed/update_manager.py +++ b/lib/galaxy/tool_shed/update_manager.py @@ -4,6 +4,7 @@ import threading, urllib2, logging from galaxy.util import string_as_bool import galaxy.util.shed_util as shed_util +from galaxy.model.orm import and_ log = logging.getLogger( __name__ ) diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/tools/__init__.py --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -813,7 +813,7 @@ self.attributes['split_size'] = 20 self.attributes['split_mode'] = 'number_of_parts' -class Tool: +class Tool( object ): """ Represents a computational tool that can be executed through Galaxy. """ diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/util/__init__.py --- a/lib/galaxy/util/__init__.py +++ b/lib/galaxy/util/__init__.py @@ -2,7 +2,7 @@ Utility functions used systemwide. """ -import logging, threading, random, string, re, binascii, pickle, time, datetime, math, re, os, sys, tempfile, stat, grp, smtplib +import logging, threading, random, string, re, binascii, pickle, time, datetime, math, re, os, sys, tempfile, stat, grp, smtplib, errno from email.MIMEText import MIMEText # Older py compatibility diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/util/shed_util.py --- a/lib/galaxy/util/shed_util.py +++ b/lib/galaxy/util/shed_util.py @@ -350,7 +350,7 @@ # Keep the one-to-one mapping between items in 3 lists. created_or_updated_tool_shed_repositories.append( tool_shed_repository ) tool_panel_section_keys.append( tool_panel_section_key ) - filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) ) + filtered_repo_info_dicts.append( repo_info_dict ) # Build repository dependency relationships even if the user chose to not install repository dependencies. suc.build_repository_dependency_relationships( trans, all_repo_info_dicts, all_created_or_updated_tool_shed_repositories ) return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts, message @@ -596,8 +596,7 @@ # In this case, a record for the repository will exist in the database with the status of 'New'. repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision ) if repository and repository.metadata: - installed_rd, missing_rd = \ - get_installed_and_missing_repository_dependencies( trans, repository ) + installed_rd, missing_rd = get_installed_and_missing_repository_dependencies( trans, repository ) else: installed_rd, missing_rd = get_installed_and_missing_repository_dependencies_for_new_install( trans, repo_info_tuple ) # Discover all repository dependencies and retrieve information for installing them. 
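[Editor's note] Before continuing with the merge diff, a usage illustration for the unified search work introduced in changeset be5107c0321d above may help. The sketch below is not part of any commit: the /api/search path and the "domain", "query", "result_fields", and "$like" payload keys come from the SearchController diff, while the host URL, the API key placeholder, and the use of urllib2 are assumptions made for illustration.

import json
import urllib2

# Hypothetical client for the POST /api/search endpoint; host and key are placeholders.
payload = {
    "domain" : [ "history", "history_dataset" ],   # domains registered in search_mapping
    "query" : { "name" : { "$like" : "%rna%" } },  # "$like" routes to SearchField.like()
    "result_fields" : [ "name", "id" ],
}
request = urllib2.Request( "http://localhost:8080/api/search?key=YOUR_API_KEY",
                           data=json.dumps( payload ),
                           headers={ "Content-Type" : "application/json" } )
# create() returns a list of row dicts built by _create_response()
for hit in json.load( urllib2.urlopen( request ) ):
    print hit[ "id" ], hit[ "name" ]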
diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/util/shed_util_common.py --- a/lib/galaxy/util/shed_util_common.py +++ b/lib/galaxy/util/shed_util_common.py @@ -1,12 +1,16 @@ import os, shutil, tempfile, logging, string, threading, urllib2, filecmp -from galaxy import util +from datetime import datetime +from time import gmtime, strftime +from galaxy import web, util from galaxy.tools import parameters from galaxy.util import inflector, json +from galaxy.util.odict import odict from galaxy.web import url_for from galaxy.web.form_builder import SelectField from galaxy.webapps.community.util import container_util from galaxy.datatypes import checkers from galaxy.model.orm import and_ +import sqlalchemy.orm.exc from galaxy.tools.parameters import dynamic_options from galaxy.tool_shed import encoding_util @@ -38,6 +42,60 @@ TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER' VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" ) +new_repo_email_alert_template = """ +Repository name: ${repository_name} +Revision: ${revision} +Change description: +${description} + +Uploaded by: ${username} +Date content uploaded: ${display_date} + +${content_alert_str} + +----------------------------------------------------------------------------- +This change alert was sent from the Galaxy tool shed hosted on the server +"${host}" +----------------------------------------------------------------------------- +You received this alert because you registered to receive email when +new repositories were created in the Galaxy tool shed named "${host}". +----------------------------------------------------------------------------- +""" + +email_alert_template = """ +Repository name: ${repository_name} +Revision: ${revision} +Change description: +${description} + +Changed by: ${username} +Date of change: ${display_date} + +${content_alert_str} + +----------------------------------------------------------------------------- +This change alert was sent from the Galaxy tool shed hosted on the server +"${host}" +----------------------------------------------------------------------------- +You received this alert because you registered to receive email whenever +changes were made to the repository named "${repository_name}". +----------------------------------------------------------------------------- +""" + +contact_owner_template = """ +GALAXY TOOL SHED REPOSITORY MESSAGE +------------------------ + +The user '${username}' sent you the following message regarding your tool shed +repository named '${repository_name}'. You can respond by sending a reply to +the user's email address: ${email}. +----------------------------------------------------------------------------- +${message} +----------------------------------------------------------------------------- +This message was sent from the Galaxy Tool Shed instance hosted on the server +'${host}' +""" + def add_installation_directories_to_tool_dependencies( trans, tool_dependencies ): """ Determine the path to the installation directory for each of the received tool dependencies. This path will be displayed within the tool dependencies @@ -65,8 +123,20 @@ requirements_dict[ 'install_dir' ] = install_dir tool_dependencies[ dependency_key ] = requirements_dict return tool_dependencies +def add_tool_versions( trans, id, repository_metadata, changeset_revisions ): + # Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata. 
+ metadata = repository_metadata.metadata + tool_versions_dict = {} + for tool_dict in metadata.get( 'tools', [] ): + # We have at least 2 changeset revisions to compare tool guids and tool ids. + parent_id = get_parent_id( trans, id, tool_dict[ 'id' ], tool_dict[ 'version' ], tool_dict[ 'guid' ], changeset_revisions ) + tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id + if tool_versions_dict: + repository_metadata.tool_versions = tool_versions_dict + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() def build_readme_files_dict( metadata, tool_path=None ): - """Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received repository_metadata.""" + """Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received metadata.""" readme_files_dict = {} if metadata: if 'readme_files' in metadata: @@ -128,10 +198,14 @@ containers_dict[ 'readme_files' ] = readme_files_root_folder # Installed repository dependencies container. if repository_dependencies: + if new_install: + label = 'Repository dependencies' + else: + label = 'Installed repository dependencies' folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans, folder_id=folder_id, repository_dependencies=repository_dependencies, - label='Installed repository dependencies', + label=label, installed=True ) containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder # Missing repository dependencies container. @@ -435,6 +509,30 @@ except: pass return can_use_disk_file +def changeset_is_malicious( trans, id, changeset_revision, **kwd ): + """Check the malicious flag in repository metadata for a specified change set""" + repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ) + if repository_metadata: + return repository_metadata.malicious + return False +def changeset_revision_reviewed_by_user( trans, user, repository, changeset_revision ): + """Determine if the current changeset revision has been reviewed by the current user.""" + for review in repository.reviews: + if review.changeset_revision == changeset_revision and review.user == user: + return True + return False +def check_file_contents( trans ): + """See if any admin users have chosen to receive email alerts when a repository is updated. If so, the file contents of the update must be + checked for inappropriate content. 
+ admin_users = trans.app.config.get( "admin_users", "" ).split( "," ) + for repository in trans.sa_session.query( trans.model.Repository ) \ + .filter( trans.model.Repository.table.c.email_alerts != None ): + email_alerts = json.from_json_string( repository.email_alerts ) + for user_email in email_alerts: + if user_email in admin_users: + return True + return False def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ): """ Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make @@ -777,17 +875,20 @@ sa_session.flush() return tool_shed_repository def create_repo_info_dict( trans, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None, repository=None, - repository_metadata=None, metadata=None, repository_dependencies=None ): + repository_metadata=None, tool_dependencies=None, repository_dependencies=None ): """ Return a dictionary that includes all of the information needed to install a repository into a local Galaxy instance. The dictionary will also contain the recursive list of repository dependencies defined for the repository, as well as the defined tool dependencies. - This method is called from Galaxy in two places: - 1. During the tool shed repository installation process (via the tool shed's get_repository_information() method)- in this case both the received - repository and repository_metadata will be objects. - 2. When a tool shed repository that was uninstalled from a Galaxy instance is being re-installed - in this case, both repository and - repository_metadata will be None, but metadata will be the tool_shed_repository metadata on the Galaxy side, and the repository_dependencies will - be an object previously retrieved from the tool shed. + This method is called from Galaxy under three scenarios: + 1. During the tool shed repository installation process via the tool shed's get_repository_information() method. In this case both the received + repository and repository_metadata will be objects, but tool_dependencies and repository_dependencies will be None + 2. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no updates available. In this case, both + repository and repository_metadata will be None, but tool_dependencies and repository_dependencies will be objects previously retrieved from the + tool shed if the repository includes definitions for them. + 3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates available. In this case, this + method is reached via the tool shed's get_updated_repository_information() method, and both repository and repository_metadata will be objects + but tool_dependencies and repository_dependencies will be None 
""" repo_info_dict = {} repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner ) @@ -806,27 +907,24 @@ all_repository_dependencies=None, handled_key_rd_dicts=None, circular_repository_dependencies=None ) - if metadata: - tool_dependencies = metadata.get( 'tool_dependencies', None ) - if tool_dependencies: - new_tool_dependencies = {} - for dependency_key, requirements_dict in tool_dependencies.items(): - if dependency_key in [ 'set_environment' ]: - new_set_environment_dict_list = [] - for set_environment_dict in requirements_dict: - set_environment_dict[ 'repository_name' ] = repository_name - set_environment_dict[ 'repository_owner' ] = repository_owner - set_environment_dict[ 'changeset_revision' ] = changeset_revision - new_set_environment_dict_list.append( set_environment_dict ) - new_tool_dependencies[ dependency_key ] = new_set_environment_dict_list - else: - requirements_dict[ 'repository_name' ] = repository_name - requirements_dict[ 'repository_owner' ] = repository_owner - requirements_dict[ 'changeset_revision' ] = changeset_revision - new_tool_dependencies[ dependency_key ] = requirements_dict - tool_dependencies = new_tool_dependencies - else: - tool_dependencies = None + tool_dependencies = metadata.get( 'tool_dependencies', None ) + if tool_dependencies: + new_tool_dependencies = {} + for dependency_key, requirements_dict in tool_dependencies.items(): + if dependency_key in [ 'set_environment' ]: + new_set_environment_dict_list = [] + for set_environment_dict in requirements_dict: + set_environment_dict[ 'repository_name' ] = repository_name + set_environment_dict[ 'repository_owner' ] = repository_owner + set_environment_dict[ 'changeset_revision' ] = changeset_revision + new_set_environment_dict_list.append( set_environment_dict ) + new_tool_dependencies[ dependency_key ] = new_set_environment_dict_list + else: + requirements_dict[ 'repository_name' ] = repository_name + requirements_dict[ 'repository_owner' ] = repository_owner + requirements_dict[ 'changeset_revision' ] = changeset_revision + new_tool_dependencies[ dependency_key ] = requirements_dict + tool_dependencies = new_tool_dependencies # Cast unicode to string. 
repo_info_dict[ str( repository.name ) ] = ( str( repository.description ), str( repository_clone_url ), @@ -1407,13 +1505,49 @@ if name == stripped_file_name: return os.path.abspath( os.path.join( root, name ) ) return file_path +def get_categories( trans ): + """Get all categories from the database.""" + return trans.sa_session.query( trans.model.Category ) \ + .filter( trans.model.Category.table.c.deleted==False ) \ + .order_by( trans.model.Category.table.c.name ) \ + .all() +def get_category( trans, id ): + """Get a category from the database.""" + return trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( id ) ) +def get_category_by_name( trans, name ): + """Get a category from the database via name.""" + try: + return trans.sa_session.query( trans.model.Category ).filter_by( name=name ).one() + except sqlalchemy.orm.exc.NoResultFound: + return None def get_changectx_for_changeset( repo, changeset_revision, **kwd ): - """Retrieve a specified changectx from a repository""" + """Retrieve a specified changectx from a repository.""" for changeset in repo.changelog: ctx = repo.changectx( changeset ) if str( ctx ) == changeset_revision: return ctx return None +def get_component( trans, id ): + """Get a component from the database.""" + return trans.sa_session.query( trans.model.Component ).get( trans.security.decode_id( id ) ) +def get_component_by_name( trans, name ): + """Get a component from the database via a name.""" + return trans.sa_session.query( trans.app.model.Component ) \ + .filter( trans.app.model.Component.table.c.name==name ) \ + .first() +def get_component_review( trans, id ): + """Get a component_review from the database""" + return trans.sa_session.query( trans.model.ComponentReview ).get( trans.security.decode_id( id ) ) +def get_component_review_by_repository_review_id_component_id( trans, repository_review_id, component_id ): + """Get a component_review from the database via repository_review_id and component_id.""" + return trans.sa_session.query( trans.model.ComponentReview ) \ + .filter( and_( trans.model.ComponentReview.table.c.repository_review_id == trans.security.decode_id( repository_review_id ), + trans.model.ComponentReview.table.c.component_id == trans.security.decode_id( component_id ) ) ) \ + .first() +def get_components( trans ): + return trans.sa_session.query( trans.app.model.Component ) \ + .order_by( trans.app.model.Component.name ) \ + .all() def get_config_from_disk( config_file, relative_install_dir ): for root, dirs, files in os.walk( relative_install_dir ): if root.find( '.hg' ) < 0: @@ -1422,7 +1556,7 @@ return os.path.abspath( os.path.join( root, name ) ) return None def get_configured_ui(): - # Configure any desired ui settings. + """Configure any desired ui settings.""" _ui = ui.ui() # The following will suppress all messages. 
This is # the same as adding the following setting to the repo @@ -1475,6 +1609,12 @@ def get_installed_tool_shed_repository( trans, id ): """Get a repository on the Galaxy side from the database via id""" return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) ) +def get_latest_repository_metadata( trans, decoded_repository_id ): + """Get last metadata defined for a specified repository from the database.""" + return trans.sa_session.query( trans.model.RepositoryMetadata ) \ + .filter( trans.model.RepositoryMetadata.table.c.repository_id == decoded_repository_id ) \ + .order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \ + .first() def get_latest_tool_config_revision_from_repository_manifest( repo, filename, changeset_revision ): """ Get the latest revision of a tool config file named filename from the repository manifest up to the value of changeset_revision. @@ -1645,6 +1785,21 @@ return INITIAL_CHANGELOG_HASH else: previous_changeset_revision = changeset_revision +def get_previous_repository_reviews( trans, repository, changeset_revision ): + """Return an ordered dictionary of repository reviews up to and including the received changeset revision.""" + repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] + previous_reviews_dict = odict() + for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ): + previous_changeset_revision = str( repo.changectx( changeset ) ) + if previous_changeset_revision in reviewed_revision_hashes: + previous_rev, previous_changeset_revision_label = get_rev_label_from_changeset_revision( repo, previous_changeset_revision ) + revision_reviews = get_reviews_by_repository_id_changeset_revision( trans, + trans.security.encode_id( repository.id ), + previous_changeset_revision ) + previous_reviews_dict[ previous_changeset_revision ] = dict( changeset_revision_label=previous_changeset_revision_label, + reviews=revision_reviews ) + return previous_reviews_dict def get_readme_file_names( repository_name ): readme_files = [ 'readme', 'read_me', 'install' ] valid_filenames = [ r for r in readme_files ] @@ -1660,6 +1815,9 @@ elif len( repo_info_tuple ) == 7: description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple return description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies +def get_repository_by_name( trans, name ): + """Get a repository from the database via name.""" + return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one() def get_repository_by_name_and_owner( trans, name, owner ): """Get a repository from the database via name and owner""" if trans.webapp.name == 'galaxy': @@ -1730,6 +1888,7 @@ handle_next_repository_dependency( trans, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts, circular_repository_dependencies ) elif key_rd_dicts_to_be_processed: handle_next_repository_dependency( trans, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts, circular_repository_dependencies ) + all_repository_dependencies = prune_invalid_repository_dependencies( all_repository_dependencies ) return all_repository_dependencies def get_repository_dependency_as_key( repository_dependency ): return 
container_util.generate_repository_dependencies_key_for_repository( repository_dependency[ 0 ], @@ -1815,6 +1974,25 @@ .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ), trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \ .first() +def get_repository_metadata_revisions_for_review( repository, reviewed=True ): + repository_metadata_revisions = [] + metadata_changeset_revision_hashes = [] + if reviewed: + for metadata_revision in repository.metadata_revisions: + metadata_changeset_revision_hashes.append( metadata_revision.changeset_revision ) + for review in repository.reviews: + if review.changeset_revision in metadata_changeset_revision_hashes: + rmcr_hashes = [ rmr.changeset_revision for rmr in repository_metadata_revisions ] + if review.changeset_revision not in rmcr_hashes: + repository_metadata_revisions.append( review.repository_metadata ) + else: + for review in repository.reviews: + if review.changeset_revision not in metadata_changeset_revision_hashes: + metadata_changeset_revision_hashes.append( review.changeset_revision ) + for metadata_revision in repository.metadata_revisions: + if metadata_revision.changeset_revision not in metadata_changeset_revision_hashes: + repository_metadata_revisions.append( metadata_revision ) + return repository_metadata_revisions def get_repository_tools_tups( app, metadata_dict ): repository_tools_tups = [] index, shed_conf_dict = get_shed_tool_conf_dict( app, metadata_dict.get( 'shed_config_filename' ) ) @@ -1845,6 +2023,48 @@ relative_path_to_file.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ): relative_path_to_file = relative_path_to_file[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ] return relative_path_to_file +def get_reversed_changelog_changesets( repo ): + reversed_changelog = [] + for changeset in repo.changelog: + reversed_changelog.insert( 0, changeset ) + return reversed_changelog +def get_review( trans, id ): + """Get a repository_review from the database via id.""" + return trans.sa_session.query( trans.model.RepositoryReview ).get( trans.security.decode_id( id ) ) +def get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ): + """Get all repository_reviews from the database via repository id and changeset_revision.""" + return trans.sa_session.query( trans.model.RepositoryReview ) \ + .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ), + trans.model.RepositoryReview.changeset_revision == changeset_revision ) ) \ + .all() +def get_review_by_repository_id_changeset_revision_user_id( trans, repository_id, changeset_revision, user_id ): + """Get a repository_review from the database via repository id, changeset_revision and user_id.""" + return trans.sa_session.query( trans.model.RepositoryReview ) \ + .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ), + trans.model.RepositoryReview.changeset_revision == changeset_revision, + trans.model.RepositoryReview.user_id == trans.security.decode_id( user_id ) ) ) \ + .first() +def get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=None ): + if repository is None: + repository = repository_metadata.repository + repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + changeset_revision = repository_metadata.changeset_revision + ctx = 
get_changectx_for_changeset( repo, changeset_revision ) + if ctx: + rev = '%04d' % ctx.rev() + label = "%s:%s" % ( str( ctx.rev() ), changeset_revision ) + else: + rev = '-1' + label = "-1:%s" % changeset_revision + return rev, label, changeset_revision +def get_revision_label( trans, repository, changeset_revision ): + """Return a string consisting of the human-readable changeset rev and the changeset revision string.""" + repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + ctx = get_changectx_for_changeset( repo, changeset_revision ) + if ctx: + return "%s:%s" % ( str( ctx.rev() ), changeset_revision ) + else: + return "-1:%s" % changeset_revision def get_sample_files_from_disk( repository_files_dir, tool_path=None, relative_install_dir=None, resetting_all_metadata_on_repository=False ): if resetting_all_metadata_on_repository: # Keep track of the location where the repository is temporarily cloned so that we can strip it when setting metadata. @@ -1873,6 +2093,15 @@ relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :] sample_file_metadata_paths.append( relative_path_to_sample_file ) return sample_file_metadata_paths, sample_file_copy_paths +def get_rev_label_from_changeset_revision( repo, changeset_revision ): + ctx = get_changectx_for_changeset( repo, changeset_revision ) + if ctx: + rev = '%04d' % ctx.rev() + label = "%s:%s" % ( str( ctx.rev() ), changeset_revision ) + else: + rev = '-1' + label = "-1:%s" % changeset_revision + return rev, label def get_shed_tool_conf_dict( app, shed_tool_conf ): """ Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry @@ -1953,12 +2182,12 @@ for key_rd_dict in key_rd_dicts: key = key_rd_dict.keys()[ 0 ] repository_dependency = key_rd_dict[ key ] - toolshed, name, owner, changeset_revision = repository_dependency - if tool_shed_is_this_tool_shed( toolshed ): - repository = get_repository_by_name_and_owner( trans, name, owner ) + rd_toolshed, rd_name, rd_owner, rd_changeset_revision = repository_dependency + if tool_shed_is_this_tool_shed( rd_toolshed ): + repository = get_repository_by_name_and_owner( trans, rd_name, rd_owner ) repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans, trans.security.encode_id( repository.id ), - changeset_revision ) + rd_changeset_revision ) if repository_metadata: # The repository changeset_revision is installable, so no updates are available. new_key_rd_dict = {} @@ -1968,15 +2197,20 @@ # The repository changeset_revision is no longer installable, so see if there's been an update. repo_dir = repository.repo_path( trans.app ) repo = hg.repository( get_configured_ui(), repo_dir ) - changeset_revision = get_next_downloadable_changeset_revision( repository, repo, changeset_revision ) + changeset_revision = get_next_downloadable_changeset_revision( repository, repo, rd_changeset_revision ) repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision ) + if repository_metadata: + new_key_rd_dict = {} + new_key_rd_dict[ key ] = [ rd_toolshed, rd_name, rd_owner, repository_metadata.changeset_revision ] + # We have the updated changeset revision. 
updated_key_rd_dicts.append( new_key_rd_dict ) + else: + toolshed, repository_name, repository_owner, repository_changeset_revision = container_util.get_components_from_key( key ) + message = "The revision %s defined for repository %s owned by %s is invalid, so repository dependencies defined for repository %s will be ignored." % \ + ( str( rd_changeset_revision ), str( rd_name ), str( rd_owner ), str( repository_name ) ) + log.debug( message ) return updated_key_rd_dicts def get_url_from_repository_tool_shed( app, repository ): """ @@ -2000,8 +2234,11 @@ return shed_url # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml. return None +def get_user( trans, id ): + """Get a user from the database by id.""" + return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) ) def get_user_by_username( trans, username ): - """Get a user from the database by username""" + """Get a user from the database by username.""" return trans.sa_session.query( trans.model.User ) \ .filter( trans.model.User.table.c.username == username ) \ .one() @@ -2040,6 +2277,94 @@ all_repository_dependencies=all_repository_dependencies, handled_key_rd_dicts=handled_key_rd_dicts, circular_repository_dependencies=circular_repository_dependencies ) +def handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=False, admin_only=False ): + # There are 2 complementary features that enable a tool shed user to receive email notification: + # 1. Within User Preferences, they can elect to receive email when the first (or first valid) + # change set is produced for a new repository. + # 2. When viewing or managing a repository, they can check the box labeled "Receive email alerts" + # which causes them to receive email alerts when updates to the repository occur. This same feature + # is available on a per-repository basis on the repository grid within the tool shed. + # + # There are currently 4 scenarios for sending email notification when a change is made to a repository: + # 1. An admin user elects to receive email when the first change set is produced for a new repository + # from User Preferences. The change set does not have to include any valid content. This allows for + # the capture of inappropriate content being uploaded to new repositories. + # 2. A regular user elects to receive email when the first valid change set is produced for a new repository + # from User Preferences. This differs from 1 above in that the user will not receive email until a + # change set that includes valid content is produced. + # 3. An admin user checks the "Receive email alerts" check box on the manage repository page. Since the + # user is an admin user, the email will include information about both HTML and image content that was + # included in the change set. + # 4. A regular user checks the "Receive email alerts" check box on the manage repository page. Since the + # user is not an admin user, the email will not include any information about both HTML and image content + # that was included in the change set. + repo_dir = repository.repo_path( trans.app ) + repo = hg.repository( get_configured_ui(), repo_dir ) + smtp_server = trans.app.config.smtp_server + if smtp_server and ( new_repo_alert or repository.email_alerts ): + # Send email alert to users that want them. 
+ if trans.app.config.email_from is not None: + email_from = trans.app.config.email_from + elif trans.request.host.split( ':' )[0] == 'localhost': + email_from = 'galaxy-no-reply@' + socket.getfqdn() + else: + email_from = 'galaxy-no-reply@' + trans.request.host.split( ':' )[0] + tip_changeset = repo.changelog.tip() + ctx = repo.changectx( tip_changeset ) + t, tz = ctx.date() + date = datetime( *gmtime( float( t ) - tz )[:6] ) + display_date = date.strftime( "%Y-%m-%d" ) + try: + username = ctx.user().split()[0] + except: + username = ctx.user() + # We'll use 2 template bodies because we only want to send content + # alerts to tool shed admin users. + if new_repo_alert: + template = new_repo_email_alert_template + else: + template = email_alert_template + admin_body = string.Template( template ).safe_substitute( host=trans.request.host, + repository_name=repository.name, + revision='%s:%s' %( str( ctx.rev() ), ctx ), + display_date=display_date, + description=ctx.description(), + username=username, + content_alert_str=content_alert_str ) + body = string.Template( template ).safe_substitute( host=trans.request.host, + repository_name=repository.name, + revision='%s:%s' %( str( ctx.rev() ), ctx ), + display_date=display_date, + description=ctx.description(), + username=username, + content_alert_str='' ) + admin_users = trans.app.config.get( "admin_users", "" ).split( "," ) + frm = email_from + if new_repo_alert: + subject = "Galaxy tool shed alert for new repository named %s" % str( repository.name ) + subject = subject[ :80 ] + email_alerts = [] + for user in trans.sa_session.query( trans.model.User ) \ + .filter( and_( trans.model.User.table.c.deleted == False, + trans.model.User.table.c.new_repo_alert == True ) ): + if admin_only: + if user.email in admin_users: + email_alerts.append( user.email ) + else: + email_alerts.append( user.email ) + else: + subject = "Galaxy tool shed update alert for repository named %s" % str( repository.name ) + email_alerts = json.from_json_string( repository.email_alerts ) + for email in email_alerts: + to = email.strip() + # Send it + try: + if to in admin_users: + util.send_mail( frm, to, subject, admin_body, trans.app.config ) + else: + util.send_mail( frm, to, subject, body, trans.app.config ) + except Exception, e: + log.exception( "An error occurred sending a tool shed repository update alert by email." 
) def handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_dependency_dict, new_dependency_dict ): """ This method is called when a Galaxy admin is getting updates for an installed tool shed repository in order to cover the case where an @@ -2156,6 +2481,15 @@ message = str( e ) error = True return error, message +def has_previous_repository_reviews( trans, repository, changeset_revision ): + """Determine if a repository has a changeset revision review prior to the received changeset revision.""" + repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) ) + reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ] + for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ): + previous_changeset_revision = str( repo.changectx( changeset ) ) + if previous_changeset_revision in reviewed_revision_hashes: + return True + return False def in_all_repository_dependencies( repository_key, repository_dependency, all_repository_dependencies ): """Return True if { repository_key :repository_dependency } is in all_repository_dependencies.""" for key, val in all_repository_dependencies.items(): @@ -2344,6 +2678,89 @@ containers_dict[ 'tool_dependencies' ] = root_container containers_dict[ 'missing_tool_dependencies' ] = None return containers_dict +def new_repository_dependency_metadata_required( trans, repository, metadata_dict ): + """ + Compare the last saved metadata for each repository dependency in the repository with the new metadata in metadata_dict to determine if a new + repository_metadata table record is required or if the last saved metadata record can be updated instead. + """ + if 'repository_dependencies' in metadata_dict: + repository_metadata = get_latest_repository_metadata( trans, repository.id ) + if repository_metadata: + metadata = repository_metadata.metadata + if metadata: + if 'repository_dependencies' in metadata: + saved_repository_dependencies = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ] + new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ] + # The saved metadata must be a subset of the new metadata. + for new_repository_dependency_metadata in new_repository_dependencies: + if new_repository_dependency_metadata not in saved_repository_dependencies: + return True + for saved_repository_dependency_metadata in saved_repository_dependencies: + if saved_repository_dependency_metadata not in new_repository_dependencies: + return True + else: + # We have repository metadata that does not include metadata for any repository dependencies in the + # repository, so we can update the existing repository metadata. + return False + else: + # There is no saved repository metadata, so we need to create a new repository_metadata table record. + return True + # The received metadata_dict includes no metadata for repository dependencies, so a new repository_metadata table record is not needed. + return False +def new_tool_metadata_required( trans, repository, metadata_dict ): + """ + Compare the last saved metadata for each tool in the repository with the new metadata in metadata_dict to determine if a new repository_metadata + table record is required, or if the last saved metadata record can be updated instead. 
+ """ + if 'tools' in metadata_dict: + repository_metadata = get_latest_repository_metadata( trans, repository.id ) + if repository_metadata: + metadata = repository_metadata.metadata + if metadata: + if 'tools' in metadata: + saved_tool_ids = [] + # The metadata for one or more tools was successfully generated in the past + # for this repository, so we first compare the version string for each tool id + # in metadata_dict with what was previously saved to see if we need to create + # a new table record or if we can simply update the existing record. + for new_tool_metadata_dict in metadata_dict[ 'tools' ]: + for saved_tool_metadata_dict in metadata[ 'tools' ]: + if saved_tool_metadata_dict[ 'id' ] not in saved_tool_ids: + saved_tool_ids.append( saved_tool_metadata_dict[ 'id' ] ) + if new_tool_metadata_dict[ 'id' ] == saved_tool_metadata_dict[ 'id' ]: + if new_tool_metadata_dict[ 'version' ] != saved_tool_metadata_dict[ 'version' ]: + return True + # So far, a new metadata record is not required, but we still have to check to see if + # any new tool ids exist in metadata_dict that are not in the saved metadata. We do + # this because if a new tarball was uploaded to a repository that included tools, it + # may have removed existing tool files if they were not included in the uploaded tarball. + for new_tool_metadata_dict in metadata_dict[ 'tools' ]: + if new_tool_metadata_dict[ 'id' ] not in saved_tool_ids: + return True + else: + # We have repository metadata that does not include metadata for any tools in the + # repository, so we can update the existing repository metadata. + return False + else: + # There is no saved repository metadata, so we need to create a new repository_metadata table record. + return True + # The received metadata_dict includes no metadata for tools, so a new repository_metadata table record is not needed. + return False +def new_workflow_metadata_required( trans, repository, metadata_dict ): + """ + Currently everything about an exported workflow except the name is hard-coded, so there's no real way to differentiate versions of + exported workflows. If this changes at some future time, this method should be enhanced accordingly. + """ + if 'workflows' in metadata_dict: + repository_metadata = get_latest_repository_metadata( trans, repository.id ) + if repository_metadata: + # The repository has metadata, so update the workflows value - no new record is needed. + return False + else: + # There is no saved repository metadata, so we need to create a new repository_metadata table record. + return True + # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed. 
 def open_repository_files_folder( trans, folder_path ):
     try:
         files_list = get_repository_files( trans, folder_path )
@@ -2368,6 +2785,7 @@
 def populate_repository_dependency_objects_for_processing( trans, current_repository_key, repository_dependencies_dict, key_rd_dicts_to_be_processed, handled_key_rd_dicts, circular_repository_dependencies, all_repository_dependencies ):
     current_repository_key_rd_dicts = []
+    filtered_current_repository_key_rd_dicts = []
     for rd in repository_dependencies_dict[ 'repository_dependencies' ]:
         new_key_rd_dict = {}
         new_key_rd_dict[ current_repository_key ] = rd
@@ -2379,6 +2797,7 @@
     for key_rd_dict in current_repository_key_rd_dicts:
         is_circular = False
         if not in_key_rd_dicts( key_rd_dict, handled_key_rd_dicts ) and not in_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ):
+            filtered_current_repository_key_rd_dicts.append( key_rd_dict )
             repository_dependency = key_rd_dict[ current_repository_key ]
             if current_repository_key in all_repository_dependencies:
                 # Add all repository dependencies for the current repository into its entry in all_repository_dependencies.
@@ -2402,7 +2821,26 @@
             new_key_rd_dict = {}
             new_key_rd_dict[ current_repository_key ] = repository_dependency
             key_rd_dicts_to_be_processed.append( new_key_rd_dict )
-    return current_repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts, all_repository_dependencies
+    return filtered_current_repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts, all_repository_dependencies
+def prune_invalid_repository_dependencies( repository_dependencies ):
+    """
+    Eliminate all invalid entries in the received repository_dependencies dictionary. An entry is invalid if the value_list of the key/value pair is
+    empty. This occurs when an invalid combination of tool shed, name, owner, changeset_revision is used and a repository_metadata record is not found.
+    """
+    valid_repository_dependencies = {}
+    description = repository_dependencies.get( 'description', None )
+    root_key = repository_dependencies.get( 'root_key', None )
+    if root_key is None:
+        return valid_repository_dependencies
+    for key, value in repository_dependencies.items():
+        if key in [ 'description', 'root_key' ]:
+            continue
+        if value:
+            valid_repository_dependencies[ key ] = value
+    if valid_repository_dependencies:
+        valid_repository_dependencies[ 'description' ] = description
+        valid_repository_dependencies[ 'root_key' ] = root_key
+    return valid_repository_dependencies
 def remove_dir( dir ):
     if os.path.exists( dir ):
         try:
@@ -2697,6 +3135,77 @@
     return reversed_changelog
 def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ):
     return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision )
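
Aside: a hypothetical input for prune_invalid_repository_dependencies, to make the pruning rule concrete (the key strings are illustrative, not real tool shed keys):

    deps = { 'root_key': 'toolshed/owner/repo/rev0',
             'description': 'example',
             'toolshed/devteam/bwa/rev1': [ [ 'toolshed', 'bwa', 'devteam', 'rev1' ] ],
             'toolshed/devteam/bogus/rev2': [] }
    pruned = prune_invalid_repository_dependencies( deps )
    # pruned keeps 'description', 'root_key' and the bwa entry; the entry with
    # the empty value_list is dropped.
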
+ """ + message = '' + status = 'done' + encoded_id = trans.security.encode_id( repository.id ) + repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository ) + repo_dir = repository.repo_path( trans.app ) + repo = hg.repository( get_configured_ui(), repo_dir ) + metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, + repository=repository, + repository_clone_url=repository_clone_url, + relative_install_dir=repo_dir, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False, + persist=False ) + if metadata_dict: + downloadable = is_downloadable( metadata_dict ) + repository_metadata = None + if new_repository_dependency_metadata_required( trans, repository, metadata_dict ) or \ + new_tool_metadata_required( trans, repository, metadata_dict ) or \ + new_workflow_metadata_required( trans, repository, metadata_dict ): + # Create a new repository_metadata table row. + repository_metadata = create_or_update_repository_metadata( trans, encoded_id, repository, repository.tip( trans.app ), metadata_dict ) + # If this is the first record stored for this repository, see if we need to send any email alerts. + if len( repository.downloadable_revisions ) == 1: + handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False ) + else: + repository_metadata = get_latest_repository_metadata( trans, repository.id ) + if repository_metadata: + downloadable = is_downloadable( metadata_dict ) + # Update the last saved repository_metadata table row. + repository_metadata.changeset_revision = repository.tip( trans.app ) + repository_metadata.metadata = metadata_dict + repository_metadata.downloadable = downloadable + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() + else: + # There are no tools in the repository, and we're setting metadata on the repository tip. + repository_metadata = create_or_update_repository_metadata( trans, encoded_id, repository, repository.tip( trans.app ), metadata_dict ) + if 'tools' in metadata_dict and repository_metadata and status != 'error': + # Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog. + changeset_revisions = [] + for changeset in repo.changelog: + changeset_revision = str( repo.changectx( changeset ) ) + if get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ): + changeset_revisions.append( changeset_revision ) + add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions ) + elif len( repo ) == 1 and not invalid_file_tups: + message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip( trans.app ) ) + message += "be defined so this revision cannot be automatically installed into a local Galaxy instance." + status = "error" + if invalid_file_tups: + message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict ) + status = 'error' + # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. + reset_tool_data_tables( trans.app ) + return message, status +def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ): + # Set metadata on the repository tip. 
+def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
+    # Set metadata on the repository tip.
+    error_message, status = set_repository_metadata( trans, repository, content_alert_str=content_alert_str, **kwd )
+    if error_message:
+        # If there is an error, display it.
+        return trans.response.send_redirect( web.url_for( controller='repository',
+                                                          action='manage_repository',
+                                                          id=trans.security.encode_id( repository.id ),
+                                                          message=error_message,
+                                                          status='error' ) )
 def strip_path( fpath ):
     if not fpath:
         return fpath
@@ -2858,10 +3367,8 @@
     # ? = not tracked
     # I = ignored
     # It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that
-    # purging is not supported by the mercurial API. See the deprecated update_for_browsing() method in common.py.
-    commands.update( get_configured_ui(),
-                     repo,
-                     rev=ctx_rev )
+    # purging is not supported by the mercurial API.
+    commands.update( get_configured_ui(), repo, rev=ctx_rev )
 def url_join( *args ):
     parts = []
     for arg in args:

diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/web/base/controllers/admin.py
--- a/lib/galaxy/web/base/controllers/admin.py
+++ b/lib/galaxy/web/base/controllers/admin.py
@@ -224,19 +224,20 @@
         role = get_role( trans, id )
         if params.get( 'role_members_edit_button', False ):
             in_users = [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in util.listify( params.in_users ) ]
-            for ura in role.users:
-                user = trans.sa_session.query( trans.app.model.User ).get( ura.user_id )
-                if user not in in_users:
-                    # Delete DefaultUserPermissions for previously associated users that have been removed from the role
-                    for dup in user.default_permissions:
-                        if role == dup.role:
-                            trans.sa_session.delete( dup )
-                    # Delete DefaultHistoryPermissions for previously associated users that have been removed from the role
-                    for history in user.histories:
-                        for dhp in history.default_permissions:
-                            if role == dhp.role:
-                                trans.sa_session.delete( dhp )
-                    trans.sa_session.flush()
+            if trans.webapp.name == 'galaxy':
+                for ura in role.users:
+                    user = trans.sa_session.query( trans.app.model.User ).get( ura.user_id )
+                    if user not in in_users:
+                        # Delete DefaultUserPermissions for previously associated users that have been removed from the role
+                        for dup in user.default_permissions:
+                            if role == dup.role:
+                                trans.sa_session.delete( dup )
+                        # Delete DefaultHistoryPermissions for previously associated users that have been removed from the role
+                        for history in user.histories:
+                            for dhp in history.default_permissions:
+                                if role == dhp.role:
+                                    trans.sa_session.delete( dhp )
+                        trans.sa_session.flush()
         in_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( params.in_groups ) ]
         trans.app.security_agent.set_entity_role_associations( roles=[ role ], users=in_users, groups=in_groups )
         trans.sa_session.refresh( role )

diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -274,7 +274,6 @@
     from galaxy.web.base.controller import ControllerUnavailable
     package = import_module( package_name )
     controller_dir = package.__path__[0]
-    print ">>>", controller_dir, package.__path__
    for fname in os.listdir( controller_dir ):
        if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
            name = fname[:-3]

diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -1,12 +1,11 @@
-from galaxy.web.base.controller import *
+from galaxy.web.base.controller import BaseUIController
+from galaxy import web, util
 from galaxy.web.base.controllers.admin import Admin
 from galaxy.webapps.community import model
 from galaxy.model.orm import and_
-from galaxy.web.framework.helpers import time_ago, iff, grids
-from galaxy.web.form_builder import SelectField
+from galaxy.web.framework.helpers import time_ago, grids
 from galaxy.util import inflector
 import galaxy.util.shed_util_common as suc
-import common
 from repository import RepositoryGrid, CategoryGrid
 from galaxy import eggs
@@ -474,7 +473,7 @@
             if k.startswith( 'f-' ):
                 del kwd[ k ]
         if 'user_id' in kwd:
-            user = common.get_user( trans, kwd[ 'user_id' ] )
+            user = suc.get_user( trans, kwd[ 'user_id' ] )
             kwd[ 'f-email' ] = user.email
             del kwd[ 'user_id' ]
         else:
@@ -489,7 +488,7 @@
             if k.startswith( 'f-' ):
                 del kwd[ k ]
         category_id = kwd.get( 'id', None )
-        category = common.get_category( trans, category_id )
+        category = suc.get_category( trans, category_id )
         kwd[ 'f-Category.name' ] = category.name
     elif operation == "receive email alerts":
         if kwd[ 'id' ]:
@@ -554,7 +553,7 @@
         if not name or not description:
             message = 'Enter a valid name and a description'
             status = 'error'
-        elif common.get_category_by_name( trans, name ):
+        elif suc.get_category_by_name( trans, name ):
             message = 'A category with that name already exists'
             status = 'error'
         else:
@@ -641,7 +640,7 @@
                                                        action='manage_categories',
                                                        message=message,
                                                        status='error' ) )
-        category = common.get_category( trans, id )
+        category = suc.get_category( trans, id )
         if params.get( 'edit_category_button', False ):
             new_name = util.restore_text( params.get( 'name', '' ) ).strip()
             new_description = util.restore_text( params.get( 'description', '' ) ).strip()
@@ -649,7 +648,7 @@
             if not new_name:
                 message = 'Enter a valid name'
                 status = 'error'
-            elif category.name != new_name and common.get_category_by_name( trans, name ):
+            elif category.name != new_name and suc.get_category_by_name( trans, name ):
                 message = 'A category with that name already exists'
                 status = 'error'
             else:
@@ -772,7 +771,7 @@
         ids = util.listify( id )
         message = "Deleted %d categories: " % len( ids )
         for category_id in ids:
-            category = common.get_category( trans, category_id )
+            category = suc.get_category( trans, category_id )
             category.deleted = True
             trans.sa_session.add( category )
             trans.sa_session.flush()
@@ -800,7 +799,7 @@
         purged_categories = ""
         message = "Purged %d categories: " % len( ids )
         for category_id in ids:
-            category = common.get_category( trans, category_id )
+            category = suc.get_category( trans, category_id )
             if category.deleted:
                 # Delete RepositoryCategoryAssociations
                 for rca in category.repositories:
@@ -827,7 +826,7 @@
         count = 0
         undeleted_categories = ""
         for category_id in ids:
-            category = common.get_category( trans, category_id )
+            category = suc.get_category( trans, category_id )
             if category.deleted:
                 category.deleted = False
                 trans.sa_session.add( category )

diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ /dev/null
@@ -1,597 +0,0 @@
-import os, string, socket, logging, simplejson, binascii, tempfile
-from time import gmtime, strftime
-from datetime import *
-from galaxy.tools import *
-from galaxy.util.odict import odict
-from galaxy.util.json import from_json_string, to_json_string
-import galaxy.util.shed_util_common as suc
-from galaxy.web.base.controllers.admin import *
-from galaxy.webapps.community import model
-from galaxy.model.orm import and_
-from galaxy.model.item_attrs import UsesItemRatings
-
-from galaxy import eggs
-eggs.require('mercurial')
-from mercurial import hg, ui, commands
-
-log = logging.getLogger( __name__ )
-
-new_repo_email_alert_template = """
-Repository name: ${repository_name}
-Revision: ${revision}
-Change description:
-${description}
-
-Uploaded by: ${username}
-Date content uploaded: ${display_date}
-
-${content_alert_str}
-
------------------------------------------------------------------------------
-This change alert was sent from the Galaxy tool shed hosted on the server
-"${host}"
------------------------------------------------------------------------------
-You received this alert because you registered to receive email when
-new repositories were created in the Galaxy tool shed named "${host}".
------------------------------------------------------------------------------
-"""
-
-email_alert_template = """
-Repository name: ${repository_name}
-Revision: ${revision}
-Change description:
-${description}
-
-Changed by: ${username}
-Date of change: ${display_date}
-
-${content_alert_str}
-
------------------------------------------------------------------------------
-This change alert was sent from the Galaxy tool shed hosted on the server
-"${host}"
------------------------------------------------------------------------------
-You received this alert because you registered to receive email whenever
-changes were made to the repository named "${repository_name}".
------------------------------------------------------------------------------
-"""
-
-contact_owner_template = """
-GALAXY TOOL SHED REPOSITORY MESSAGE
-------------------------
-
-The user '${username}' sent you the following message regarding your tool shed
-repository named '${repository_name}'. You can respond by sending a reply to
-the user's email address: ${email}.
------------------------------------------------------------------------------
-${message}
------------------------------------------------------------------------------
-This message was sent from the Galaxy Tool Shed instance hosted on the server
-'${host}'
-"""
-
-malicious_error = " This changeset cannot be downloaded because it potentially produces malicious behavior or contains inappropriate content."
-malicious_error_can_push = " Correct this changeset as soon as possible, it potentially produces malicious behavior or contains inappropriate content."
-
-class ItemRatings( UsesItemRatings ):
-    """Overrides rate_item method since we also allow for comments"""
-    def rate_item( self, trans, user, item, rating, comment='' ):
-        """ Rate an item. Return type is <item_class>RatingAssociation. """
-        item_rating = self.get_user_item_rating( trans.sa_session, user, item, webapp_model=trans.model )
-        if not item_rating:
-            # User has not yet rated item; create rating.
-            item_rating_assoc_class = self._get_item_rating_assoc_class( item, webapp_model=trans.model )
-            item_rating = item_rating_assoc_class()
-            item_rating.user = trans.user
-            item_rating.set_item( item )
-            item_rating.rating = rating
-            item_rating.comment = comment
-            trans.sa_session.add( item_rating )
-            trans.sa_session.flush()
-        elif item_rating.rating != rating or item_rating.comment != comment:
-            # User has previously rated item; update rating.
-            item_rating.rating = rating
-            item_rating.comment = comment
-            trans.sa_session.add( item_rating )
-            trans.sa_session.flush()
-        return item_rating
-
-def add_tool_versions( trans, id, repository_metadata, changeset_revisions ):
-    # Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata.
-    metadata = repository_metadata.metadata
-    tool_versions_dict = {}
-    for tool_dict in metadata.get( 'tools', [] ):
-        # We have at least 2 changeset revisions to compare tool guids and tool ids.
-        parent_id = suc.get_parent_id( trans,
-                                       id,
-                                       tool_dict[ 'id' ],
-                                       tool_dict[ 'version' ],
-                                       tool_dict[ 'guid' ],
-                                       changeset_revisions )
-        tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
-    if tool_versions_dict:
-        repository_metadata.tool_versions = tool_versions_dict
-        trans.sa_session.add( repository_metadata )
-        trans.sa_session.flush()
-def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
-    """Check the malicious flag in repository metadata for a specified change set"""
-    repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
-    if repository_metadata:
-        return repository_metadata.malicious
-    return False
-def changeset_revision_reviewed_by_user( trans, user, repository, changeset_revision ):
-    """Determine if the current changeset revision has been reviewed by the current user."""
-    for review in repository.reviews:
-        if review.changeset_revision == changeset_revision and review.user == user:
-            return True
-    return False
-def check_file_contents( trans ):
-    # See if any admin users have chosen to receive email alerts when a repository is updated.
-    # If so, the file contents of the update must be checked for inappropriate content.
-    admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
-    for repository in trans.sa_session.query( trans.model.Repository ) \
-                                      .filter( trans.model.Repository.table.c.email_alerts != None ):
-        email_alerts = from_json_string( repository.email_alerts )
-        for user_email in email_alerts:
-            if user_email in admin_users:
-                return True
-    return False
-def get_category( trans, id ):
-    """Get a category from the database"""
-    return trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( id ) )
-def get_category_by_name( trans, name ):
-    """Get a category from the database via name"""
-    try:
-        return trans.sa_session.query( trans.model.Category ).filter_by( name=name ).one()
-    except sqlalchemy.orm.exc.NoResultFound:
-        return None
-def get_categories( trans ):
-    """Get all categories from the database"""
-    return trans.sa_session.query( trans.model.Category ) \
-                           .filter( trans.model.Category.table.c.deleted==False ) \
-                           .order_by( trans.model.Category.table.c.name ) \
-                           .all()
-def get_component( trans, id ):
-    """Get a component from the database"""
-    return trans.sa_session.query( trans.model.Component ).get( trans.security.decode_id( id ) )
-def get_component_by_name( trans, name ):
-    return trans.sa_session.query( trans.app.model.Component ) \
-                           .filter( trans.app.model.Component.table.c.name==name ) \
-                           .first()
-def get_component_review( trans, id ):
-    """Get a component_review from the database"""
-    return trans.sa_session.query( trans.model.ComponentReview ).get( trans.security.decode_id( id ) )
-def get_component_review_by_repository_review_id_component_id( trans, repository_review_id, component_id ):
-    """Get a component_review from the database via repository_review_id and component_id"""
-    return trans.sa_session.query( trans.model.ComponentReview ) \
-                           .filter( and_( trans.model.ComponentReview.table.c.repository_review_id == trans.security.decode_id( repository_review_id ),
-                                          trans.model.ComponentReview.table.c.component_id == trans.security.decode_id( component_id ) ) ) \
-                           .first()
-def get_components( trans ):
-    return trans.sa_session.query( trans.app.model.Component ) \
-                           .order_by( trans.app.model.Component.name ) \
-                           .all()
-def get_latest_repository_metadata( trans, decoded_repository_id ):
-    """Get last metadata defined for a specified repository from the database"""
-    return trans.sa_session.query( trans.model.RepositoryMetadata ) \
-                           .filter( trans.model.RepositoryMetadata.table.c.repository_id == decoded_repository_id ) \
-                           .order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \
-                           .first()
-def get_previous_repository_reviews( trans, repository, changeset_revision ):
-    """Return an ordered dictionary of repository reviews up to and including the received changeset revision."""
-    repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
-    reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
-    previous_reviews_dict = odict()
-    for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ):
-        previous_changeset_revision = str( repo.changectx( changeset ) )
-        if previous_changeset_revision in reviewed_revision_hashes:
-            previous_rev, previous_changeset_revision_label = get_rev_label_from_changeset_revision( repo, previous_changeset_revision )
-            revision_reviews = get_reviews_by_repository_id_changeset_revision( trans,
-                                                                                trans.security.encode_id( repository.id ),
-                                                                                previous_changeset_revision )
-            previous_reviews_dict[ previous_changeset_revision ] = dict( changeset_revision_label=previous_changeset_revision_label,
-                                                                         reviews=revision_reviews )
-    return previous_reviews_dict
-def get_repository_by_name( trans, name ):
-    """Get a repository from the database via name"""
-    return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one()
-def get_repository_metadata_revisions_for_review( repository, reviewed=True ):
-    repository_metadata_revisions = []
-    metadata_changeset_revision_hashes = []
-    if reviewed:
-        for metadata_revision in repository.metadata_revisions:
-            metadata_changeset_revision_hashes.append( metadata_revision.changeset_revision )
-        for review in repository.reviews:
-            if review.changeset_revision in metadata_changeset_revision_hashes:
-                rmcr_hashes = [ rmr.changeset_revision for rmr in repository_metadata_revisions ]
-                if review.changeset_revision not in rmcr_hashes:
-                    repository_metadata_revisions.append( review.repository_metadata )
-    else:
-        for review in repository.reviews:
-            if review.changeset_revision not in metadata_changeset_revision_hashes:
-                metadata_changeset_revision_hashes.append( review.changeset_revision )
-        for metadata_revision in repository.metadata_revisions:
-            if metadata_revision.changeset_revision not in metadata_changeset_revision_hashes:
-                repository_metadata_revisions.append( metadata_revision )
-    return repository_metadata_revisions
-def get_rev_label_changeset_revision_from_repository_metadata( trans, repository_metadata, repository=None ):
-    if repository is None:
-        repository = repository_metadata.repository
-    repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
-    changeset_revision = repository_metadata.changeset_revision
-    ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
-    if ctx:
-        rev = '%04d' % ctx.rev()
-        label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
-    else:
-        rev = '-1'
-        label = "-1:%s" % changeset_revision
-    return rev, label, changeset_revision
-def get_rev_label_from_changeset_revision( repo, changeset_revision ):
-    ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
-    if ctx:
-        rev = '%04d' % ctx.rev()
-        label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
-    else:
-        rev = '-1'
-        label = "-1:%s" % changeset_revision
-    return rev, label
-def get_reversed_changelog_changesets( repo ):
-    reversed_changelog = []
-    for changeset in repo.changelog:
-        reversed_changelog.insert( 0, changeset )
-    return reversed_changelog
-def get_review( trans, id ):
-    """Get a repository_review from the database via id"""
-    return trans.sa_session.query( trans.model.RepositoryReview ).get( trans.security.decode_id( id ) )
-def get_review_by_repository_id_changeset_revision_user_id( trans, repository_id, changeset_revision, user_id ):
-    """Get a repository_review from the database via repository id, changeset_revision and user_id"""
-    return trans.sa_session.query( trans.model.RepositoryReview ) \
-                           .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
-                                          trans.model.RepositoryReview.changeset_revision == changeset_revision,
-                                          trans.model.RepositoryReview.user_id == trans.security.decode_id( user_id ) ) ) \
-                           .first()
-def get_reviews_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ):
-    """Get all repository_reviews from the database via repository id and changeset_revision"""
-    return trans.sa_session.query( trans.model.RepositoryReview ) \
-                           .filter( and_( trans.model.RepositoryReview.repository_id == trans.security.decode_id( repository_id ),
-                                          trans.model.RepositoryReview.changeset_revision == changeset_revision ) ) \
-                           .all()
-def get_revision_label( trans, repository, changeset_revision ):
-    """
-    Return a string consisting of the human-readable
-    changeset rev and the changeset revision string.
-    """
-    repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
-    ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
-    if ctx:
-        return "%s:%s" % ( str( ctx.rev() ), changeset_revision )
-    else:
-        return "-1:%s" % changeset_revision
-def get_user( trans, id ):
-    """Get a user from the database by id"""
-    return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
-def handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=False, admin_only=False ):
-    # There are 2 complementary features that enable a tool shed user to receive email notification:
-    # 1. Within User Preferences, they can elect to receive email when the first (or first valid)
-    #    change set is produced for a new repository.
-    # 2. When viewing or managing a repository, they can check the box labeled "Receive email alerts"
-    #    which causes them to receive email alerts when updates to the repository occur. This same feature
-    #    is available on a per-repository basis on the repository grid within the tool shed.
-    #
-    # There are currently 4 scenarios for sending email notification when a change is made to a repository:
-    # 1. An admin user elects to receive email when the first change set is produced for a new repository
-    #    from User Preferences. The change set does not have to include any valid content. This allows for
-    #    the capture of inappropriate content being uploaded to new repositories.
-    # 2. A regular user elects to receive email when the first valid change set is produced for a new repository
-    #    from User Preferences. This differs from 1 above in that the user will not receive email until a
-    #    change set that includes valid content is produced.
-    # 3. An admin user checks the "Receive email alerts" check box on the manage repository page. Since the
-    #    user is an admin user, the email will include information about both HTML and image content that was
-    #    included in the change set.
-    # 4. A regular user checks the "Receive email alerts" check box on the manage repository page. Since the
-    #    user is not an admin user, the email will not include any information about HTML or image content
-    #    that was included in the change set.
-    repo_dir = repository.repo_path( trans.app )
-    repo = hg.repository( suc.get_configured_ui(), repo_dir )
-    smtp_server = trans.app.config.smtp_server
-    if smtp_server and ( new_repo_alert or repository.email_alerts ):
-        # Send email alert to users that want them.
-        if trans.app.config.email_from is not None:
-            email_from = trans.app.config.email_from
-        elif trans.request.host.split( ':' )[0] == 'localhost':
-            email_from = 'galaxy-no-reply@' + socket.getfqdn()
-        else:
-            email_from = 'galaxy-no-reply@' + trans.request.host.split( ':' )[0]
-        tip_changeset = repo.changelog.tip()
-        ctx = repo.changectx( tip_changeset )
-        t, tz = ctx.date()
-        date = datetime( *gmtime( float( t ) - tz )[:6] )
-        display_date = date.strftime( "%Y-%m-%d" )
-        try:
-            username = ctx.user().split()[0]
-        except:
-            username = ctx.user()
-        # We'll use 2 template bodies because we only want to send content
-        # alerts to tool shed admin users.
-        if new_repo_alert:
-            template = new_repo_email_alert_template
-        else:
-            template = email_alert_template
-        admin_body = string.Template( template ).safe_substitute( host=trans.request.host,
-                                                                  repository_name=repository.name,
-                                                                  revision='%s:%s' % ( str( ctx.rev() ), ctx ),
-                                                                  display_date=display_date,
-                                                                  description=ctx.description(),
-                                                                  username=username,
-                                                                  content_alert_str=content_alert_str )
-        body = string.Template( template ).safe_substitute( host=trans.request.host,
-                                                            repository_name=repository.name,
-                                                            revision='%s:%s' % ( str( ctx.rev() ), ctx ),
-                                                            display_date=display_date,
-                                                            description=ctx.description(),
-                                                            username=username,
-                                                            content_alert_str='' )
-        admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
-        frm = email_from
-        if new_repo_alert:
-            subject = "Galaxy tool shed alert for new repository named %s" % str( repository.name )
-            subject = subject[ :80 ]
-            email_alerts = []
-            for user in trans.sa_session.query( trans.model.User ) \
-                                        .filter( and_( trans.model.User.table.c.deleted == False,
-                                                       trans.model.User.table.c.new_repo_alert == True ) ):
-                if admin_only:
-                    if user.email in admin_users:
-                        email_alerts.append( user.email )
-                else:
-                    email_alerts.append( user.email )
-        else:
-            subject = "Galaxy tool shed update alert for repository named %s" % str( repository.name )
-            email_alerts = from_json_string( repository.email_alerts )
-        for email in email_alerts:
-            to = email.strip()
-            # Send it
-            try:
-                if to in admin_users:
-                    util.send_mail( frm, to, subject, admin_body, trans.app.config )
-                else:
-                    util.send_mail( frm, to, subject, body, trans.app.config )
-            except Exception, e:
-                log.exception( "An error occurred sending a tool shed repository update alert by email." )
-def has_previous_repository_reviews( trans, repository, changeset_revision ):
-    """Determine if a repository has a changeset revision review prior to the received changeset revision."""
-    repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
-    reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
-    for changeset in suc.reversed_upper_bounded_changelog( repo, changeset_revision ):
-        previous_changeset_revision = str( repo.changectx( changeset ) )
-        if previous_changeset_revision in reviewed_revision_hashes:
-            return True
-    return False
-def new_repository_dependency_metadata_required( trans, repository, metadata_dict ):
-    """
-    Compare the last saved metadata for each repository dependency in the repository with the new
-    metadata in metadata_dict to determine if a new repository_metadata table record is required,
-    or if the last saved metadata record can be updated instead.
-    """
-    if 'repository_dependencies' in metadata_dict:
-        repository_metadata = get_latest_repository_metadata( trans, repository.id )
-        if repository_metadata:
-            metadata = repository_metadata.metadata
-            if metadata:
-                if 'repository_dependencies' in metadata:
-                    saved_repository_dependencies = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
-                    new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ]
-                    # The saved metadata must be a subset of the new metadata.
-                    for new_repository_dependency_metadata in new_repository_dependencies:
-                        if new_repository_dependency_metadata not in saved_repository_dependencies:
-                            return True
-                    for saved_repository_dependency_metadata in saved_repository_dependencies:
-                        if saved_repository_dependency_metadata not in new_repository_dependencies:
-                            return True
-                else:
-                    # We have repository metadata that does not include metadata for any repository dependencies in the
-                    # repository, so we can update the existing repository metadata.
-                    return False
-        else:
-            # There is no saved repository metadata, so we need to create a new repository_metadata table record.
-            return True
-    # The received metadata_dict includes no metadata for repository dependencies, so a new repository_metadata table record is not needed.
-    return False
-def new_tool_metadata_required( trans, repository, metadata_dict ):
-    """
-    Compare the last saved metadata for each tool in the repository with the new metadata in metadata_dict to determine if a new repository_metadata
-    table record is required, or if the last saved metadata record can be updated instead.
-    """
-    if 'tools' in metadata_dict:
-        repository_metadata = get_latest_repository_metadata( trans, repository.id )
-        if repository_metadata:
-            metadata = repository_metadata.metadata
-            if metadata:
-                if 'tools' in metadata:
-                    saved_tool_ids = []
-                    # The metadata for one or more tools was successfully generated in the past
-                    # for this repository, so we first compare the version string for each tool id
-                    # in metadata_dict with what was previously saved to see if we need to create
-                    # a new table record or if we can simply update the existing record.
-                    for new_tool_metadata_dict in metadata_dict[ 'tools' ]:
-                        for saved_tool_metadata_dict in metadata[ 'tools' ]:
-                            if saved_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
-                                saved_tool_ids.append( saved_tool_metadata_dict[ 'id' ] )
-                            if new_tool_metadata_dict[ 'id' ] == saved_tool_metadata_dict[ 'id' ]:
-                                if new_tool_metadata_dict[ 'version' ] != saved_tool_metadata_dict[ 'version' ]:
-                                    return True
-                    # So far, a new metadata record is not required, but we still have to check to see if
-                    # any new tool ids exist in metadata_dict that are not in the saved metadata. We do
-                    # this because if a new tarball was uploaded to a repository that included tools, it
-                    # may have removed existing tool files if they were not included in the uploaded tarball.
-                    for new_tool_metadata_dict in metadata_dict[ 'tools' ]:
-                        if new_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
-                            return True
-                else:
-                    # We have repository metadata that does not include metadata for any tools in the
-                    # repository, so we can update the existing repository metadata.
-                    return False
-        else:
-            # There is no saved repository metadata, so we need to create a new repository_metadata table record.
-            return True
-    # The received metadata_dict includes no metadata for tools, so a new repository_metadata table record is not needed.
-    return False
-def new_workflow_metadata_required( trans, repository, metadata_dict ):
-    """
-    Currently everything about an exported workflow except the name is hard-coded, so there's no real way to differentiate versions of
-    exported workflows. If this changes at some future time, this method should be enhanced accordingly.
-    """
-    if 'workflows' in metadata_dict:
-        repository_metadata = get_latest_repository_metadata( trans, repository.id )
-        if repository_metadata:
-            # The repository has metadata, so update the workflows value - no new record is needed.
-            return False
-        else:
-            # There is no saved repository metadata, so we need to create a new repository_metadata table record.
-            return True
-    # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed.
-    return False
-def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
-    """
-    Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset
-    has problems.
-    """
-    message = ''
-    status = 'done'
-    encoded_id = trans.security.encode_id( repository.id )
-    repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
-    repo_dir = repository.repo_path( trans.app )
-    repo = hg.repository( suc.get_configured_ui(), repo_dir )
-    metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
-                                                                                     repository=repository,
-                                                                                     repository_clone_url=repository_clone_url,
-                                                                                     relative_install_dir=repo_dir,
-                                                                                     repository_files_dir=None,
-                                                                                     resetting_all_metadata_on_repository=False,
-                                                                                     updating_installed_repository=False,
-                                                                                     persist=False )
-    if metadata_dict:
-        downloadable = suc.is_downloadable( metadata_dict )
-        repository_metadata = None
-        if new_repository_dependency_metadata_required( trans, repository, metadata_dict ) or \
-           new_tool_metadata_required( trans, repository, metadata_dict ) or \
-           new_workflow_metadata_required( trans, repository, metadata_dict ):
-            # Create a new repository_metadata table row.
-            repository_metadata = suc.create_or_update_repository_metadata( trans,
-                                                                            encoded_id,
-                                                                            repository,
-                                                                            repository.tip( trans.app ),
-                                                                            metadata_dict )
-            # If this is the first record stored for this repository, see if we need to send any email alerts.
-            if len( repository.downloadable_revisions ) == 1:
-                handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
-        else:
-            repository_metadata = get_latest_repository_metadata( trans, repository.id )
-            if repository_metadata:
-                downloadable = suc.is_downloadable( metadata_dict )
-                # Update the last saved repository_metadata table row.
-                repository_metadata.changeset_revision = repository.tip( trans.app )
-                repository_metadata.metadata = metadata_dict
-                repository_metadata.downloadable = downloadable
-                trans.sa_session.add( repository_metadata )
-                trans.sa_session.flush()
-            else:
-                # There are no tools in the repository, and we're setting metadata on the repository tip.
-                repository_metadata = suc.create_or_update_repository_metadata( trans,
-                                                                                encoded_id,
-                                                                                repository,
-                                                                                repository.tip( trans.app ),
-                                                                                metadata_dict )
-    if 'tools' in metadata_dict and repository_metadata and status != 'error':
-        # Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog.
-        changeset_revisions = []
-        for changeset in repo.changelog:
-            changeset_revision = str( repo.changectx( changeset ) )
-            if suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ):
-                changeset_revisions.append( changeset_revision )
-        add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions )
-    elif len( repo ) == 1 and not invalid_file_tups:
-        message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip( trans.app ) )
-        message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
-        status = "error"
-    if invalid_file_tups:
-        message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
-        status = 'error'
-    # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
-    suc.reset_tool_data_tables( trans.app )
-    return message, status
-def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
-    # Set metadata on the repository tip.
-    error_message, status = set_repository_metadata( trans, repository, content_alert_str=content_alert_str, **kwd )
-    if error_message:
-        # If there is an error, display it.
-        return trans.response.send_redirect( web.url_for( controller='repository',
-                                                          action='manage_repository',
-                                                          id=trans.security.encode_id( repository.id ),
-                                                          message=error_message,
-                                                          status='error' ) )
-def update_for_browsing( trans, repository, current_working_dir, commit_message='' ):
-    # This method is deprecated, but we'll keep it around for a while in case we need it. The problem is that hg purge
-    # is not supported by the mercurial API.
-    # Make a copy of a repository's files for browsing, remove from disk all files that are not tracked, and commit all
-    # added, modified or removed files that have not yet been committed.
-    repo_dir = repository.repo_path( trans.app )
-    repo = hg.repository( suc.get_configured_ui(), repo_dir )
-    # The following will delete the disk copy of only the files in the repository.
-    #os.system( 'hg update -r null > /dev/null 2>&1' )
-    files_to_remove_from_disk = []
-    files_to_commit = []
-    # We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
-    # The codes used to show the status of files are as follows.
-    # M = modified
-    # A = added
-    # R = removed
-    # C = clean
-    # ! = deleted, but still tracked
-    # ? = not tracked
-    # I = ignored
-    # We'll use mercurial's purge extension to remove untracked files. Using this extension requires the
-    # following entry in the repository's hgrc file, which was not required for some time, so we'll add it
-    # if it's missing.
-    # [extensions]
-    # hgext.purge=
-    lines = repo.opener( 'hgrc', 'rb' ).readlines()
-    if not '[extensions]\n' in lines:
-        # No extensions have been added at all, so just append to the file.
-        fp = repo.opener( 'hgrc', 'a' )
-        fp.write( '[extensions]\n' )
-        fp.write( 'hgext.purge=\n' )
-        fp.close()
-    elif not 'hgext.purge=\n' in lines:
-        # The file includes an [extensions] section, but we need to add the
-        # purge extension.
-        fp = repo.opener( 'hgrc', 'wb' )
-        for line in lines:
-            if line.startswith( '[extensions]' ):
-                fp.write( line )
-                fp.write( 'hgext.purge=\n' )
-            else:
-                fp.write( line )
-        fp.close()
-    cmd = 'hg purge'
-    os.chdir( repo_dir )
-    proc = subprocess.Popen( args=cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
-    return_code = proc.wait()
-    os.chdir( current_working_dir )
-    if return_code != 0:
-        output = proc.stdout.read( 32768 )
-        log.debug( 'hg purge failed in repository directory %s, reason: %s' % ( repo_dir, output ) )
-    if files_to_commit:
-        if not commit_message:
-            commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
-        repo.dirstate.write()
-        repo.commit( user=trans.user.username, text=commit_message )
-    cmd = 'hg update > /dev/null 2>&1'
-    os.chdir( repo_dir )
-    proc = subprocess.Popen( args=cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
-    return_code = proc.wait()
-    os.chdir( current_working_dir )
-    if return_code != 0:
-        output = proc.stdout.read( 32768 )
-        log.debug( 'hg update > /dev/null 2>&1 failed in repository directory %s, reason: %s' % ( repo_dir, output ) )

diff -r be5107c0321d1a7e7ab0221e7feb711932488bb0 -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 lib/galaxy/webapps/community/controllers/hg.py
--- a/lib/galaxy/webapps/community/controllers/hg.py
+++ b/lib/galaxy/webapps/community/controllers/hg.py
@@ -1,7 +1,7 @@
 import os, logging
-from galaxy.web.base.controller import *
-from galaxy.util.shed_util_common import get_repository_by_name_and_owner
-from galaxy.webapps.community.controllers.common import set_repository_metadata
+from galaxy import web
+from galaxy.web.base.controller import BaseUIController
+from galaxy.util.shed_util_common import get_repository_by_name_and_owner, set_repository_metadata
 from galaxy import eggs
 eggs.require('mercurial')

This diff is so big that we needed to truncate the remainder.
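
For reference, the hgrc stanza that the deleted update_for_browsing() method appended before shelling out to 'hg purge' is just:

    [extensions]
    hgext.purge=

As far as I know, plain 'hg purge' removes only untracked files and leaves ignored files alone unless --all is passed, which matches the status codes enumerated in the removed comments.
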
https://bitbucket.org/galaxy/galaxy-central/commits/8517df5bb78d/
Changeset: 8517df5bb78d
Branch: search
User: Kyle Ellrott
Date: 2013-01-25 01:49:10
Summary: Getting more of the search engine to work
Affected #: 2 files

diff -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 -r 8517df5bb78d36a68fea8e9670299ef075e17b0f lib/galaxy/model/search.py
--- a/lib/galaxy/model/search.py
+++ b/lib/galaxy/model/search.py
@@ -51,6 +51,7 @@
             print self.query
             for row in self.query.distinct().all():
                 out = {}
+                out['domain'] = self.DOMAIN
                 for col in self.OUTPUT_COLUMNS:
                     out[col] = getattr(row, col)
                 yield out
@@ -76,9 +77,11 @@
 #Library Searching
 ##################

-class LibraryQuery(QueryBaseClass):
+class LibraryDatasetQuery(QueryBaseClass):
+    DOMAIN = "library_dataset"
     OUTPUT_COLUMNS = [ 'extended_metadata', 'name', 'id' ]
     def filter(self, arg):
+        print "Library", arg
         if arg.name == 'extended_metadata':
             self.do_query = True
             self.query = self.query.join( ExtendedMetadata )
@@ -92,9 +95,16 @@
                     alias.value == str(ex_meta[f])
                 )
             )
+        if arg.name == "name":
+            self.do_query = True
+            if arg.mode == "==":
+                self.query = self.query.filter( LibraryDatasetDatasetAssociation.name == arg.other )
+            if arg.mode == "like":
+                self.query = self.query.filter( LibraryDatasetDatasetAssociation.name.like(arg.other) )

-class LibrarySearch(SearchBaseClass):
+
+class LibraryDatasetSearch(SearchBaseClass):
     FIELDS = [
         SearchField("name"),
         SearchField("id"),
@@ -104,7 +114,7 @@
     @staticmethod
     def search(trans):
         query = trans.sa_session.query( LibraryDatasetDatasetAssociation )
-        return LibraryQuery(query)
+        return LibraryDatasetQuery(query)

 ##################
 #History Dataset Searching
@@ -112,6 +122,7 @@

 class HistoryDatasetQuery(QueryBaseClass):
+    DOMAIN = "history_dataset"
     OUTPUT_COLUMNS = ['name', 'id']

     def filter(self, arg):
@@ -139,6 +150,7 @@

 class HistoryQuery(QueryBaseClass):
+    DOMAIN = "history"
     OUTPUT_COLUMNS = ['name', 'id']

     def filter(self, arg):
@@ -197,6 +209,7 @@

 class WorkflowQuery(QueryBaseClass):
+    DOMAIN = "workflow"
     OUTPUT_COLUMNS = ['name', 'id']

     def filter(self, arg):
@@ -233,7 +246,7 @@

 search_mapping = {
-    'library' : LibrarySearch,
+    'library_dataset' : LibraryDatasetSearch,
     'history_dataset' : HistoryDatasetSearch,
     'history' : HistorySearch,
     'workflow' : WorkflowSearch

diff -r bd6d97447ef8736fdeb1ff8d7ea8aa5412468f16 -r 8517df5bb78d36a68fea8e9670299ef075e17b0f lib/galaxy/webapps/galaxy/api/search.py
--- a/lib/galaxy/webapps/galaxy/api/search.py
+++ b/lib/galaxy/webapps/galaxy/api/search.py
@@ -50,6 +50,7 @@
     FIELD_NAME = "name"
     FIELD_ID = "id"
+    FIELD_DOMAIN = "domain"
     FIELD_EXTENDED_METADATA = "extended_metadata"
     FIELD_ANNOTATION = "annotation"
     FIELD_TAGS = "tags"
@@ -86,8 +87,9 @@
             o = {}
             o[ self.FIELD_ID ] = trans.security.encode_id(row['id'])
             o[ self.FIELD_NAME ] = row['name']
+            o[ self.FIELD_DOMAIN ] = row['domain']
-            if self.FIELD_EXTENDED_METADATA in result_fields and 'extended_metadata' in row:
+            if self.FIELD_EXTENDED_METADATA in result_fields and 'extended_metadata.data' in row:
                 o[ self.FIELD_EXTENDED_METADATA ] = row['extended_metadata.data']
             if self.FIELD_ANNOTATION in result_fields:

https://bitbucket.org/galaxy/galaxy-central/commits/288c79b3b694/
Changeset: 288c79b3b694
Branch: search
User: Kyle Ellrott
Date: 2013-02-05 20:04:26
Summary: Default merge
Affected #: 410 files
Diff not available.
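
Putting the search.py changes above together, the intended query flow looks roughly like this (a sketch assembled from the diffed code, not a tested snippet; trans is assumed to be a Galaxy transaction object):

    from galaxy.model.search import search_mapping, HistorySearch

    search = search_mapping[ 'history' ].search( trans )    # returns a HistoryQuery
    name_field = HistorySearch().get_field( 'name' )
    search.filter( name_field.like( '%rna%' ) )
    for row in search.get_results( force_query=True ):
        print row['domain'], row['id'], row['name']         # 'domain' comes from the new DOMAIN attribute
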
https://bitbucket.org/galaxy/galaxy-central/commits/87a206380c4b/
Changeset: 87a206380c4b
Branch: search
User: Kyle Ellrott
Date: 2013-02-05 21:34:07
Summary: Using methods from model to produce json api output
Affected #: 3 files
Diff not available.

https://bitbucket.org/galaxy/galaxy-central/commits/dc7e1310ed83/
Changeset: dc7e1310ed83
Branch: search
User: Kyle Ellrott
Date: 2013-02-06 01:29:23
Summary: Working on the HTML output of the search page
Affected #: 2 files
Diff not available.

https://bitbucket.org/galaxy/galaxy-central/commits/e790dfa809d3/
Changeset: e790dfa809d3
Branch: search
User: Kyle Ellrott
Date: 2013-02-06 19:40:57
Summary: Adding peek to search page
Affected #: 2 files
Diff not available.

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--
This is a commit notification from bitbucket.org. You are receiving this
because you have the service enabled, addressing the recipient of this email.