galaxy-commits
May 2012: 1 participant, 61 discussions
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c5ba3065b82e/
changeset: c5ba3065b82e
user: jgoecks
date: 2012-05-27 21:54:32
summary: Support for saving and validating different visualization types.
affected #: 3 files
diff -r fdc3f20a46d3af6ac89bd02baef3db09eed3f235 -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -305,60 +305,64 @@
decoded_id = trans.security.decode_id( id )
vis = session.query( trans.model.Visualization ).get( decoded_id )
- # Decode the payload
- decoded_payload = config
# Create new VisualizationRevision that will be attached to the viz
vis_rev = trans.model.VisualizationRevision()
vis_rev.visualization = vis
vis_rev.title = vis.title
vis_rev.dbkey = dbkey
+
+ # -- Validate config. --
+
+ if vis.type == 'trackster':
+ def unpack_track( track_json ):
+ """ Unpack a track from its json. """
+ return {
+ "dataset_id": trans.security.decode_id( track_json['dataset_id'] ),
+ "hda_ldda": track_json.get('hda_ldda', 'hda'),
+ "name": track_json['name'],
+ "track_type": track_json['track_type'],
+ "prefs": track_json['prefs'],
+ "mode": track_json['mode'],
+ "filters": track_json['filters'],
+ "tool_state": track_json['tool_state']
+ }
- def unpack_track( track_json ):
- """ Unpack a track from its json. """
- return {
- "dataset_id": trans.security.decode_id( track_json['dataset_id'] ),
- "hda_ldda": track_json.get('hda_ldda', 'hda'),
- "name": track_json['name'],
- "track_type": track_json['track_type'],
- "prefs": track_json['prefs'],
- "mode": track_json['mode'],
- "filters": track_json['filters'],
- "tool_state": track_json['tool_state']
- }
+ def unpack_collection( collection_json ):
+ """ Unpack a collection from its json. """
+ unpacked_drawables = []
+ drawables = collection_json[ 'drawables' ]
+ for drawable_json in drawables:
+ if 'track_type' in drawable_json:
+ drawable = unpack_track( drawable_json )
+ else:
+ drawable = unpack_collection( drawable_json )
+ unpacked_drawables.append( drawable )
+ return {
+ "name": collection_json.get( 'name', '' ),
+ "obj_type": collection_json[ 'obj_type' ],
+ "drawables": unpacked_drawables,
+ "prefs": collection_json.get( 'prefs' , [] ),
+ "filters": collection_json.get( 'filters', None )
+ }
- def unpack_collection( collection_json ):
- """ Unpack a collection from its json. """
- unpacked_drawables = []
- drawables = collection_json[ 'drawables' ]
- for drawable_json in drawables:
- if 'track_type' in drawable_json:
- drawable = unpack_track( drawable_json )
- else:
- drawable = unpack_collection( drawable_json )
- unpacked_drawables.append( drawable )
- return {
- "name": collection_json.get( 'name', '' ),
- "obj_type": collection_json[ 'obj_type' ],
- "drawables": unpacked_drawables,
- "prefs": collection_json.get( 'prefs' , [] ),
- "filters": collection_json.get( 'filters', None )
- }
+ # TODO: unpack and validate bookmarks:
+ def unpack_bookmarks( bookmarks_json ):
+ return bookmarks_json
- # TODO: unpack and validate bookmarks:
- def unpack_bookmarks( bookmarks_json ):
- return bookmarks_json
-
- # Unpack and validate view content.
- view_content = unpack_collection( decoded_payload[ 'view' ] )
- bookmarks = unpack_bookmarks( decoded_payload[ 'bookmarks' ] )
- vis_rev.config = { "view": view_content, "bookmarks": bookmarks }
- # Viewport from payload
- if 'viewport' in decoded_payload:
- chrom = decoded_payload['viewport']['chrom']
- start = decoded_payload['viewport']['start']
- end = decoded_payload['viewport']['end']
- overview = decoded_payload['viewport']['overview']
- vis_rev.config[ "viewport" ] = { 'chrom': chrom, 'start': start, 'end': end, 'overview': overview }
+ # Unpack and validate view content.
+ view_content = unpack_collection( config[ 'view' ] )
+ bookmarks = unpack_bookmarks( config[ 'bookmarks' ] )
+ vis_rev.config = { "view": view_content, "bookmarks": bookmarks }
+ # Viewport from payload
+ if 'viewport' in config:
+ chrom = config['viewport']['chrom']
+ start = config['viewport']['start']
+ end = config['viewport']['end']
+ overview = config['viewport']['overview']
+ vis_rev.config[ "viewport" ] = { 'chrom': chrom, 'start': start, 'end': end, 'overview': overview }
+ elif type == 'circos':
+ # TODO.
+ pass
vis.latest_revision = vis_rev
session.add( vis_rev )
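For orientation, here is a minimal sketch of the kind of decoded Trackster `config` this validation code expects; the field names are taken from unpack_track/unpack_collection and the viewport handling above, while every concrete value is hypothetical.

    # Hypothetical example of the decoded `config` argument for a Trackster
    # visualization; field names follow unpack_track/unpack_collection above,
    # the values are invented for illustration only.
    example_config = {
        "view": {
            "obj_type": "View",               # hypothetical collection type
            "name": "chr22 overview",
            "prefs": [],
            "filters": None,
            "drawables": [
                {
                    "track_type": "LineTrack",        # presence of 'track_type' marks a track
                    "dataset_id": "<encoded hda id>", # decoded via trans.security.decode_id
                    "hda_ldda": "hda",
                    "name": "Coverage",
                    "prefs": {},
                    "mode": "Auto",
                    "filters": [],
                    "tool_state": {}
                }
            ]
        },
        "bookmarks": [],   # currently passed through unvalidated (see the TODO above)
        "viewport": { "chrom": "chr22", "start": 0, "end": 100, "overview": False }
    }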
diff -r fdc3f20a46d3af6ac89bd02baef3db09eed3f235 -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -234,6 +234,15 @@
rows.append( [location, name] )
return { 'data': rows }
+ # TODO: this is duplicated from visualization controller; remove it once
+ # routing incompatibilities have been resolved.
+ @web.json
+ def save( self, trans, config, type, id=None, title=None, dbkey=None, annotation=None ):
+ """
+ Save a visualization; if visualization does not have an ID, a new
+ visualization is created. Returns JSON of visualization.
+ """
+ return self.save_visualization( trans, from_json_string( config ), type, id, title, dbkey, annotation )
@web.expose
@web.require_login()
@@ -409,11 +418,7 @@
result = data_provider.get_data( chrom, low, high, int( start_val ), int( max_vals ), **kwargs )
result.update( { 'dataset_type': tracks_dataset_type, 'extra_info': extra_info } )
return result
-
- @web.json
- def save( self, trans, config, type, id=None, title=None, dbkey=None, annotation=None ):
- return self.save_visualization( trans, from_json_string( config ), type, id, title, dbkey, annotation )
-
+
@web.expose
@web.require_login( "see all available libraries" )
def list_libraries( self, trans, **kwargs ):
diff -r fdc3f20a46d3af6ac89bd02baef3db09eed3f235 -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py
+++ b/lib/galaxy/web/controllers/visualization.py
@@ -349,7 +349,7 @@
def create( self, trans, visualization_title="", visualization_slug="", visualization_annotation="", visualization_dbkey="",
visualization_type="" ):
"""
- Create a new visualization
+ Creates a new visualization or returns a form for creating visualization.
"""
visualization_title_err = visualization_slug_err = visualization_annotation_err = ""
if trans.request.method == "POST":
@@ -381,6 +381,14 @@
.add_text( "visualization_annotation", "Visualization annotation", value=visualization_annotation, error=visualization_annotation_err,
help="A description of the visualization; annotation is shown alongside published visualizations."),
template="visualization/create.mako" )
+
+ @web.json
+ def save( self, trans, config, type, id=None, title=None, dbkey=None, annotation=None ):
+ """
+ Save a visualization; if visualization does not have an ID, a new
+ visualization is created. Returns JSON of visualization.
+ """
+ return self.save_visualization( trans, from_json_string( config ), type, id, title, dbkey, annotation )
@web.expose
@web.require_login( "edit visualizations" )
https://bitbucket.org/galaxy/galaxy-central/changeset/03cb8ee86726/
changeset: 03cb8ee86726
user: jgoecks
date: 2012-05-27 22:09:16
summary: Include 'mixin' in names of many common controller mixins.
affected #: 22 files
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/tools/genome_index/__init__.py
--- a/lib/galaxy/tools/genome_index/__init__.py
+++ b/lib/galaxy/tools/genome_index/__init__.py
@@ -3,7 +3,7 @@
from galaxy.web.framework.helpers import to_unicode
from galaxy.model.item_attrs import UsesAnnotations
from galaxy.util.json import *
-from galaxy.web.base.controller import UsesHistory
+from galaxy.web.base.controller import UsesHistoryMixin
from galaxy.tools.data import ToolDataTableManager
log = logging.getLogger(__name__)
@@ -63,8 +63,6 @@
if gitd:
destination = None
- alldone = True
- indexjobs = gitd.deferred.params[ 'indexjobs' ]
tdtman = ToolDataTableManager()
xmltree = tdtman.load_from_config_file(app.config.tool_data_table_config_path)
for node in xmltree:
@@ -165,14 +163,6 @@
self._check_link( fasta, target )
for line in location:
self._add_line( line[ 'file' ], line[ 'line' ] )
- for indexjob in indexjobs:
- js = sa_session.query( model.Job ).filter_by( id=indexjob ).first()
- if js.state not in [ 'ok', 'done', 'error' ]:
- alldone = False
- if alldone:
- gitd.deferred.state = 'ok'
- sa_session.add( gitd.deferred )
- sa_session.flush()
def _check_link( self, targetfile, symlink ):
target = os.path.relpath( targetfile, os.path.dirname( symlink ) )
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/tools/imp_exp/__init__.py
--- a/lib/galaxy/tools/imp_exp/__init__.py
+++ b/lib/galaxy/tools/imp_exp/__init__.py
@@ -4,7 +4,7 @@
from galaxy.web.framework.helpers import to_unicode
from galaxy.model.item_attrs import UsesAnnotations
from galaxy.util.json import *
-from galaxy.web.base.controller import UsesHistory
+from galaxy.web.base.controller import UsesHistoryMixin
log = logging.getLogger(__name__)
@@ -42,7 +42,7 @@
toolbox.tools_by_id[ history_imp_tool.id ] = history_imp_tool
log.debug( "Loaded history import tool: %s", history_imp_tool.id )
-class JobImportHistoryArchiveWrapper( object, UsesHistory, UsesAnnotations ):
+class JobImportHistoryArchiveWrapper( object, UsesHistoryMixin, UsesAnnotations ):
"""
Class provides support for performing jobs that import a history from
an archive.
@@ -263,7 +263,7 @@
jiha.job.stderr += "Error cleaning up history import job: %s" % e
db_session.flush()
-class JobExportHistoryArchiveWrapper( object, UsesHistory, UsesAnnotations ):
+class JobExportHistoryArchiveWrapper( object, UsesHistoryMixin, UsesAnnotations ):
"""
Class provides support for performing jobs that export a history to an
archive.
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/api/datasets.py
--- a/lib/galaxy/web/api/datasets.py
+++ b/lib/galaxy/web/api/datasets.py
@@ -10,7 +10,7 @@
log = logging.getLogger( __name__ )
-class DatasetsController( BaseAPIController, UsesHistoryDatasetAssociation ):
+class DatasetsController( BaseAPIController, UsesHistoryMixinDatasetAssociationMixin ):
@web.expose_api
def index( self, trans, hda_id, **kwd ):
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/api/histories.py
--- a/lib/galaxy/web/api/histories.py
+++ b/lib/galaxy/web/api/histories.py
@@ -12,7 +12,7 @@
log = logging.getLogger( __name__ )
-class HistoriesController( BaseAPIController, UsesHistory ):
+class HistoriesController( BaseAPIController, UsesHistoryMixin ):
@web.expose_api
def index( self, trans, deleted='False', **kwd ):
@@ -153,7 +153,7 @@
POST /api/histories/deleted/{encoded_quota_id}/undelete
Undeletes a quota
"""
- history = self.get_history( trans, id, check_ownership=True, check_accessible=False, deleted=True )
+ history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False, deleted=True )
history.deleted = False
trans.sa_session.add( history )
trans.sa_session.flush()
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/api/history_contents.py
--- a/lib/galaxy/web/api/history_contents.py
+++ b/lib/galaxy/web/api/history_contents.py
@@ -12,7 +12,7 @@
log = logging.getLogger( __name__ )
-class HistoryContentsController( BaseAPIController, UsesHistoryDatasetAssociation, UsesHistory, UsesLibrary, UsesLibraryItems ):
+class HistoryContentsController( BaseAPIController, UsesHistoryMixinDatasetAssociationMixin, UsesHistoryMixin, UsesLibraryMixin, UsesLibraryMixinItems ):
@web.expose_api
def index( self, trans, history_id, **kwd ):
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/api/library_contents.py
--- a/lib/galaxy/web/api/library_contents.py
+++ b/lib/galaxy/web/api/library_contents.py
@@ -10,7 +10,7 @@
log = logging.getLogger( __name__ )
-class LibraryContentsController( BaseAPIController, UsesLibrary, UsesLibraryItems ):
+class LibraryContentsController( BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems ):
@web.expose_api
def index( self, trans, library_id, **kwd ):
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/api/quotas.py
--- a/lib/galaxy/web/api/quotas.py
+++ b/lib/galaxy/web/api/quotas.py
@@ -2,7 +2,7 @@
API operations on Quota objects.
"""
import logging
-from galaxy.web.base.controller import BaseAPIController, Admin, UsesQuota, url_for
+from galaxy.web.base.controller import BaseAPIController, Admin, UsesQuotaMixin, url_for
from galaxy import web, util
from elementtree.ElementTree import XML
@@ -14,7 +14,7 @@
log = logging.getLogger( __name__ )
-class QuotaAPIController( BaseAPIController, Admin, AdminActions, UsesQuota, QuotaParamParser ):
+class QuotaAPIController( BaseAPIController, Admin, AdminActions, UsesQuotaMixin, QuotaParamParser ):
@web.expose_api
@web.require_admin
def index( self, trans, deleted='False', **kwd ):
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -177,7 +177,11 @@
def not_implemented( self, trans, **kwd ):
raise HTTPNotImplemented()
-class SharableItemSecurity:
+#
+# -- Mixins for working with Galaxy objects. --
+#
+
+class SharableItemSecurityMixin:
""" Mixin for handling security for sharable items. """
def security_check( self, trans, item, check_ownership=False, check_accessible=False ):
""" Security checks for an item: checks if (a) user owns item or (b) item is accessible to user. """
@@ -197,11 +201,7 @@
raise ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' )
return item
-#
-# TODO: need to move UsesHistory, etc. mixins to better location - perhaps lib/galaxy/model/XXX ?
-#
-
-class UsesHistoryDatasetAssociation:
+class UsesHistoryMixinDatasetAssociationMixin:
""" Mixin for controllers that use HistoryDatasetAssociation objects. """
def get_dataset( self, trans, dataset_id, check_ownership=True, check_accessible=False ):
""" Get an HDA object by id. """
@@ -259,14 +259,14 @@
truncated = False
return truncated, dataset_data
-class UsesLibrary:
+class UsesLibraryMixin:
def get_library( self, trans, id, check_ownership=False, check_accessible=True ):
l = self.get_object( trans, id, 'Library' )
if check_accessible and not ( trans.user_is_admin() or trans.app.security_agent.can_access_library( trans.get_current_user_roles(), l ) ):
error( "LibraryFolder is not accessible to the current user" )
return l
-class UsesLibraryItems( SharableItemSecurity ):
+class UsesLibraryMixinItems( SharableItemSecurityMixin ):
def get_library_folder( self, trans, id, check_ownership=False, check_accessible=True ):
return self.get_object( trans, id, 'LibraryFolder', check_ownership=False, check_accessible=check_accessible )
def get_library_dataset_dataset_association( self, trans, id, check_ownership=False, check_accessible=True ):
@@ -274,7 +274,7 @@
def get_library_dataset( self, trans, id, check_ownership=False, check_accessible=True ):
return self.get_object( trans, id, 'LibraryDataset', check_ownership=False, check_accessible=check_accessible )
-class UsesVisualization( SharableItemSecurity ):
+class UsesVisualizationMixin( SharableItemSecurityMixin ):
""" Mixin for controllers that use Visualization objects. """
viz_types = [ "trackster", "circos" ]
@@ -522,7 +522,7 @@
return visualization
-class UsesStoredWorkflow( SharableItemSecurity ):
+class UsesStoredWorkflowMixin( SharableItemSecurityMixin ):
""" Mixin for controllers that use StoredWorkflow objects. """
def get_stored_workflow( self, trans, id, check_ownership=True, check_accessible=False ):
""" Get a StoredWorkflow from the database by id, verifying ownership. """
@@ -560,7 +560,7 @@
# Connections by input name
step.input_connections_by_name = dict( ( conn.input_name, conn ) for conn in step.input_connections )
-class UsesHistory( SharableItemSecurity ):
+class UsesHistoryMixin( SharableItemSecurityMixin ):
""" Mixin for controllers that use History objects. """
def get_history( self, trans, id, check_ownership=True, check_accessible=False, deleted=None ):
"""Get a History from the database by id, verifying ownership."""
@@ -580,7 +580,7 @@
query = query.filter( trans.model.Dataset.purged == False )
return query.all()
-class UsesFormDefinitions:
+class UsesFormDefinitionsMixin:
"""Mixin for controllers that use Galaxy form objects."""
def get_all_forms( self, trans, all_versions=False, filter=None, form_type='All' ):
"""
@@ -1342,7 +1342,7 @@
selected_value=selected_value,
refresh_on_change=True )
-class Sharable:
+class SharableMixin:
""" Mixin for a controller that manages an item that can be shared. """
# -- Implemented methods. --
@@ -1433,7 +1433,7 @@
""" Return item based on id. """
raise "Unimplemented Method"
-class UsesQuota( object ):
+class UsesQuotaMixin( object ):
def get_quota( self, trans, id, check_ownership=False, check_accessible=False, deleted=None ):
return self.get_object( trans, id, 'Quota', check_ownership=False, check_accessible=False, deleted=deleted )
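To illustrate the rename in context, a minimal hypothetical controller that composes the new mixin names; only the mixin and base-class names come from this commit, the controller and its method are invented.

    # Hypothetical controller using the renamed mixins from this commit.
    from galaxy import web
    from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesHistoryMixin
    from galaxy.model.item_attrs import UsesAnnotations

    class ExampleController( BaseUIController, SharableMixin, UsesHistoryMixin, UsesAnnotations ):
        @web.expose
        def show_history_name( self, trans, id ):
            # get_history is provided by UsesHistoryMixin and checks ownership by default.
            history = self.get_history( trans, id, check_ownership=True )
            return history.name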
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py
+++ b/lib/galaxy/web/controllers/admin.py
@@ -428,7 +428,7 @@
def build_initial_query( self, trans, **kwd ):
return trans.sa_session.query( self.model_class )
-class AdminGalaxy( BaseUIController, Admin, AdminActions, UsesQuota, QuotaParamParser ):
+class AdminGalaxy( BaseUIController, Admin, AdminActions, UsesQuotaMixin, QuotaParamParser ):
user_list_grid = UserListGrid()
role_list_grid = RoleListGrid()
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -150,7 +150,7 @@
.filter( model.History.deleted==False ) \
.filter( self.model_class.visible==True )
-class DatasetInterface( BaseUIController, UsesAnnotations, UsesHistory, UsesHistoryDatasetAssociation, UsesItemRatings ):
+class DatasetInterface( BaseUIController, UsesAnnotations, UsesHistoryMixin, UsesHistoryMixinDatasetAssociationMixin, UsesItemRatings ):
stored_list_grid = HistoryDatasetAssociationListGrid()
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/external_service.py
--- a/lib/galaxy/web/controllers/external_service.py
+++ b/lib/galaxy/web/controllers/external_service.py
@@ -63,7 +63,7 @@
grids.GridAction( "Create new external service", dict( controller='external_service', action='create_external_service' ) )
]
-class ExternalService( BaseUIController, UsesFormDefinitions ):
+class ExternalService( BaseUIController, UsesFormDefinitionsMixin ):
external_service_grid = ExternalServiceGrid()
@web.expose
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py
+++ b/lib/galaxy/web/controllers/history.py
@@ -190,7 +190,7 @@
# A public history is published, has a slug, and is not deleted.
return query.filter( self.model_class.published == True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False )
-class HistoryController( BaseUIController, Sharable, UsesAnnotations, UsesItemRatings, UsesHistory ):
+class HistoryController( BaseUIController, SharableMixin, UsesAnnotations, UsesItemRatings, UsesHistoryMixin ):
@web.expose
def index( self, trans ):
return ""
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py
+++ b/lib/galaxy/web/controllers/library_common.py
@@ -68,7 +68,7 @@
pass
os.rmdir( tmpd )
-class LibraryCommon( BaseUIController, UsesFormDefinitions ):
+class LibraryCommon( BaseUIController, UsesFormDefinitionsMixin ):
@web.json
def library_item_updates( self, trans, ids=None, states=None ):
# Avoid caching
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/page.py
--- a/lib/galaxy/web/controllers/page.py
+++ b/lib/galaxy/web/controllers/page.py
@@ -272,8 +272,8 @@
# Default behavior:
_BaseHTMLProcessor.unknown_endtag( self, tag )
-class PageController( BaseUIController, Sharable, UsesAnnotations, UsesHistory,
- UsesStoredWorkflow, UsesHistoryDatasetAssociation, UsesVisualization, UsesItemRatings ):
+class PageController( BaseUIController, SharableMixin, UsesAnnotations, UsesHistoryMixin,
+ UsesStoredWorkflowMixin, UsesHistoryMixinDatasetAssociationMixin, UsesVisualizationMixin, UsesItemRatings ):
_page_list = PageListGrid()
_all_published_list = PageAllPublishedGrid()
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/request_type.py
--- a/lib/galaxy/web/controllers/request_type.py
+++ b/lib/galaxy/web/controllers/request_type.py
@@ -72,7 +72,7 @@
grids.GridAction( "Create new request type", dict( controller='request_type', action='create_request_type' ) )
]
-class RequestType( BaseUIController, UsesFormDefinitions ):
+class RequestType( BaseUIController, UsesFormDefinitionsMixin ):
request_type_grid = RequestTypeGrid()
@web.expose
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py
+++ b/lib/galaxy/web/controllers/requests_admin.py
@@ -94,7 +94,7 @@
return query
return query.filter_by( sample_id=trans.security.decode_id( sample_id ) )
-class RequestsAdmin( BaseUIController, UsesFormDefinitions ):
+class RequestsAdmin( BaseUIController, UsesFormDefinitionsMixin ):
request_grid = AdminRequestsGrid()
datatx_grid = DataTransferGrid()
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/requests_common.py
--- a/lib/galaxy/web/controllers/requests_common.py
+++ b/lib/galaxy/web/controllers/requests_common.py
@@ -93,7 +93,7 @@
confirm="Samples cannot be added to this request after it is submitted. Click OK to submit." )
]
-class RequestsCommon( BaseUIController, UsesFormDefinitions ):
+class RequestsCommon( BaseUIController, UsesFormDefinitionsMixin ):
@web.json
def sample_state_updates( self, trans, ids=None, states=None ):
# Avoid caching
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py
+++ b/lib/galaxy/web/controllers/root.py
@@ -11,7 +11,7 @@
log = logging.getLogger( __name__ )
-class RootController( BaseUIController, UsesHistory, UsesAnnotations ):
+class RootController( BaseUIController, UsesHistoryMixin, UsesAnnotations ):
@web.expose
def default(self, trans, target1=None, target2=None, **kwd):
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -163,7 +163,7 @@
def apply_query_filter( self, trans, query, **kwargs ):
return query.filter( self.model_class.user_id == trans.user.id )
-class TracksController( BaseUIController, UsesVisualization, UsesHistoryDatasetAssociation, Sharable ):
+class TracksController( BaseUIController, UsesVisualizationMixin, UsesHistoryMixinDatasetAssociationMixin, SharableMixin ):
"""
Controller for track browser interface. Handles building a new browser from
datasets in the current history, and display of the resulting browser.
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py
+++ b/lib/galaxy/web/controllers/user.py
@@ -38,7 +38,7 @@
def build_initial_query( self, trans, **kwd ):
return trans.sa_session.query( self.model_class ).filter( self.model_class.user_id == trans.user.id )
-class User( BaseUIController, UsesFormDefinitions ):
+class User( BaseUIController, UsesFormDefinitionsMixin ):
user_openid_grid = UserOpenIDGrid()
installed_len_files = None
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py
+++ b/lib/galaxy/web/controllers/visualization.py
@@ -68,8 +68,8 @@
return query.filter( self.model_class.deleted==False ).filter( self.model_class.published==True )
-class VisualizationController( BaseUIController, Sharable, UsesAnnotations,
- UsesHistoryDatasetAssociation, UsesVisualization,
+class VisualizationController( BaseUIController, SharableMixin, UsesAnnotations,
+ UsesHistoryMixinDatasetAssociationMixin, UsesVisualizationMixin,
UsesItemRatings ):
_user_list_grid = VisualizationListGrid()
_published_list_grid = VisualizationAllPublishedGrid()
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -105,7 +105,7 @@
if self.cur_tag == self.target_tag:
self.tag_content += text
-class WorkflowController( BaseUIController, Sharable, UsesStoredWorkflow, UsesAnnotations, UsesItemRatings ):
+class WorkflowController( BaseUIController, SharableMixin, UsesStoredWorkflowMixin, UsesAnnotations, UsesItemRatings ):
stored_list_grid = StoredWorkflowListGrid()
published_list_grid = StoredWorkflowAllPublishedGrid()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Provide generic support for saving visualizations.
by Bitbucket 27 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/fdc3f20a46d3/
changeset: fdc3f20a46d3
user: jgoecks
date: 2012-05-27 18:34:52
summary: Provide generic support for saving visualizations.
affected #: 3 files
diff -r f4c4ba7be3d17fbf7924e9f8d6ae005960fcc4c0 -r fdc3f20a46d3af6ac89bd02baef3db09eed3f235 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -280,41 +280,91 @@
viz_types = [ "trackster", "circos" ]
len_files = None
-
+
def create_visualization( self, trans, title, slug, type, dbkey, annotation=None, config={} ):
- user = trans.get_user()
+ """ Create visualiation and first revision. """
+ visualization = self._create_visualization( trans, title, type, dbkey, slug, annotation )
- # Error checking.
- title_err = slug_err = ""
- if not title:
- title_err = "visualization name is required"
- elif not slug:
- slug_err = "visualization id is required"
- elif not VALID_SLUG_RE.match( slug ):
- slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
- elif trans.sa_session.query( trans.model.Visualization ).filter_by( user=user, slug=slug, deleted=False ).first():
- slug_err = "visualization id must be unique"
-
- if title_err or slug_err:
- return { 'title_err': title_err, 'slug_err': slug_err }
-
- # Create visualization
- visualization = trans.model.Visualization( user=user, title=title, slug=slug, dbkey=dbkey, type=type )
- if annotation:
- annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
- self.add_item_annotation( trans.sa_session, trans.user, visualization, annotation )
-
- # And the first visualization revision
- revision = trans.model.VisualizationRevision( visualization=visualization, title=title, config={}, dbkey=dbkey )
+ # Create and save first visualization revision
+ revision = trans.model.VisualizationRevision( visualization=visualization, title=title, config=config, dbkey=dbkey )
visualization.latest_revision = revision
-
- # Persist
session = trans.sa_session
- session.add(visualization)
- session.add(revision)
+ session.add( revision )
session.flush()
return visualization
+
+ def save_visualization( self, trans, config, type, id=None, title=None, dbkey=None, slug=None, annotation=None ):
+ session = trans.sa_session
+
+ # Create/get visualization.
+ if not id:
+ # Create new visualization.
+ vis = self._create_visualization( trans, title, type, dbkey, slug, annotation )
+ else:
+ decoded_id = trans.security.decode_id( id )
+ vis = session.query( trans.model.Visualization ).get( decoded_id )
+
+ # Decode the payload
+ decoded_payload = config
+ # Create new VisualizationRevision that will be attached to the viz
+ vis_rev = trans.model.VisualizationRevision()
+ vis_rev.visualization = vis
+ vis_rev.title = vis.title
+ vis_rev.dbkey = dbkey
+
+ def unpack_track( track_json ):
+ """ Unpack a track from its json. """
+ return {
+ "dataset_id": trans.security.decode_id( track_json['dataset_id'] ),
+ "hda_ldda": track_json.get('hda_ldda', 'hda'),
+ "name": track_json['name'],
+ "track_type": track_json['track_type'],
+ "prefs": track_json['prefs'],
+ "mode": track_json['mode'],
+ "filters": track_json['filters'],
+ "tool_state": track_json['tool_state']
+ }
+
+ def unpack_collection( collection_json ):
+ """ Unpack a collection from its json. """
+ unpacked_drawables = []
+ drawables = collection_json[ 'drawables' ]
+ for drawable_json in drawables:
+ if 'track_type' in drawable_json:
+ drawable = unpack_track( drawable_json )
+ else:
+ drawable = unpack_collection( drawable_json )
+ unpacked_drawables.append( drawable )
+ return {
+ "name": collection_json.get( 'name', '' ),
+ "obj_type": collection_json[ 'obj_type' ],
+ "drawables": unpacked_drawables,
+ "prefs": collection_json.get( 'prefs' , [] ),
+ "filters": collection_json.get( 'filters', None )
+ }
+
+ # TODO: unpack and validate bookmarks:
+ def unpack_bookmarks( bookmarks_json ):
+ return bookmarks_json
+
+ # Unpack and validate view content.
+ view_content = unpack_collection( decoded_payload[ 'view' ] )
+ bookmarks = unpack_bookmarks( decoded_payload[ 'bookmarks' ] )
+ vis_rev.config = { "view": view_content, "bookmarks": bookmarks }
+ # Viewport from payload
+ if 'viewport' in decoded_payload:
+ chrom = decoded_payload['viewport']['chrom']
+ start = decoded_payload['viewport']['start']
+ end = decoded_payload['viewport']['end']
+ overview = decoded_payload['viewport']['overview']
+ vis_rev.config[ "viewport" ] = { 'chrom': chrom, 'start': start, 'end': end, 'overview': overview }
+
+ vis.latest_revision = vis_rev
+ session.add( vis_rev )
+ session.flush()
+ encoded_id = trans.security.encode_id( vis.id )
+ return { "vis_id": encoded_id, "url": url_for( action='browser', id=encoded_id ) }
def _get_dbkeys( self, trans ):
""" Returns all valid dbkeys that a user can use in a visualization. """
@@ -432,6 +482,41 @@
config['viewport'] = latest_revision.config['viewport']
return config
+
+ # -- Helper functions --
+
+ def _create_visualization( self, trans, title, type, dbkey, slug=None, annotation=None ):
+ """ Create visualization but not first revision. Returns Visualization object. """
+ user = trans.get_user()
+
+ # Error checking.
+ title_err = slug_err = ""
+ if not title:
+ title_err = "visualization name is required"
+ elif slug and not VALID_SLUG_RE.match( slug ):
+ slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
+ elif slug and trans.sa_session.query( trans.model.Visualization ).filter_by( user=user, slug=slug, deleted=False ).first():
+ slug_err = "visualization identifier must be unique"
+
+ if title_err or slug_err:
+ return { 'title_err': title_err, 'slug_err': slug_err }
+
+
+ # Create visualization
+ visualization = trans.model.Visualization( user=user, title=title, dbkey=dbkey, type=type )
+ if slug:
+ visualization.slug = slug
+ else:
+ self.create_item_slug( trans.sa_session, visualization )
+ if annotation:
+ annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
+ self.add_item_annotation( trans.sa_session, trans.user, visualization, annotation )
+
+ session = trans.sa_session
+ session.add( visualization )
+ session.flush()
+
+ return visualization
class UsesStoredWorkflow( SharableItemSecurity ):
""" Mixin for controllers that use StoredWorkflow objects. """
@@ -1255,7 +1340,9 @@
class Sharable:
""" Mixin for a controller that manages an item that can be shared. """
- # Implemented methods.
+
+ # -- Implemented methods. --
+
@web.expose
@web.require_login( "share Galaxy items" )
def set_public_username( self, trans, id, username, **kwargs ):
@@ -1268,42 +1355,50 @@
trans.sa_session.flush
return self.sharing( trans, id, **kwargs )
- # Abstract methods.
+ # -- Abstract methods. --
+
@web.expose
@web.require_login( "modify Galaxy items" )
def set_slug_async( self, trans, id, new_slug ):
""" Set item slug asynchronously. """
raise "Unimplemented Method"
+
@web.expose
@web.require_login( "share Galaxy items" )
def sharing( self, trans, id, **kwargs ):
""" Handle item sharing. """
raise "Unimplemented Method"
+
@web.expose
@web.require_login( "share Galaxy items" )
def share( self, trans, id=None, email="", **kwd ):
""" Handle sharing an item with a particular user. """
raise "Unimplemented Method"
+
@web.expose
def display_by_username_and_slug( self, trans, username, slug ):
""" Display item by username and slug. """
raise "Unimplemented Method"
- @web.expose
+
@web.json
@web.require_login( "get item name and link" )
def get_name_and_link_async( self, trans, id=None ):
""" Returns item's name and link. """
raise "Unimplemented Method"
+
@web.expose
@web.require_login("get item content asynchronously")
def get_item_content_async( self, trans, id ):
""" Returns item content in HTML format. """
raise "Unimplemented Method"
- # Helper methods.
+
+ # -- Helper methods. --
+
def _make_item_accessible( self, sa_session, item ):
""" Makes item accessible--viewable and importable--and sets item's slug. Does not flush/commit changes, however. Item must have name, user, importable, and slug attributes. """
item.importable = True
self.create_item_slug( sa_session, item )
+
def create_item_slug( self, sa_session, item ):
""" Create item slug. Slug is unique among user's importable items for item's class. Returns true if item's slug was set; false otherwise. """
if item.slug is None or item.slug == "":
@@ -1323,12 +1418,13 @@
slug = slug_base
count = 1
while sa_session.query( item.__class__ ).filter_by( user=item.user, slug=slug, importable=True ).count() != 0:
- # Slug taken; choose a new slug based on count. This approach can handle numerous histories with the same name gracefully.
+ # Slug taken; choose a new slug based on count. This approach can handle numerous items with the same name gracefully.
slug = '%s-%i' % ( slug_base, count )
count += 1
item.slug = slug
return True
return False
+
def get_item( self, trans, id ):
""" Return item based on id. """
raise "Unimplemented Method"
diff -r f4c4ba7be3d17fbf7924e9f8d6ae005960fcc4c0 -r fdc3f20a46d3af6ac89bd02baef3db09eed3f235 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -163,7 +163,7 @@
def apply_query_filter( self, trans, query, **kwargs ):
return query.filter( self.model_class.user_id == trans.user.id )
-class TracksController( BaseUIController, UsesVisualization, UsesHistoryDatasetAssociation ):
+class TracksController( BaseUIController, UsesVisualization, UsesHistoryDatasetAssociation, Sharable ):
"""
Controller for track browser interface. Handles building a new browser from
datasets in the current history, and display of the resulting browser.
@@ -411,83 +411,8 @@
return result
@web.json
- def save( self, trans, **kwargs ):
- session = trans.sa_session
- vis_id = "undefined"
- if 'vis_id' in kwargs:
- vis_id = kwargs['vis_id'].strip('"')
- dbkey = kwargs['dbkey']
- # Lookup or create Visualization object
- if vis_id == "undefined": # new vis
- vis = model.Visualization()
- vis.user = trans.user
- vis.title = kwargs['title']
- vis.type = "trackster"
- vis.dbkey = dbkey
- session.add( vis )
- else:
- decoded_id = trans.security.decode_id( vis_id )
- vis = session.query( model.Visualization ).get( decoded_id )
- # Decode the payload
- decoded_payload = simplejson.loads( kwargs['payload'] )
- # Create new VisualizationRevision that will be attached to the viz
- vis_rev = model.VisualizationRevision()
- vis_rev.visualization = vis
- vis_rev.title = vis.title
- vis_rev.dbkey = dbkey
-
- def unpack_track( track_json ):
- """ Unpack a track from its json. """
- return {
- "dataset_id": trans.security.decode_id( track_json['dataset_id'] ),
- "hda_ldda": track_json.get('hda_ldda', "hda"),
- "name": track_json['name'],
- "track_type": track_json['track_type'],
- "prefs": track_json['prefs'],
- "mode": track_json['mode'],
- "filters": track_json['filters'],
- "tool_state": track_json['tool_state']
- }
-
- def unpack_collection( collection_json ):
- """ Unpack a collection from its json. """
- unpacked_drawables = []
- drawables = collection_json[ 'drawables' ]
- for drawable_json in drawables:
- if 'track_type' in drawable_json:
- drawable = unpack_track( drawable_json )
- else:
- drawable = unpack_collection( drawable_json )
- unpacked_drawables.append( drawable )
- return {
- "name": collection_json.get( 'name', '' ),
- "obj_type": collection_json[ 'obj_type' ],
- "drawables": unpacked_drawables,
- "prefs": collection_json.get( 'prefs' , [] ),
- "filters": collection_json.get( 'filters', None )
- }
-
- # TODO: unpack and validate bookmarks:
- def unpack_bookmarks( bookmarks_json ):
- return bookmarks_json
-
- # Unpack and validate view content.
- view_content = unpack_collection( decoded_payload[ 'view' ] )
- bookmarks = unpack_bookmarks( decoded_payload[ 'bookmarks' ] )
- vis_rev.config = { "view": view_content, "bookmarks": bookmarks }
- # Viewport from payload
- if 'viewport' in decoded_payload:
- chrom = decoded_payload['viewport']['chrom']
- start = decoded_payload['viewport']['start']
- end = decoded_payload['viewport']['end']
- overview = decoded_payload['viewport']['overview']
- vis_rev.config[ "viewport" ] = { 'chrom': chrom, 'start': start, 'end': end, 'overview': overview }
-
- vis.latest_revision = vis_rev
- session.add( vis_rev )
- session.flush()
- encoded_id = trans.security.encode_id(vis.id)
- return { "vis_id": encoded_id, "url": url_for( action='browser', id=encoded_id ) }
+ def save( self, trans, config, type, id=None, title=None, dbkey=None, annotation=None ):
+ return self.save_visualization( trans, from_json_string( config ), type, id, title, dbkey, annotation )
@web.expose
@web.require_login( "see all available libraries" )
diff -r f4c4ba7be3d17fbf7924e9f8d6ae005960fcc4c0 -r fdc3f20a46d3af6ac89bd02baef3db09eed3f235 templates/tracks/browser.mako
--- a/templates/tracks/browser.mako
+++ b/templates/tracks/browser.mako
@@ -209,10 +209,11 @@
url: "${h.url_for( action='save' )}",
type: "POST",
data: {
- 'vis_id': view.vis_id,
+ 'id': view.vis_id,
'title': view.name,
'dbkey': view.dbkey,
- 'payload': JSON.stringify(payload)
+ 'type': 'trackster',
+ 'config': JSON.stringify(payload)
},
dataType: "json",
success: function(vis_info) {
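The same request the browser.mako change above issues, restated as a hedged Python 2 sketch; the host, endpoint path, and authentication handling are assumptions, while the form fields (id, title, dbkey, type, config) mirror the template.

    # Hedged Python 2 sketch of the save request issued by browser.mako above.
    import json, urllib, urllib2

    # A minimal config of the shape validated server-side by save_visualization.
    payload = { "view": { "obj_type": "View", "drawables": [] }, "bookmarks": [] }
    data = urllib.urlencode( {
        'id': '',                       # an empty/absent id creates a new visualization
        'title': 'Example browser',
        'dbkey': 'hg17',
        'type': 'trackster',
        'config': json.dumps( payload ),
    } )
    # Cookie-based authentication is omitted; an already-authenticated session is assumed.
    response = urllib2.urlopen( 'http://localhost:8080/tracks/save', data )
    vis_info = json.loads( response.read() )   # contains 'vis_id' and 'url' on success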
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Provide a modified check_galaxy.py that is suitable for use with nagios.
by Bitbucket 26 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f4c4ba7be3d1/
changeset: f4c4ba7be3d1
user: natefoo
date: 2012-05-26 21:00:36
summary: Provide a modified check_galaxy.py that is suitable for use with nagios.
affected #: 3 files
diff -r 62bdb265d3007d4f761a1defa82a44d888c30bfd -r f4c4ba7be3d17fbf7924e9f8d6ae005960fcc4c0 contrib/nagios/README
--- /dev/null
+++ b/contrib/nagios/README
@@ -0,0 +1,1 @@
+Nagios checks for Galaxy. check_galaxy is used to call check_galaxy.py.
diff -r 62bdb265d3007d4f761a1defa82a44d888c30bfd -r f4c4ba7be3d17fbf7924e9f8d6ae005960fcc4c0 contrib/nagios/check_galaxy
--- /dev/null
+++ b/contrib/nagios/check_galaxy
@@ -0,0 +1,40 @@
+#!/bin/sh
+
+if [ -z "$3" ]; then
+ echo "usage: check_galaxy <server><username><password>"
+ exit 3
+fi
+
+here=`dirname $0`
+var="$HOME/.check_galaxy/$1"
+
+touch $var/iterations
+iterations=`cat $var/iterations`
+if [ -z "$iterations" ]; then
+ iterations=0
+fi
+
+new_history=''
+if [ $iterations -gt 96 ]; then
+ new_history='-n'
+ echo 0 > $var/iterations
+else
+ echo `expr $iterations + 1` > $var/iterations
+fi
+
+date >> $var/log
+status=`python $here/check_galaxy.py $new_history $1 $2 $3 2>&1 | tee -a $var/log | tail -n 1`
+
+echo "$status"
+
+case "$status" in
+ "Exception: Tool never finished")
+ exit 1
+ ;;
+ "OK")
+ exit 0
+ ;;
+ *)
+ exit 2
+ ;;
+esac
diff -r 62bdb265d3007d4f761a1defa82a44d888c30bfd -r f4c4ba7be3d17fbf7924e9f8d6ae005960fcc4c0 contrib/nagios/check_galaxy.py
--- /dev/null
+++ b/contrib/nagios/check_galaxy.py
@@ -0,0 +1,393 @@
+#!/usr/bin/env python
+"""
+check_galaxy can be run by hand, although it is meant to run from cron
+via the check_galaxy.sh script in Galaxy's cron/ directory.
+"""
+
+import socket, sys, os, time, tempfile, filecmp, htmllib, formatter, getopt
+from user import home
+
+import warnings
+with warnings.catch_warnings():
+ warnings.simplefilter('ignore')
+ import twill
+ import twill.commands as tc
+
+# options
+if os.environ.has_key( "DEBUG" ):
+ debug = os.environ["DEBUG"]
+else:
+ debug = False
+
+test_data_dir = os.path.join( os.path.dirname( __file__ ), 'check_galaxy_data' )
+# what tools to run - not so pretty
+tools = {
+ "Extract+genomic+DNA+1" :
+ [
+ {
+ "inputs" :
+ (
+ {
+ "file_path" : os.path.join( test_data_dir, "1.bed" ),
+ "dbkey" : "hg17",
+ },
+
+ )
+ },
+ { "check_file" : os.path.join( test_data_dir, "extract_genomic_dna_out1.fasta" ) },
+ {
+ "tool_run_options" :
+ {
+ "input" : "1.bed",
+ "interpret_features" : "yes",
+ "index_source" : "cached",
+ "out_format" : "fasta"
+ }
+ }
+ ]
+}
+
+# handle arg(s)
+def usage():
+ print "usage: check_galaxy.py <server><username><password>"
+ sys.exit(1)
+
+try:
+ opts, args = getopt.getopt( sys.argv[1:], 'n' )
+except getopt.GetoptError, e:
+ print str(e)
+ usage()
+if len( args ) < 1:
+ usage()
+server = args[0]
+username = args[1]
+password = args[2]
+
+if server.endswith(".g2.bx.psu.edu"):
+ if debug:
+ print "Checking a PSU Galaxy server, using maint file"
+ maint = "/errordocument/502/%s/maint" % args[0].split('.', 1)[0]
+else:
+ maint = None
+
+new_history = False
+for o, a in opts:
+ if o == "-n":
+ if debug:
+ print "Specified -n, will create a new history"
+ new_history = True
+ else:
+ usage()
+
+# state information
+var_dir = os.path.join( home, ".check_galaxy", server )
+if not os.access( var_dir, os.F_OK ):
+ os.makedirs( var_dir, 0700 )
+
+# default timeout for twill browser is never
+socket.setdefaulttimeout(300)
+
+# user-agent
+tc.agent("Mozilla/5.0 (compatible; check_galaxy/0.1)")
+tc.config('use_tidy', 0)
+
+class Browser:
+
+ def __init__(self):
+ self.server = server
+ self.maint = maint
+ self.tool = None
+ self.tool_opts = None
+ self.id = None
+ self.status = None
+ self.check_file = None
+ self.hid = None
+ self.cookie_jar = os.path.join( var_dir, "cookie_jar" )
+ dprint("cookie jar path: %s" % self.cookie_jar)
+ if not os.access(self.cookie_jar, os.R_OK):
+ dprint("no cookie jar at above path, creating")
+ tc.save_cookies(self.cookie_jar)
+ tc.load_cookies(self.cookie_jar)
+
+ def get(self, path):
+ tc.go("http://%s%s" % (self.server, path))
+ tc.code(200)
+
+ def reset(self):
+ self.tool = None
+ self.tool_opts = None
+ self.id = None
+ self.status = None
+ self.check_file = None
+ self.delete_datasets()
+ self.get("/root/history")
+ p = didParser()
+ p.feed(tc.browser.get_html())
+ if len(p.dids) > 0:
+ print "Remaining datasets ids:", " ".join( p.dids )
+ raise Exception, "History still contains datasets after attempting to delete them"
+ if new_history:
+ self.get("/history/delete_current")
+ tc.save_cookies(self.cookie_jar)
+
+ def check_redir(self, url):
+ try:
+ tc.get_browser()._browser.set_handle_redirect(False)
+ tc.go(url)
+ tc.code(302)
+ tc.get_browser()._browser.set_handle_redirect(True)
+ dprint( "%s is returning redirect (302)" % url )
+ return(True)
+ except twill.errors.TwillAssertionError, e:
+ tc.get_browser()._browser.set_handle_redirect(True)
+ dprint( "%s is not returning redirect (302): %s" % (url, e) )
+ code = tc.browser.get_code()
+ if code == 502:
+ is_maint = self.check_maint()
+ if is_maint:
+ dprint( "Galaxy is down, but a maint file was found, so not sending alert" )
+ sys.exit(0)
+ else:
+ print "Galaxy is down (code 502)"
+ sys.exit(1)
+ return(False)
+
+ # checks for a maint file
+ def check_maint(self):
+ if self.maint is None:
+ #dprint( "Warning: unable to check maint file for %s" % self.server )
+ return(False)
+ try:
+ self.get(self.maint)
+ return(True)
+ except twill.errors.TwillAssertionError, e:
+ return(False)
+
+ def login(self, user, pw):
+ self.get("/user/login")
+ tc.fv("1", "email", user)
+ tc.fv("1", "password", pw)
+ tc.submit("Login")
+ tc.code(200)
+ if len(tc.get_browser().get_all_forms()) > 0:
+ # uh ohs, fail
+ p = userParser()
+ p.feed(tc.browser.get_html())
+ if p.no_user:
+ dprint("user does not exist, will try creating")
+ self.create_user(user, pw)
+ elif p.bad_pw:
+ raise Exception, "Password is incorrect"
+ else:
+ raise Exception, "Unknown error logging in"
+ tc.save_cookies(self.cookie_jar)
+
+ def create_user(self, user, pw):
+ self.get("/user/create")
+ tc.fv("1", "email", user)
+ tc.fv("1", "password", pw)
+ tc.fv("1", "confirm", pw)
+ tc.submit("Submit")
+ tc.code(200)
+ if len(tc.get_browser().get_all_forms()) > 0:
+ p = userParser()
+ p.feed(tc.browser.get_html())
+ if p.already_exists:
+ raise Exception, 'The user you were trying to create already exists'
+
+ def upload(self, input):
+ self.get("/tool_runner/index?tool_id=upload1")
+ tc.fv("1","file_type", "bed")
+ tc.fv("1","dbkey", input.get('dbkey', '?'))
+ tc.formfile("1","file_data", input['file_path'])
+ tc.submit("runtool_btn")
+ tc.code(200)
+
+ def runtool(self):
+ self.get("/tool_runner/index?tool_id=%s" % self.tool)
+ for k, v in self.tool_opts.items():
+ tc.fv("1", k, v)
+ tc.submit("runtool_btn")
+ tc.code(200)
+
+ def wait(self):
+ sleep_amount = 1
+ count = 0
+ maxiter = 16
+ while count < maxiter:
+ count += 1
+ self.get("/root/history")
+ page = tc.browser.get_html()
+ if page.find( '<!-- running: do not change this comment, used by TwillTestCase.wait -->' ) > -1:
+ time.sleep( sleep_amount )
+ sleep_amount += 1
+ else:
+ break
+ if count == maxiter:
+ raise Exception, "Tool never finished"
+
+ def check_status(self):
+ self.get("/root/history")
+ p = historyParser()
+ p.feed(tc.browser.get_html())
+ if p.status != "ok":
+ self.get("/datasets/%s/stderr" % p.id)
+ print tc.browser.get_html()
+ raise Exception, "HDA %s NOT OK: %s" % (p.id, p.status)
+ self.id = p.id
+ self.status = p.status
+ #return((p.id, p.status))
+
+ def diff(self):
+ self.get("/datasets/%s/display?to_ext=%s" % (self.id, self.tool_opts.get('out_format', 'fasta')))
+ data = tc.browser.get_html()
+ tmp = tempfile.mkstemp()
+ dprint("tmp file: %s" % tmp[1])
+ tmpfh = os.fdopen(tmp[0], 'w')
+ tmpfh.write(data)
+ tmpfh.close()
+ if filecmp.cmp(tmp[1], self.check_file):
+ dprint("Tool output is as expected")
+ else:
+ if not debug:
+ os.remove(tmp[1])
+ raise Exception, "Tool output differs from expected"
+ if not debug:
+ os.remove(tmp[1])
+
+ def delete_datasets(self):
+ self.get("/root/history")
+ p = didParser()
+ p.feed(tc.browser.get_html())
+ dids = p.dids
+ for did in dids:
+ self.get("/datasets/%s/delete" % did)
+
+ def check_if_logged_in(self):
+ self.get("/user?cntrller=user")
+ p = loggedinParser()
+ p.feed(tc.browser.get_html())
+ return p.logged_in
+
+class userParser(htmllib.HTMLParser):
+ def __init__(self):
+ htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+ self.in_span = False
+ self.in_div = False
+ self.no_user = False
+ self.bad_pw = False
+ self.already_exists = False
+ def start_span(self, attrs):
+ self.in_span = True
+ def start_div(self, attrs):
+ self.in_div = True
+ def end_span(self):
+ self.in_span = False
+ def end_div(self):
+ self.in_div = False
+ def handle_data(self, data):
+ if self.in_span or self.in_div:
+ if data == "No such user (please note that login is case sensitive)":
+ self.no_user = True
+ elif data == "Invalid password":
+ self.bad_pw = True
+ elif data == "User with that email already exists":
+ self.already_exists = True
+
+class historyParser(htmllib.HTMLParser):
+ def __init__(self):
+ htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+ self.status = None
+ self.id = None
+ def start_div(self, attrs):
+ # find the top history item
+ for i in attrs:
+ if i[0] == "class" and i[1].startswith("historyItemWrapper historyItem historyItem-"):
+ self.status = i[1].rsplit("historyItemWrapper historyItem historyItem-", 1)[1]
+ dprint("status: %s" % self.status)
+ if i[0] == "id" and i[1].startswith("historyItem-"):
+ self.id = i[1].rsplit("historyItem-", 1)[1]
+ dprint("id: %s" % self.id)
+ if self.status is not None:
+ self.reset()
+
+class didParser(htmllib.HTMLParser):
+ def __init__(self):
+ htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+ self.dids = []
+ def start_div(self, attrs):
+ for i in attrs:
+ if i[0] == "id" and i[1].startswith("historyItemContainer-"):
+ self.dids.append( i[1].rsplit("historyItemContainer-", 1)[1] )
+ dprint("got a dataset id: %s" % self.dids[-1])
+
+class loggedinParser(htmllib.HTMLParser):
+ def __init__(self):
+ htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+ self.in_p = False
+ self.logged_in = False
+ def start_p(self, attrs):
+ self.in_p = True
+ def end_p(self):
+ self.in_p = False
+ def handle_data(self, data):
+ if self.in_p:
+ if data == "You are currently not logged in.":
+ self.logged_in = False
+ elif data.startswith( "You are currently logged in as " ):
+ self.logged_in = True
+
+def dprint(str):
+ if debug:
+ print str
+
+# do stuff here
+if __name__ == "__main__":
+
+ dprint("checking %s" % server)
+
+ b = Browser()
+
+ # login (or not)
+ if b.check_if_logged_in():
+ dprint("we are already logged in (via cookies), hooray!")
+ else:
+ dprint("not logged in... logging in")
+ b.login(username, password)
+
+ for tool, params in tools.iteritems():
+
+ check_file = ""
+
+ # make sure history and state is clean
+ b.reset()
+ b.tool = tool
+
+ # get all the tool run conditions
+ for dict in params:
+ for k, v in dict.items():
+ if k == 'inputs':
+ for input in v:
+ b.upload(input)
+ b.wait()
+ elif k == 'check_file':
+ b.check_file = v
+ elif k == 'tool_run_options':
+ b.tool_opts = v
+ else:
+ raise Exception, "Unknown key in tools dict: %s" % k
+
+ b.runtool()
+ b.wait()
+ b.check_status()
+ b.diff()
+ b.delete_datasets()
+
+ # by this point, everything else has succeeded. there should be no maint.
+ is_maint = b.check_maint()
+ if is_maint:
+ print "Galaxy is up and fully functional, but a maint file is in place."
+ sys.exit(1)
+
+ print "OK"
+ sys.exit(0)
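As a hedged illustration of extending the script, a hypothetical second entry for the `tools` dict defined near the top of check_galaxy.py; the tool id, file names, and options are invented and would have to match a real tool and its test data.

    # Hypothetical additional entry for the `tools` dict in check_galaxy.py;
    # the tool id, files, and options below are invented for illustration.
    tools["Convert characters1"] = [
        { "inputs" : (
            { "file_path" : os.path.join( test_data_dir, "2.txt" ),
              "dbkey"     : "?",
            },
        ) },
        { "check_file" : os.path.join( test_data_dir, "convert_characters_out1.tabular" ) },
        { "tool_run_options" : {
            "input"        : "2.txt",
            "convert_from" : "s",
        } },
    ]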
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
25 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/62bdb265d300/
changeset: 62bdb265d300
user: jgoecks
date: 2012-05-26 00:23:43
summary: Infrastructure for managing visualizations: (a) support for creating multiple types of visualizations; (b) a placeholder visualization API controller; (c) abstract visualization creation so that it can be reused in multiple controllers.
affected #: 6 files
diff -r 4288ffb1bd16d8f014dc06759c8073919e7347c8 -r 62bdb265d3007d4f761a1defa82a44d888c30bfd lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2445,12 +2445,13 @@
self.user = None
class Visualization( object ):
- def __init__( self, user=None, type=None, title=None, dbkey=None, latest_revision=None ):
+ def __init__( self, user=None, type=None, title=None, dbkey=None, slug=None, latest_revision=None ):
self.id = None
self.user = user
self.type = type
self.title = title
self.dbkey = dbkey
+ self.slug = slug
self.latest_revision = latest_revision
self.revisions = []
if self.latest_revision:
diff -r 4288ffb1bd16d8f014dc06759c8073919e7347c8 -r 62bdb265d3007d4f761a1defa82a44d888c30bfd lib/galaxy/visualization/__init__.py
--- a/lib/galaxy/visualization/__init__.py
+++ b/lib/galaxy/visualization/__init__.py
@@ -1,3 +1,3 @@
"""
Package for Galaxy visualization plugins.
-"""
\ No newline at end of file
+"""
diff -r 4288ffb1bd16d8f014dc06759c8073919e7347c8 -r 62bdb265d3007d4f761a1defa82a44d888c30bfd lib/galaxy/web/api/visualizations.py
--- /dev/null
+++ b/lib/galaxy/web/api/visualizations.py
@@ -0,0 +1,28 @@
+from galaxy import web
+from galaxy.web.base.controller import BaseController, BaseAPIController
+
+class VisualizationsController( BaseAPIController ):
+ """
+ RESTful controller for interactions with visualizations.
+ """
+
+ @web.expose_api
+ def index( self, trans, **kwds ):
+ """
+ GET /api/visualizations:
+ """
+ pass
+
+ @web.json
+ def show( self, trans, id, **kwd ):
+ """
+ GET /api/visualizations/{viz_id}
+ """
+ pass
+
+ @web.expose_api
+ def create( self, trans, payload, **kwd ):
+ """
+ POST /api/visualizations
+ """
+ pass
\ No newline at end of file
diff -r 4288ffb1bd16d8f014dc06759c8073919e7347c8 -r 62bdb265d3007d4f761a1defa82a44d888c30bfd lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -7,6 +7,7 @@
from galaxy import config, tools, web, util
from galaxy.util import inflector
from galaxy.util.hash_util import *
+from galaxy.util.sanitize_html import sanitize_html
from galaxy.web import error, form, url_for
from galaxy.model.orm import *
from galaxy.workflow.modules import *
@@ -275,8 +276,45 @@
class UsesVisualization( SharableItemSecurity ):
""" Mixin for controllers that use Visualization objects. """
+
+ viz_types = [ "trackster", "circos" ]
len_files = None
+
+ def create_visualization( self, trans, title, slug, type, dbkey, annotation=None, config={} ):
+ user = trans.get_user()
+
+ # Error checking.
+ title_err = slug_err = ""
+ if not title:
+ title_err = "visualization name is required"
+ elif not slug:
+ slug_err = "visualization id is required"
+ elif not VALID_SLUG_RE.match( slug ):
+ slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
+ elif trans.sa_session.query( trans.model.Visualization ).filter_by( user=user, slug=slug, deleted=False ).first():
+ slug_err = "visualization id must be unique"
+
+ if title_err or slug_err:
+ return { 'title_err': title_err, 'slug_err': slug_err }
+
+ # Create visualization
+ visualization = trans.model.Visualization( user=user, title=title, slug=slug, dbkey=dbkey, type=type )
+ if annotation:
+ annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
+ self.add_item_annotation( trans.sa_session, trans.user, visualization, annotation )
+
+ # And the first visualization revision
+ revision = trans.model.VisualizationRevision( visualization=visualization, title=title, config={}, dbkey=dbkey )
+ visualization.latest_revision = revision
+
+ # Persist
+ session = trans.sa_session
+ session.add(visualization)
+ session.add(revision)
+ session.flush()
+
+ return visualization
def _get_dbkeys( self, trans ):
""" Returns all valid dbkeys that a user can use in a visualization. """
diff -r 4288ffb1bd16d8f014dc06759c8073919e7347c8 -r 62bdb265d3007d4f761a1defa82a44d888c30bfd lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -132,6 +132,7 @@
webapp.api_mapper.resource_with_deleted( 'quota', 'quotas', path_prefix='/api' )
webapp.api_mapper.resource( 'tool', 'tools', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' )
+ webapp.api_mapper.resource( 'visualization', 'visualizations', path_prefix='/api' )
webapp.api_mapper.resource( 'workflow', 'workflows', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'history', 'histories', path_prefix='/api' )
#webapp.api_mapper.connect( 'run_workflow', '/api/workflow/{workflow_id}/library/{library_id}', controller='workflows', action='run', workflow_id=None, library_id=None, conditions=dict(method=["GET"]) )
diff -r 4288ffb1bd16d8f014dc06759c8073919e7347c8 -r 62bdb265d3007d4f761a1defa82a44d888c30bfd lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py
+++ b/lib/galaxy/web/controllers/visualization.py
@@ -4,7 +4,6 @@
from galaxy.web.framework.helpers import time_ago, grids, iff
from galaxy.util.sanitize_html import sanitize_html
-
class VisualizationListGrid( grids.Grid ):
# Grid definition
title = "Saved Visualizations"
@@ -14,6 +13,7 @@
columns = [
grids.TextColumn( "Title", key="title", attach_popup=True,
link=( lambda item: dict( controller="tracks", action="browser", id=item.id ) ) ),
+ grids.TextColumn( "Type", key="type" ),
grids.TextColumn( "Dbkey", key="dbkey" ),
grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.VisualizationTagAssociation, filterable="advanced", grid_name="VisualizationListGrid" ),
grids.SharingStatusColumn( "Sharing", key="sharing", filterable="advanced", sortable=False ),
@@ -346,51 +346,31 @@
@web.expose
@web.require_login( "create visualizations" )
- def create( self, trans, visualization_title="", visualization_slug="", visualization_annotation="", visualization_dbkey="" ):
+ def create( self, trans, visualization_title="", visualization_slug="", visualization_annotation="", visualization_dbkey="",
+ visualization_type="" ):
"""
Create a new visualization
"""
- user = trans.get_user()
visualization_title_err = visualization_slug_err = visualization_annotation_err = ""
if trans.request.method == "POST":
- if not visualization_title:
- visualization_title_err = "visualization name is required"
- elif not visualization_slug:
- visualization_slug_err = "visualization id is required"
- elif not VALID_SLUG_RE.match( visualization_slug ):
- visualization_slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
- elif trans.sa_session.query( model.Visualization ).filter_by( user=user, slug=visualization_slug, deleted=False ).first():
- visualization_slug_err = "visualization id must be unique"
+ rval = self.create_visualization( trans, title=visualization_title,
+ slug=visualization_slug,
+ annotation=visualization_annotation,
+ dbkey=visualization_dbkey,
+ type=visualization_type )
+ if isinstance( rval, dict ):
+ # Found error creating viz.
+ visualization_title_err = rval[ 'title_err' ]
+ visualization_slug_err = rval[ 'slug_err' ]
else:
- # Create the new stored visualization
- visualization = model.Visualization()
- visualization.title = visualization_title
- visualization.slug = visualization_slug
- visualization.dbkey = visualization_dbkey
- visualization.type = 'trackster' # HACK: set visualization type to trackster since it's the only viz
- visualization_annotation = sanitize_html( visualization_annotation, 'utf-8', 'text/html' )
- self.add_item_annotation( trans.sa_session, trans.get_user(), visualization, visualization_annotation )
- visualization.user = user
-
- # And the first (empty) visualization revision
- visualization_revision = model.VisualizationRevision()
- visualization_revision.title = visualization_title
- visualization_revision.config = {}
- visualization_revision.dbkey = visualization_dbkey
- visualization_revision.visualization = visualization
- visualization.latest_revision = visualization_revision
-
- # Persist
- session = trans.sa_session
- session.add(visualization)
- session.add(visualization_revision)
- session.flush()
-
+ # Successfully created viz.
return trans.response.send_redirect( web.url_for( action='list' ) )
-
+
+ viz_type_options = [ ( t, t ) for t in self.viz_types ]
return trans.show_form(
web.FormBuilder( web.url_for(), "Create new visualization", submit_text="Submit" )
.add_text( "visualization_title", "Visualization title", value=visualization_title, error=visualization_title_err )
+ .add_select( "visualization_type", "Type", options=viz_type_options, error=None )
.add_text( "visualization_slug", "Visualization identifier", value=visualization_slug, error=visualization_slug_err,
help="""A unique identifier that will be used for
public links to this visualization. A default is generated
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Partial implementation of Circos visualization using D3.
by Bitbucket 25 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4288ffb1bd16/
changeset: 4288ffb1bd16
user: jgoecks
date: 2012-05-25 21:13:25
summary: Partial implementation of Circos visualization using D3.
affected #: 7 files
Diff too large to display.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: guru: Suppressed R package-loading messages which were previously being written into stderr
by Bitbucket 25 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8cbbb813f286/
changeset: 8cbbb813f286
user: guru
date: 2012-05-25 16:54:03
summary: Suppressed R package-loading messages which were previously being written into stderr
affected #: 1 file
diff -r 8335baa6881520cabfd3eb41986d7df203dc3183 -r 8cbbb813f2865de22ff671c980816cde5e225be5 tools/regVariation/logistic_regression_vif.py
--- a/tools/regVariation/logistic_regression_vif.py
+++ b/tools/regVariation/logistic_regression_vif.py
@@ -5,16 +5,10 @@
from rpy import *
import numpy
-#export PYTHONPATH=~/galaxy/lib/
-
def stop_err(msg):
sys.stderr.write(msg)
sys.exit()
-#infile = 'logreg_inp.tab'
-#y_col=3
-#x_cols=[1,2,3]
-#outfile='logreg_out.txt'
-#python logistic_regression_vif.py logreg_inp.tab 4 1,2,3 logreg_out2.tabular # running test
+
infile = sys.argv[1]
y_col = int(sys.argv[2])-1
x_cols = sys.argv[3].split(',')
@@ -84,17 +78,11 @@
set_default_mode(NO_CONVERSION)
try:
linear_model = r.glm(r("y ~ x"), data = r.na_exclude(dat),family="binomial")
- #r('library(car)')
- #r.assign('dat',dat)
- #r.assign('ncols',len(x_cols))
- #r.vif(r('glm(dat$y ~ ., data = na.exclude(data.frame(as.matrix(dat$x,ncol=ncols))->datx),family="binomial")')).as_py()
-
except RException, rex:
stop_err("Error performing logistic regression on the input data.\nEither the response column or one of the predictor columns contain only non-numeric or invalid values.")
if len(x_cols)>1:
try:
-
- r('library(car)')
+ r('suppressPackageStartupMessages(library(car))')
r.assign('dat',dat)
r.assign('ncols',len(x_cols))
vif=r.vif(r('glm(dat$y ~ ., data = na.exclude(data.frame(as.matrix(dat$x,ncol=ncols))->datx),family="binomial")'))
@@ -163,9 +151,6 @@
rsq= r.round(float((null_deviance-residual_deviance)/null_deviance), digits=5)
null_deviance= r.round(float(null_deviance), digits=5)
residual_deviance= r.round(float(residual_deviance), digits=5)
-
- #rsq = r.round(float(rsq), digits=5)
-
except:
pass
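The motivation for this change is that Galaxy of this era commonly treated any output on stderr as a tool failure, so R's package-loading chatter had to be silenced at the source with suppressPackageStartupMessages(). The snippet below is a generic, runnable analogue of the same idea in plain Python (it does not use rpy or R at all); the function and messages are invented for illustration.

    # Capture would-be stderr chatter so a wrapper that equates stderr output
    # with failure does not misfire. Requires Python 3 for redirect_stderr.
    import contextlib
    import io
    import sys

    def noisy_setup():
        sys.stderr.write("Loading required package: car\n")  # stand-in for R chatter
        return "ready"

    captured = io.StringIO()
    with contextlib.redirect_stderr(captured):
        state = noisy_setup()

    print(state)                      # 'ready'
    print(repr(captured.getvalue()))  # the chatter, kept off the real stderr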
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: fubar: Reverse order of job manager and instance in warning
by Bitbucket 25 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8335baa68815/
changeset: 8335baa68815
user: fubar
date: 2012-05-25 07:48:49
summary: Reverse order of job manager and instance in warning
affected #: 1 file
diff -r 3f1150fee33c6d7270549e0760fd84b7c35fdb50 -r 8335baa6881520cabfd3eb41986d7df203dc3183 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -2380,7 +2380,7 @@
msg = None
status = None
if self.app.config.job_manager != self.app.config.server_name:
- return trans.show_error_message( 'This Galaxy instance (%s) is not the job manager (%s). If using multiple servers, please directly access the job manager instance to manage jobs.' % ( self.app.config.job_manager,self.app.config.server_name) )
+ return trans.show_error_message( 'This Galaxy instance (%s) is not the job manager (%s). If using multiple servers, please directly access the job manager instance to manage jobs.' % (self.app.config.server_name, self.app.config.job_manager) )
job_ids = util.listify( stop )
if job_ids and stop_msg in [ None, '' ]:
msg = 'Please enter an error message to display to the user describing why the job was terminated'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: fubar: Fix for bogus double negative compare - allows job management from admin screen when only one job manager running
by Bitbucket 25 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3f1150fee33c/
changeset: 3f1150fee33c
user: fubar
date: 2012-05-25 07:40:44
summary: Fix for bogus double negative compare - allows job management from admin screen when only one job manager running
affected #: 1 file
diff -r 3fa05d052e820d21f1a6e5ae206d90edcc1a8782 -r 3f1150fee33c6d7270549e0760fd84b7c35fdb50 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -2379,8 +2379,8 @@
deleted = []
msg = None
status = None
- if not self.app.config.job_manager != self.app.config.server_name:
- return trans.show_error_message( 'This Galaxy instance is not the job manager. If using multiple servers, please directly access the job manager instance to manage jobs.' )
+ if self.app.config.job_manager != self.app.config.server_name:
+ return trans.show_error_message( 'This Galaxy instance (%s) is not the job manager (%s). If using multiple servers, please directly access the job manager instance to manage jobs.' % ( self.app.config.job_manager,self.app.config.server_name) )
job_ids = util.listify( stop )
if job_ids and stop_msg in [ None, '' ]:
msg = 'Please enter an error message to display to the user describing why the job was terminated'
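The "bogus double negative" is easy to see in isolation: not a != b is simply a == b, so the old guard raised the error exactly when this instance was the job manager. A tiny demonstration with made-up values:

    job_manager = "main"
    server_name = "main"   # single-server setup: this instance IS the job manager

    broken = not job_manager != server_name   # True, so the old code showed the error
    fixed = job_manager != server_name        # False, so job management proceeds

    print(broken, fixed)   # True False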
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
24 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3fa05d052e82/
changeset: 3fa05d052e82
user: greg
date: 2012-05-24 22:47:25
summary: Fix a typo in the community common controller.
affected #: 1 file
diff -r 676546e7038ae833e524bbc4b4b26c81c71954bd -r 3fa05d052e820d21f1a6e5ae206d90edcc1a8782 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -774,7 +774,7 @@
break
if in_ctx:
tmp_tool_config = get_named_tmpfile_from_ctx( ctx, ctx_file, dir=work_dir )
- element_tree = util.parse_xml( tmp_config )
+ element_tree = util.parse_xml( tmp_tool_config )
element_tree_root = element_tree.getroot()
# Look for code files required by the tool config.
tmp_code_files = []
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Enhance setting metadata on tool shed repositories that include tools that require entries in the tool_data_table_conf.xml file.
by Bitbucket 24 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/676546e7038a/
changeset: 676546e7038a
user: greg
date: 2012-05-24 22:36:32
summary: Enhance setting metadata on tool shed repositories that include tools that require entries in the tool_data_table_conf.xml file.
affected #: 5 files
diff -r 95c05fcbbceb69e9100d8689694c22cc13d16b23 -r 676546e7038ae833e524bbc4b4b26c81c71954bd lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
@@ -8,6 +8,7 @@
from galaxy import eggs
import pkg_resources
+pkg_resources.require('ssh' )
pkg_resources.require( 'Fabric' )
from fabric.api import env, lcd, local
diff -r 95c05fcbbceb69e9100d8689694c22cc13d16b23 -r 676546e7038ae833e524bbc4b4b26c81c71954bd lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,4 +1,5 @@
import sys, os, tempfile, shutil, logging, string, urllib2
+import galaxy.tools.data
from datetime import date, datetime, timedelta
from time import strftime, gmtime
from galaxy import util
@@ -528,10 +529,11 @@
exported_workflow_dict = from_json_string( workflow_text )
if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
- # This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
- if tool_dependencies_config:
- metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
+ if 'tools' in metadata_dict:
+ # This step must be done after metadata for tools has been defined.
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
+ if tool_dependencies_config:
+ metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
return metadata_dict
def generate_tool_guid( repository_clone_url, tool ):
"""
@@ -1026,6 +1028,8 @@
# Reload the tool into the local list of repository_tools_tups.
repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( app )
return repository_tools_tups
def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups ):
"""
@@ -1398,6 +1402,9 @@
if uninstall:
# Write the current in-memory version of the integrated_tool_panel.xml file to disk.
trans.app.toolbox.write_integrated_tool_panel_config_file()
+def reset_tool_data_tables( app ):
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ app.tool_data_tables = galaxy.tools.data.ToolDataTableManager( app.config.tool_data_table_config_path )
def strip_path( fpath ):
file_path, file_name = os.path.split( fpath )
return file_name
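The new reset_tool_data_tables() helper simply swaps in a fresh ToolDataTableManager built from the stock tool_data_table_conf.xml, discarding whatever entries were loaded from a repository's sample file while its tools were being inspected. The sketch below is a self-contained illustration of that load-then-reset pattern; the registry class and entries are invented stand-ins for the real Galaxy objects.

    class FakeTableRegistry(object):
        """Invented stand-in for galaxy.tools.data.ToolDataTableManager."""
        def __init__(self):
            self.tables = {}
        def load_sample(self, entries):
            self.tables.update(entries)

    def stock_registry():
        # Equivalent of building the manager from the empty stock config file.
        return FakeTableRegistry()

    app_tables = stock_registry()
    try:
        # Temporarily register whatever the repository's sample file defines
        # so its tools can be loaded and validated.
        app_tables.load_sample({"bwa_indexes": "/tmp/bwa.loc"})
        print(sorted(app_tables.tables))   # ['bwa_indexes']
    finally:
        # Equivalent of reset_tool_data_tables(): start over from the stock config.
        app_tables = stock_registry()

    print(sorted(app_tables.tables))       # []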
diff -r 95c05fcbbceb69e9100d8689694c22cc13d16b23 -r 676546e7038ae833e524bbc4b4b26c81c71954bd lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -7,7 +7,7 @@
from galaxy.util.hash_util import *
from galaxy.util.shed_util import copy_sample_file, generate_datatypes_metadata, generate_tool_dependency_metadata, generate_tool_metadata
from galaxy.util.shed_util import generate_workflow_metadata, get_changectx_for_changeset, get_config, get_configured_ui, handle_sample_tool_data_table_conf_file
-from galaxy.util.shed_util import make_tmp_directory, NOT_TOOL_CONFIGS, strip_path, to_html_escaped, to_html_str, update_repository
+from galaxy.util.shed_util import make_tmp_directory, NOT_TOOL_CONFIGS, reset_tool_data_tables, strip_path, to_html_escaped, to_html_str, update_repository
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
from galaxy.model.orm import *
@@ -245,6 +245,8 @@
correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
invalid_files.append( ( xml_file_in_ctx, correction_msg ) )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
return sample_files_copied, can_set_metadata, invalid_files
def clean_repository_metadata( trans, id, changeset_revisions ):
# Delete all repository_metadata reecords associated with the repository that have a changeset_revision that is not in changeset_revisions.
@@ -437,16 +439,20 @@
work_dir = make_tmp_directory()
datatypes_config = get_config( 'datatypes_conf.xml', repo, repo_dir, ctx, work_dir )
if datatypes_config:
- metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
+ metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
sample_files = get_sample_files( repo, repo_dir, dir=work_dir )
+ # Handle the tool_data_table_conf.xml.sample file if it is included in the repository.
+ if 'tool_data_table_conf.xml.sample' in sample_files:
+ tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
+ error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
if sample_files:
trans.app.config.tool_data_path = work_dir
for filename in ctx:
# Find all tool configs.
ctx_file_name = strip_path( filename )
if ctx_file_name not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ):
- valid, tool = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir )
- if valid and tool is not None:
+ is_tool_config, valid, tool, error_message = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir )
+ if is_tool_config and valid and tool is not None:
sample_files_copied, can_set_metadata, invalid_files = check_tool_input_params( trans,
repo,
repo_dir,
@@ -484,9 +490,11 @@
os.unlink( os.path.join( original_tool_data_path, copied_file ) )
except:
pass
- elif tool is not None:
- # We have a tool config but it is invalid.
- invalid_files.append( ( ctx_file_name, 'Problems loading tool.' ) )
+ elif is_tool_config:
+ if not error_message:
+ error_message = 'Unknown problems loading tool.'
+ # We have a tool config but it is invalid or the tool does not properly load.
+ invalid_files.append( ( ctx_file_name, error_message ) )
invalid_tool_configs.append( ctx_file_name )
# Find all exported workflows.
elif filename.endswith( '.ga' ):
@@ -498,15 +506,18 @@
metadata_dict = generate_workflow_metadata( '', exported_workflow_dict, metadata_dict )
except Exception, e:
invalid_files.append( ( ctx_file_name, str( e ) ) )
- # Find tool_dependencies.xml if it exists. This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config( 'tool_dependencies.xml', repo, repo_dir, ctx, work_dir )
- if tool_dependencies_config:
- metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
+ if 'tools' in metadata_dict:
+ # Find tool_dependencies.xml if it exists. This step must be done after metadata for tools has been defined.
+ tool_dependencies_config = get_config( 'tool_dependencies.xml', repo, repo_dir, ctx, work_dir )
+ if tool_dependencies_config:
+ metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
if invalid_tool_configs:
metadata_dict [ 'invalid_tools' ] = invalid_tool_configs
if sample_files:
# Don't forget to reset the value of trans.app.config.tool_data_path!
trans.app.config.tool_data_path = original_tool_data_path
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
try:
shutil.rmtree( work_dir )
except:
@@ -737,9 +748,14 @@
repository = get_repository( trans, repository_id )
repo_files_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_files_dir )
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
tool = None
message = ''
work_dir = make_tmp_directory()
+ # Load entries into the tool_data_tables if the tool requires them.
+ tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
+ if tool_data_table_config:
+ error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
if changeset_revision == repository.tip:
try:
copied_tool_config = copy_file_from_disk( tool_config, repo_files_dir, work_dir )
@@ -750,7 +766,6 @@
else:
# Get the tool config file name from the hgweb url, something like: /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml
old_tool_config_file_name = tool_config.split( '/' )[ -1 ]
- ctx = get_changectx_for_changeset( repo, changeset_revision )
in_ctx = False
for ctx_file in ctx.files():
ctx_file_name = strip_path( ctx_file )
@@ -784,14 +799,18 @@
pass
else:
tool = None
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
try:
shutil.rmtree( work_dir )
except:
pass
return tool, message
def load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, dir ):
+ is_tool_config = False
tool = None
valid = False
+ error_message = ''
tmp_config = get_named_tmpfile_from_ctx( ctx, filename, dir=dir )
if not ( check_binary( tmp_config ) or check_image( tmp_config ) or check_gzip( tmp_config )[ 0 ]
or check_bz2( tmp_config )[ 0 ] or check_zip( tmp_config ) ):
@@ -799,11 +818,15 @@
# Make sure we're looking at a tool config and not a display application config or something else.
element_tree = util.parse_xml( tmp_config )
element_tree_root = element_tree.getroot()
- is_tool = element_tree_root.tag == 'tool'
+ is_tool_config = element_tree_root.tag == 'tool'
except Exception, e:
log.debug( "Error parsing %s, exception: %s" % ( tmp_config, str( e ) ) )
- is_tool = False
- if is_tool:
+ is_tool_config = False
+ if is_tool_config:
+ # Load entries into the tool_data_tables if the tool requires them.
+ tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', dir )
+ if tool_data_table_config:
+ error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
# Look for code files required by the tool config. The directory to which dir refers should be removed by the caller.
for code_elem in element_tree_root.findall( 'code' ):
code_file_name = code_elem.get( 'file' )
@@ -814,9 +837,17 @@
try:
tool = load_tool( trans, tmp_config )
valid = True
- except:
+ except KeyError, e:
valid = False
- return valid, tool
+ error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file. Upload a file ' % str( e )
+ error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct '
+ error_message += 'this error. '
+ except Exception, e:
+ valid = False
+ error_message = str( e )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
+ return is_tool_config, valid, tool, error_message
def new_tool_metadata_required( trans, id, metadata_dict ):
"""
Compare the last saved metadata for each tool in the repository with the new metadata
@@ -1049,13 +1080,11 @@
error_message, status = reset_all_metadata_on_repository( trans, id, **kwd )
if error_message:
# If there is an error, display it.
- message += '%s<br/>%s ' % ( message, error_message )
- status = 'error'
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
id=id,
- message=message,
- status=status ) )
+ message=error_message,
+ status='error' ) )
def update_for_browsing( trans, repository, current_working_dir, commit_message='' ):
# This method id deprecated, but we'll keep it around for a while in case we need it. The problem is that hg purge
# is not supported by the mercurial API.
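Callers of load_tool_from_tmp_directory() now unpack a four-element tuple and can surface the specific error_message rather than a generic failure note; the repository.py hunk below shows the real call site. The snippet here is an invented, runnable stand-in for that calling pattern.

    # fake_load_tool_from_tmp_directory only exists to make the example run.
    def fake_load_tool_from_tmp_directory(filename):
        if filename.endswith(".xml"):
            # A tool config that failed to load because a data table entry is missing.
            return True, False, None, 'This file requires an entry for "bwa_indexes" in the tool_data_table_conf.xml file.'
        return False, False, None, ''

    invalid_files = []
    for filename in ["convert_characters.xml", "README"]:
        is_tool_config, valid, tool, error_message = fake_load_tool_from_tmp_directory(filename)
        if is_tool_config and valid and tool is not None:
            pass  # metadata would be generated for the tool here
        elif is_tool_config:
            # New in this commit: report the specific loading error instead of
            # a generic "Problems loading tool." message.
            invalid_files.append((filename, error_message or 'Unknown problems loading tool.'))

    print(invalid_files)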
diff -r 95c05fcbbceb69e9100d8689694c22cc13d16b23 -r 676546e7038ae833e524bbc4b4b26c81c71954bd lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -950,7 +950,12 @@
for filename in ctx:
# Find all tool configs in this repository changeset_revision.
if filename not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ):
- valid, tool = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir )
+ is_tool_config, valid, tool, error_message = load_tool_from_tmp_directory( trans,
+ repo,
+ repo_dir,
+ ctx,
+ filename,
+ work_dir )
if valid and tool is not None:
tool_guids.append( generate_tool_guid( trans, repository, tool ) )
tool_guids.sort()
diff -r 95c05fcbbceb69e9100d8689694c22cc13d16b23 -r 676546e7038ae833e524bbc4b4b26c81c71954bd lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -3,7 +3,7 @@
from galaxy.model.orm import *
from galaxy.datatypes.checkers import *
from common import *
-from galaxy.util.shed_util import get_configured_ui, handle_sample_tool_data_table_conf_file
+from galaxy.util.shed_util import get_configured_ui, reset_tool_data_tables, handle_sample_tool_data_table_conf_file
from galaxy import eggs
eggs.require('mercurial')
@@ -168,6 +168,8 @@
status=status ) )
else:
status = 'error'
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
return trans.fill_template( '/webapps/community/repository/upload.mako',
repository=repository,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/