galaxy-commits
December 2013
9 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0e9a8f32b5a9/
Changeset: 0e9a8f32b5a9
Branch: page-api
User: kellrott
Date: 2013-12-06 09:17:29
Summary: Starting to add the elements of API based page access
Affected #: 2 files
diff -r d50335029705d4e587a7a6428b9da6e4fc4890cf -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 lib/galaxy/webapps/galaxy/api/pages.py
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/pages.py
@@ -0,0 +1,32 @@
+"""
+API for managing Galaxy Pages
+"""
+import logging
+from galaxy import web
+from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController
+from galaxy.model.search import GalaxySearchEngine
+from galaxy.exceptions import ItemAccessibilityException
+
+log = logging.getLogger( __name__ )
+
+class PagesController( BaseAPIController, SharableItemSecurityMixin ):
+
+ @web.expose_api
+ def index( self, trans, deleted='False', **kwd ):
+ r = trans.sa_session.query( trans.app.model.Page )
+ out = []
+ for row in r:
+ out.append( self.encode_all_ids( trans, row.to_dict(), True) )
+ return out
+
+
+ @web.expose_api
+ def create( self, trans, payload, **kwd ):
+ return {}
+
+ @web.expose_api
+ def show( self, trans, id, deleted='False', **kwd ):
+ page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id( id ) )
+ rval = self.encode_all_ids( trans, page.to_dict(), True)
+ rval['content'] = page.latest_revision.content
+ return rval
\ No newline at end of file
diff -r d50335029705d4e587a7a6428b9da6e4fc4890cf -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -166,6 +166,7 @@
webapp.mapper.resource( 'datatype', 'datatypes', path_prefix='/api' )
#webapp.mapper.connect( 'run_workflow', '/api/workflow/{workflow_id}/library/{library_id}', controller='workflows', action='run', workflow_id=None, library_id=None, conditions=dict(method=["GET"]) )
webapp.mapper.resource( 'search', 'search', path_prefix='/api' )
+ webapp.mapper.resource( 'page', 'pages', path_prefix="/api")
# add as a non-ATOM API call to support the notion of a 'current/working' history unique to the history resource
webapp.mapper.connect( "set_as_current", "/api/histories/{id}/set_as_current",
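For context, the changeset above wires up a minimal REST-style resource: pages.py supplies index/create/show and buildapp.py maps it under /api/pages. A rough sketch of how a client could exercise the new endpoints once this branch is running - the server URL, API key, and encoded id below are placeholders, and the returned fields are simply whatever Page.to_dict() exposes:

    import json
    import urllib2

    GALAXY_URL = "http://localhost:8080"   # placeholder Galaxy instance
    API_KEY = "your-api-key-here"          # placeholder API key

    def api_get( path ):
        url = "%s%s?key=%s" % ( GALAXY_URL, path, API_KEY )
        return json.loads( urllib2.urlopen( url ).read() )

    # GET /api/pages -> list of encoded page dictionaries (the index() method above)
    for page in api_get( "/api/pages" ):
        print page[ "id" ]

    # GET /api/pages/<id> -> single page plus latest revision content (the show() method above)
    page = api_get( "/api/pages/f2db41e1fa331b3e" )  # made-up encoded id
    print page[ "content" ]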
https://bitbucket.org/galaxy/galaxy-central/commits/0e7881a4d1cf/
Changeset: 0e7881a4d1cf
Branch: page-api
User: Kyle Ellrott
Date: 2013-12-17 20:22:45
Summary: Default Merge
Affected #: 389 files
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf config/plugins/visualizations/scatterplot/templates/scatterplot.mako
--- a/config/plugins/visualizations/scatterplot/templates/scatterplot.mako
+++ b/config/plugins/visualizations/scatterplot/templates/scatterplot.mako
@@ -15,7 +15,6 @@
<script type="text/javascript" src="/static/scripts/libs/jquery/jquery.migrate.js"></script>
<script type="text/javascript" src="/static/scripts/libs/underscore.js"></script>
<script type="text/javascript" src="/static/scripts/libs/backbone/backbone.js"></script>
-<script type="text/javascript" src="/static/scripts/libs/backbone/backbone-relational.js"></script>
<script type="text/javascript" src="/static/scripts/libs/handlebars.runtime.js"></script>
<script type="text/javascript" src="/static/scripts/libs/d3.js"></script>
<script type="text/javascript" src="/static/scripts/libs/bootstrap.js"></script>
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf doc/source/lib/galaxy.webapps.galaxy.api.rst
--- a/doc/source/lib/galaxy.webapps.galaxy.api.rst
+++ b/doc/source/lib/galaxy.webapps.galaxy.api.rst
@@ -205,6 +205,11 @@
The request and response format should be considered alpha and are subject to change.
+API Return Codes and Formats
+============================
+
+A set of error codes for API requests is being established and will be
+documented here. This is a long-term project, however, so stay tuned.
API Controllers
===============
@@ -393,3 +398,67 @@
:undoc-members:
:show-inheritance:
+
+API Design Guidelines
+=====================
+
+The following section outlines guidelines related to extending and/or modifying
+the Galaxy API. The Galaxy API has grown in an ad-hoc fashion over time by
+many contributors and so clients SHOULD NOT expect the API will conform to
+these guidelines - but developers contributing to the Galaxy API SHOULD follow
+these guidelines.
+
+ - API functionality should include docstring documentation for consumption
+ by readthedocs.org.
+ - Developers should familiarize themselves with the HTTP status code definitions
+ http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html. The API responses
+ should properly set the status code according to the result - in particular
+ 2XX responses should be used for successful requests, 4XX for various
+ kinds of client errors, and 5XX for errors on the server side.
+ - If there is an error processing some part of the request (one item in a list
+ for instance), the status code should be set to reflect the error and the
+ partial result may or may not be returned depending on the controller -
+ this behavior should be documented.
+ - (TODO) API methods should throw a finite number of exceptions (defined in
+ `galaxy.exceptions`) and these should subclass `MessageException` and not
+ paste/wsgi HTTP exceptions. When possible, the framework itself should be
+ responsible for catching these exceptions, setting the status code, and
+ building an error response.
+ - Error responses should not consist of plain text strings - they should be
+ dictionaries describing the error and containing the following keys (TODO:
+ spell out nature of this.) Various error conditions (once a format has
+ been chosen and framework to enforce it in place) should be spelled out
+ in this document.
+ - Backward compatibility is important and should be maintained when possible.
+ If changing behavior in a non-backward compatible way please ensure one
+ of the following holds - there is a strong reason to believe no consumers
+ depend on a behavior, the behavior is effectively broken, or the API
+ method being modified has not been part of a tagged dist release.
+
+The following bullet points represent good practices more than guidelines, please
+consider them when modifying the API.
+
+ - Functionality should not be copied and pasted between controllers -
+ consider refactoring functionality into associated classes or short of
+ that into Mixins (http://en.wikipedia.org/wiki/Composition_over_inheritance)
+ - API additions are more permanent changes to Galaxy than many other potential
+ changes and so a second opinion on API changes should be sought. (Consider a
+ pull request!)
+ - New API functionality should include functional tests. These functional
+ tests should be implemented in Python and placed in
+ `test/functional/api`. (Once such a framework is in place - it is not
+ right now).
+ - Changes to reflect modifications to the API should be pushed upstream to
+ the BioBlend project when possible.
+
+Longer term goals/notes.
+
+ - It would be advantageous to have a clearer separation of anonymous and
+ admin handling functionality.
+ - If at some point in the future, functionality needs to be added that
+ breaks backward compatibility in a significant way for a component used by
+ the community, a "dev" variant of the API will be established and the
+ community will be alerted and given a timeframe for when the old behavior
+ will be replaced with the new behavior.
+ - Consistent standards for range-based requests, batch requests, filtered
+ requests, etc... should be established and documented here.
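To make the exception guideline above concrete, here is a rough sketch of the intended pattern - a controller raising a galaxy.exceptions error instead of returning a plain string or a paste HTTP exception. The ObjectNotFound name and the error-dictionary keys in the comment are illustrative assumptions; as the TODO items above say, the actual exception set and response format are still to be pinned down:

    from galaxy import exceptions, web
    from galaxy.web.base.controller import BaseAPIController

    class PagesController( BaseAPIController ):

        @web.expose_api
        def show( self, trans, id, **kwd ):
            page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id( id ) )
            if page is None:
                # The framework would eventually turn this into a 4XX status plus a
                # structured body, e.g. {"err_msg": "...", "err_code": 404} (format TBD).
                raise exceptions.ObjectNotFound( "No page found with id %s" % id )
            return self.encode_all_ids( trans, page.to_dict(), True )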
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf job_conf.xml.sample_advanced
--- a/job_conf.xml.sample_advanced
+++ b/job_conf.xml.sample_advanced
@@ -52,15 +52,44 @@
<param id="private_token">123456789changeme</param>
<!-- Uncomment the following statement to disable file staging (e.g.
if there is a shared file system between Galaxy and the LWR
- server). -->
+ server). Alternatively action can be set to 'copy' - to replace
+ http transfers with file system copies. -->
+ <!-- <param id="default_file_action">none</param> -->
+ <!-- The above option is just the default, the transfer behavior
+ none|copy|http can be configured on a per path basis via the
+ following file. See lib/galaxy/jobs/runners/lwr_client/action_mapper.py
+ for examples of how to configure this file. This is very beta
+ and the nature of this file will likely change.
+ -->
+ <!-- <param id="file_action_config">file_actions.json</param> -->
+ <!-- Uncomment following option to disable Galaxy tool dependency
+ resolution and utilize the remote LWR's configuration of tool
+ dependency resolution instead (same options as Galaxy for
+ dependency resolution are available in LWR).
+ -->
+ <!-- <param id="dependency_resolution">remote</params> -->
+ <!-- Uncomment following option to enable setting metadata on remote
+ LWR server. The 'use_remote_datatypes' option is available for
+ determining whether to use remotely configured datatypes or local
+ ones (both alternatives are a little brittle). -->
+ <!-- <param id="remote_metadata">true</param> -->
+ <!-- <param id="use_remote_datatypes">false</param> -->
+ <!-- If remote LWR server is configured to run jobs as the real user,
+ uncomment the following line to pass the current Galaxy user
+ along. -->
+ <!-- <param id="submit_user">$__user_name__</param> -->
+ <!-- Various other submission parameters can be passed along to the LWR
+ whose use will depend on the remote LWR's configured job manager.
+ For instance:
+ -->
+ <!-- <param id="submit_native_specification">-P bignodes -R y -pe threads 8</param> -->
</destination>
<destination id="ssh_torque" runner="cli">
<param id="shell_plugin">SecureShell</param>
<param id="job_plugin">Torque</param>
<param id="shell_username">foo</param>
<param id="shell_hostname">foo.example.org</param>
- <param id="Job_Execution_Time">24:00:00</param>
+ <param id="job_Resource_List">walltime=24:00:00,ncpus=4</param>
</destination>
<destination id="condor" runner="condor">
<!-- With no params, jobs are submitted to the 'vanilla' universe with:
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -1,16 +1,10 @@
from __future__ import absolute_import
-import sys, os, atexit
+import sys
+import os
-from galaxy import config, jobs, util, tools, web
-import galaxy.tools.search
-import galaxy.tools.data
-import tool_shed.galaxy_install
-import tool_shed.tool_shed_registry
-from galaxy.web import security
+from galaxy import config, jobs
import galaxy.model
-import galaxy.datatypes.registry
import galaxy.security
-from galaxy.objectstore import build_object_store_from_config
import galaxy.quota
from galaxy.tags.tag_handler import GalaxyTagHandler
from galaxy.visualization.genomes import Genomes
@@ -27,7 +21,8 @@
import logging
log = logging.getLogger( __name__ )
-class UniverseApplication( object ):
+
+class UniverseApplication( object, config.ConfiguresGalaxyMixin ):
"""Encapsulates the state of a Universe application"""
def __init__( self, **kwargs ):
print >> sys.stderr, "python path is: " + ", ".join( sys.path )
@@ -38,72 +33,38 @@
self.config.check()
config.configure_logging( self.config )
self.configure_fluent_log()
- # Determine the database url
- if self.config.database_connection:
- db_url = self.config.database_connection
- else:
- db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
- # Set up the tool sheds registry
- if os.path.isfile( self.config.tool_sheds_config ):
- self.tool_shed_registry = tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
- else:
- self.tool_shed_registry = None
- # Initialize database / check for appropriate schema version. # If this
- # is a new installation, we'll restrict the tool migration messaging.
- from galaxy.model.migrate.check import create_or_verify_database
- create_or_verify_database( db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options, app=self )
- # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
- from tool_shed.galaxy_install.migrate.check import verify_tools
- verify_tools( self, db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options )
- # Object store manager
- self.object_store = build_object_store_from_config(self.config, fsmon=True)
+
+ self._configure_tool_shed_registry()
+
+ self._configure_object_store( fsmon=True )
+
# Setup the database engine and ORM
- from galaxy.model import mapping
- self.model = mapping.init( self.config.file_path,
- db_url,
- self.config.database_engine_options,
- database_query_profiling_proxy = self.config.database_query_profiling_proxy,
- object_store = self.object_store,
- trace_logger=self.trace_logger,
- use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True ) )
+ config_file = kwargs.get( 'global_conf', {} ).get( '__file__', None )
+ self._configure_models( check_migrate_databases=True, check_migrate_tools=True, config_file=config_file )
+
# Manage installed tool shed repositories.
- self.installed_repository_manager = tool_shed.galaxy_install.InstalledRepositoryManager( self )
- # Create an empty datatypes registry.
- self.datatypes_registry = galaxy.datatypes.registry.Registry()
- # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories. We
- # load proprietary datatypes before datatypes in the distribution because Galaxy's default sniffers include some
- # generic sniffers (eg text,xml) which catch anything, so it's impossible for proprietary sniffers to be used.
- # However, if there is a conflict (2 datatypes with the same extension) between a proprietary datatype and a datatype
- # in the Galaxy distribution, the datatype in the Galaxy distribution will take precedence. If there is a conflict
- # between 2 proprietary datatypes, the datatype from the repository that was installed earliest will take precedence.
- self.installed_repository_manager.load_proprietary_datatypes()
- # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
- self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
+ from tool_shed.galaxy_install import installed_repository_manager
+ self.installed_repository_manager = installed_repository_manager.InstalledRepositoryManager( self )
+
+ self._configure_datatypes_registry( self.installed_repository_manager )
galaxy.model.set_datatypes_registry( self.datatypes_registry )
+
# Security helper
- self.security = security.SecurityHelper( id_secret=self.config.id_secret )
+ self._configure_security()
# Tag handler
self.tag_handler = GalaxyTagHandler()
# Genomes
self.genomes = Genomes( self )
# Data providers registry.
self.data_provider_registry = DataProviderRegistry()
- # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
- self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( tool_data_path=self.config.tool_data_path,
- config_filename=self.config.tool_data_table_config_path )
- # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
- self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
- tool_data_path=self.tool_data_tables.tool_data_path,
- from_shed_config=False )
+
+ self._configure_tool_data_tables( from_shed_config=False )
+
# Initialize the job management configuration
self.job_config = jobs.JobConfiguration(self)
- # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
- tool_configs = self.config.tool_configs
- if self.config.migrated_tools_config not in tool_configs:
- tool_configs.append( self.config.migrated_tools_config )
- self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
- # Search support for tools
- self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
+
+ self._configure_toolbox()
+
# Load Data Manager
self.data_managers = DataManagers( self )
# If enabled, poll respective tool sheds to see if updates are available for any installed tool shed repositories.
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -1,6 +1,8 @@
"""
Universe configuration builder.
"""
+# absolute_import needed for tool_shed package.
+from __future__ import absolute_import
import sys, os, tempfile, re
import logging, logging.config
@@ -33,15 +35,22 @@
self.umask = os.umask( 077 ) # get the current umask
os.umask( self.umask ) # can't get w/o set, so set it back
self.gid = os.getgid() # if running under newgrp(1) we'll need to fix the group of data created on the cluster
+
# Database related configuration
self.database = resolve_path( kwargs.get( "database_file", "database/universe.sqlite" ), self.root )
self.database_connection = kwargs.get( "database_connection", False )
self.database_engine_options = get_database_engine_options( kwargs )
self.database_create_tables = string_as_bool( kwargs.get( "database_create_tables", "True" ) )
self.database_query_profiling_proxy = string_as_bool( kwargs.get( "database_query_profiling_proxy", "False" ) )
+
# Don't set this to true for production databases, but probably should
# default to True for sqlite databases.
self.database_auto_migrate = string_as_bool( kwargs.get( "database_auto_migrate", "False" ) )
+
+ # Install database related configuration (if different).
+ self.install_database_connection = kwargs.get( "install_database_connection", None )
+ self.install_database_engine_options = get_database_engine_options( kwargs, model_prefix="install_" )
+
# Where dataset files are stored
self.file_path = resolve_path( kwargs.get( "file_path", "database/files" ), self.root )
self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
@@ -439,7 +448,7 @@
admin_users = [ x.strip() for x in self.get( "admin_users", "" ).split( "," ) ]
return ( user is not None and user.email in admin_users )
-def get_database_engine_options( kwargs ):
+def get_database_engine_options( kwargs, model_prefix='' ):
"""
Allow options for the SQLAlchemy database engine to be passed by using
the prefix "database_engine_option".
@@ -455,7 +464,7 @@
'pool_threadlocal': string_as_bool,
'server_side_cursors': string_as_bool
}
- prefix = "database_engine_option_"
+ prefix = "%sdatabase_engine_option_" % model_prefix
prefix_len = len( prefix )
rval = {}
for key, value in kwargs.iteritems():
@@ -466,6 +475,7 @@
rval[ key ] = value
return rval
+
def configure_logging( config ):
"""
Allow some basic logging configuration to be read from ini file.
@@ -506,3 +516,110 @@
sentry_handler.setLevel( logging.WARN )
root.addHandler( sentry_handler )
+
+class ConfiguresGalaxyMixin:
+ """ Shared code for configuring Galaxy-like app objects.
+ """
+
+ def _configure_toolbox( self ):
+ # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
+ tool_configs = self.config.tool_configs
+ if self.config.migrated_tools_config not in tool_configs:
+ tool_configs.append( self.config.migrated_tools_config )
+ from galaxy import tools
+ self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
+ # Search support for tools
+ import galaxy.tools.search
+ self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
+
+ def _configure_tool_data_tables( self, from_shed_config ):
+ from galaxy.tools.data import ToolDataTableManager
+
+ # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
+ self.tool_data_tables = ToolDataTableManager( tool_data_path=self.config.tool_data_path,
+ config_filename=self.config.tool_data_table_config_path )
+ # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
+ self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
+ tool_data_path=self.tool_data_tables.tool_data_path,
+ from_shed_config=from_shed_config )
+
+ def _configure_datatypes_registry( self, installed_repository_manager=None ):
+ from galaxy.datatypes import registry
+ # Create an empty datatypes registry.
+ self.datatypes_registry = registry.Registry()
+ if installed_repository_manager:
+ # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories. We
+ # load proprietary datatypes before datatypes in the distribution because Galaxy's default sniffers include some
+ # generic sniffers (eg text,xml) which catch anything, so it's impossible for proprietary sniffers to be used.
+ # However, if there is a conflict (2 datatypes with the same extension) between a proprietary datatype and a datatype
+ # in the Galaxy distribution, the datatype in the Galaxy distribution will take precedence. If there is a conflict
+ # between 2 proprietary datatypes, the datatype from the repository that was installed earliest will take precedence.
+ installed_repository_manager.load_proprietary_datatypes()
+ # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
+ self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
+
+ def _configure_object_store( self, **kwds ):
+ from galaxy.objectstore import build_object_store_from_config
+ self.object_store = build_object_store_from_config( self.config, **kwds )
+
+ def _configure_security( self ):
+ from galaxy.web import security
+ self.security = security.SecurityHelper( id_secret=self.config.id_secret )
+
+ def _configure_tool_shed_registry( self ):
+ import tool_shed.tool_shed_registry
+
+ # Set up the tool sheds registry
+ if os.path.isfile( self.config.tool_sheds_config ):
+ self.tool_shed_registry = tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
+ else:
+ self.tool_shed_registry = None
+
+ def _configure_models( self, check_migrate_databases=False, check_migrate_tools=False, config_file=None ):
+ """
+ Preconditions: object_store must be set on self.
+ """
+ if self.config.database_connection:
+ db_url = self.config.database_connection
+ else:
+ db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
+ install_db_url = self.config.install_database_connection
+ # TODO: Consider more aggressive check here that this is not the same
+ # database file under the hood.
+ combined_install_database = not( install_db_url and install_db_url != db_url )
+ install_db_url = install_db_url or db_url
+
+ if check_migrate_databases:
+ # Initialize database / check for appropriate schema version. # If this
+ # is a new installation, we'll restrict the tool migration messaging.
+ from galaxy.model.migrate.check import create_or_verify_database
+ create_or_verify_database( db_url, config_file, self.config.database_engine_options, app=self )
+ if not combined_install_database:
+ from galaxy.model.tool_shed_install.migrate.check import create_or_verify_database as tsi_create_or_verify_database
+ tsi_create_or_verify_database( install_db_url, self.config.install_database_engine_options, app=self )
+
+ if check_migrate_tools:
+ # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
+ from tool_shed.galaxy_install.migrate.check import verify_tools
+ verify_tools( self, install_db_url, config_file, self.config.database_engine_options )
+
+ from galaxy.model import mapping
+ self.model = mapping.init( self.config.file_path,
+ db_url,
+ self.config.database_engine_options,
+ map_install_models=combined_install_database,
+ database_query_profiling_proxy=self.config.database_query_profiling_proxy,
+ object_store=self.object_store,
+ trace_logger=getattr(self, "trace_logger", None),
+ use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True ) )
+
+ if combined_install_database:
+ log.info("Install database targeting Galaxy's database configuration.")
+ self.install_model = self.model
+ else:
+ from galaxy.model.tool_shed_install import mapping as install_mapping
+ install_db_url = self.config.install_database_connection
+ log.info("Install database using its own connection %s" % install_db_url)
+ install_db_engine_options = self.config.install_database_engine_options
+ self.install_model = install_mapping.init( install_db_url,
+ install_db_engine_options )
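The model_prefix change above is easy to miss: the same option-parsing helper now serves both the Galaxy database and the optional install database, keyed by an install_ prefix on the ini options. A tiny standalone sketch of that prefix handling - it mirrors only the key-matching behavior of get_database_engine_options (no type conversion), and the option values are invented:

    def engine_options( kwargs, model_prefix='' ):
        prefix = "%sdatabase_engine_option_" % model_prefix
        return dict( ( key[ len( prefix ): ], value )
                     for key, value in kwargs.items()
                     if key.startswith( prefix ) )

    ini_options = {
        'database_engine_option_pool_size': '10',
        'install_database_engine_option_pool_size': '3',
    }

    print engine_options( ini_options )                           # {'pool_size': '10'}
    print engine_options( ini_options, model_prefix='install_' )  # {'pool_size': '3'}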
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/datatypes/checkers.py
--- a/lib/galaxy/datatypes/checkers.py
+++ b/lib/galaxy/datatypes/checkers.py
@@ -2,6 +2,8 @@
from galaxy import util
from StringIO import StringIO
+HTML_CHECK_LINES = 100
+
try:
import Image as PIL
except ImportError:
@@ -32,9 +34,11 @@
regexp1 = re.compile( "<A\s+[^>]*HREF[^>]+>", re.I )
regexp2 = re.compile( "<IFRAME[^>]*>", re.I )
regexp3 = re.compile( "<FRAMESET[^>]*>", re.I )
- regexp4 = re.compile( "<META[^>]*>", re.I )
+ regexp4 = re.compile( "<META[\W][^>]*>", re.I )
regexp5 = re.compile( "<SCRIPT[^>]*>", re.I )
lineno = 0
+ # TODO: Potentially reading huge lines into string here, this should be
+ # reworked.
for line in temp:
lineno += 1
matches = regexp1.search( line ) or regexp2.search( line ) or regexp3.search( line ) or regexp4.search( line ) or regexp5.search( line )
@@ -42,7 +46,7 @@
if chunk is None:
temp.close()
return True
- if lineno > 100:
+ if HTML_CHECK_LINES and (lineno > HTML_CHECK_LINES):
break
if chunk is None:
temp.close()
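Two separate things change in that hunk: the line limit becomes the named constant HTML_CHECK_LINES, and the META pattern now requires a non-word character after the tag name. A quick standalone illustration of what the tighter pattern buys (the sample strings are invented):

    import re

    old_meta = re.compile( "<META[^>]*>", re.I )
    new_meta = re.compile( "<META[\W][^>]*>", re.I )

    print bool( old_meta.search( "<metadata>42</metadata>" ) )  # True - false positive on data files
    print bool( new_meta.search( "<metadata>42</metadata>" ) )  # False
    print bool( new_meta.search( "<meta charset='utf-8'>" ) )   # True - real HTML is still caught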
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -199,6 +199,26 @@
out = "Can't create peek %s" % str( exc )
return out
+ def _archive_main_file(self, archive, display_name, data_filename):
+ """Called from _archive_composite_dataset to add central file to archive.
+
+ Unless subclassed, this will add the main dataset file (argument data_filename)
+ to the archive, as an HTML file with its filename derived from the dataset name
+ (argument display_name).
+
+ Returns a tuple of boolean, string, string: (error, msg, messagetype)
+ """
+ error, msg, messagetype = False, "", ""
+ archname = '%s.html' % display_name # fake the real nature of the html file
+ try:
+ archive.add(data_filename, archname)
+ except IOError:
+ error = True
+ log.exception("Unable to add composite parent %s to temporary library download archive" % data_filename)
+ msg = "Unable to create archive for download, please report this error"
+ messagetype = "error"
+ return error, msg, messagetype
+
def _archive_composite_dataset( self, trans, data=None, **kwd ):
# save a composite object into a compressed archive for downloading
params = util.Params( kwd )
@@ -237,29 +257,27 @@
path = data.file_name
fname = os.path.split(path)[-1]
efp = data.extra_files_path
- htmlname = os.path.splitext(outfname)[0]
- if not htmlname.endswith(ext):
- htmlname = '%s_%s' % (htmlname,ext)
- archname = '%s.html' % htmlname # fake the real nature of the html file
- try:
- archive.add(data.file_name,archname)
- except IOError:
- error = True
- log.exception( "Unable to add composite parent %s to temporary library download archive" % data.file_name)
- msg = "Unable to create archive for download, please report this error"
- messagetype = 'error'
- for root, dirs, files in os.walk(efp):
- for fname in files:
- fpath = os.path.join(root,fname)
- rpath = os.path.relpath(fpath,efp)
- try:
- archive.add( fpath,rpath )
- except IOError:
- error = True
- log.exception( "Unable to add %s to temporary library download archive" % rpath)
- msg = "Unable to create archive for download, please report this error"
- messagetype = 'error'
- continue
+ #Add any central file to the archive,
+
+ display_name = os.path.splitext(outfname)[0]
+ if not display_name.endswith(ext):
+ display_name = '%s_%s' % (display_name, ext)
+
+ error, msg, messagetype = self._archive_main_file(archive, display_name, path)
+ if not error:
+ #Add any child files to the archive,
+ for root, dirs, files in os.walk(efp):
+ for fname in files:
+ fpath = os.path.join(root,fname)
+ rpath = os.path.relpath(fpath,efp)
+ try:
+ archive.add( fpath,rpath )
+ except IOError:
+ error = True
+ log.exception( "Unable to add %s to temporary library download archive" % rpath)
+ msg = "Unable to create archive for download, please report this error"
+ messagetype = 'error'
+ continue
if not error:
if params.do_action == 'zip':
archive.close()
@@ -288,7 +306,14 @@
return open( dataset.file_name )
def display_data(self, trans, data, preview=False, filename=None, to_ext=None, size=None, offset=None, **kwd):
- """ Old display method, for transition """
+ """ Old display method, for transition - though still used by API and
+ test framework. Datatypes should be very careful if overriding this
+ method and this interface between datatypes and Galaxy will likely
+ change.
+
+ TODO: Document alternatives to overriding this method (data
+ providers?).
+ """
#Relocate all composite datatype display to a common location.
composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
composite_extensions.append('html') # for archiving composite datatypes
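The refactoring above turns "add the primary file to the archive" into an overridable hook. As a sketch of that extension point - assuming, as the hunk suggests, the hook lives on the base Data datatype; the subclass name and archive layout are invented, but the return contract matches the docstring above:

    import logging
    from galaxy.datatypes.data import Data

    log = logging.getLogger( __name__ )

    class MyCompositeType( Data ):

        def _archive_main_file( self, archive, display_name, data_filename ):
            # Same (error, msg, messagetype) contract as the base implementation, but
            # store the primary file under a directory-style name instead of faking
            # an .html extension.
            error, msg, messagetype = False, "", ""
            try:
                archive.add( data_filename, "%s/index.html" % display_name )
            except IOError:
                error = True
                log.exception( "Unable to add %s to download archive" % data_filename )
                msg = "Unable to create archive for download, please report this error"
                messagetype = "error"
            return error, msg, messagetype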
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -348,7 +348,7 @@
try:
aclass = getattr( module, datatype_class_name )()
except Exception, e:
- self.log.exception( 'Error calling method %s from class %s: %s' ( str( datatype_class_name ), str( module ), str( e ) ) )
+ self.log.exception( 'Error calling method %s from class %s: %s', str( datatype_class_name ), str( module ), str( e ) )
ok = False
if ok:
if deactivate:
@@ -598,6 +598,9 @@
tool_xml_text = """
<tool id="__SET_METADATA__" name="Set External Metadata" version="1.0.1" tool_type="set_metadata">
<type class="SetMetadataTool" module="galaxy.tools"/>
+ <requirements>
+ <requirement type="package">samtools</requirement>
+ </requirements>
<action module="galaxy.tools.actions.metadata" class="SetMetadataToolAction"/>
<command>$__SET_EXTERNAL_METADATA_COMMAND_LINE__</command>
<inputs>
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/exceptions/__init__.py
--- a/lib/galaxy/exceptions/__init__.py
+++ b/lib/galaxy/exceptions/__init__.py
@@ -1,6 +1,10 @@
"""
Custom exceptions for Galaxy
"""
+
+from galaxy import eggs
+eggs.require( "Paste" )
+
from paste import httpexceptions
class MessageException( Exception ):
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -14,7 +14,6 @@
import shutil
import subprocess
import sys
-import threading
import traceback
from galaxy import model, util
from galaxy.datatypes import metadata
@@ -39,21 +38,6 @@
# and should eventually become API'd
TOOL_PROVIDED_JOB_METADATA_FILE = 'galaxy.json'
-class Sleeper( object ):
- """
- Provides a 'sleep' method that sleeps for a number of seconds *unless*
- the notify method is called (from a different thread).
- """
- def __init__( self ):
- self.condition = threading.Condition()
- def sleep( self, seconds ):
- self.condition.acquire()
- self.condition.wait( seconds )
- self.condition.release()
- def wake( self ):
- self.condition.acquire()
- self.condition.notify()
- self.condition.release()
class JobDestination( Bunch ):
"""
@@ -704,10 +688,7 @@
if self.command_line and self.command_line.startswith( 'python' ):
self.galaxy_lib_dir = os.path.abspath( "lib" ) # cwd = galaxy root
# Shell fragment to inject dependencies
- if self.app.config.use_tool_dependencies:
- self.dependency_shell_commands = self.tool.build_dependency_shell_commands()
- else:
- self.dependency_shell_commands = None
+ self.dependency_shell_commands = self.tool.build_dependency_shell_commands()
# We need command_line persisted to the db in order for Galaxy to re-queue the job
# if the server was stopped and restarted before the job finished
job.command_line = self.command_line
@@ -1451,10 +1432,7 @@
if self.command_line and self.command_line.startswith( 'python' ):
self.galaxy_lib_dir = os.path.abspath( "lib" ) # cwd = galaxy root
# Shell fragment to inject dependencies
- if self.app.config.use_tool_dependencies:
- self.dependency_shell_commands = self.tool.build_dependency_shell_commands()
- else:
- self.dependency_shell_commands = None
+ self.dependency_shell_commands = self.tool.build_dependency_shell_commands()
# We need command_line persisted to the db in order for Galaxy to re-queue the job
# if the server was stopped and restarted before the job finished
task.command_line = self.command_line
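The Sleeper removed here is not gone - the handler.py and manager.py hunks below re-import it from its new home in galaxy.util.sleeper, unchanged. Roughly, it lets a polling loop sleep between passes yet be woken immediately from another thread:

    import threading
    from galaxy.util.sleeper import Sleeper

    sleeper = Sleeper()
    stop = threading.Event()

    def monitor_loop():
        while not stop.is_set():
            # ... poll for new work here ...
            sleeper.sleep( 5 )  # waits up to 5 seconds unless wake() is called

    worker = threading.Thread( target=monitor_loop )
    worker.start()

    stop.set()        # request shutdown ...
    sleeper.wake()    # ... and wake the loop so it notices immediately
    worker.join()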
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/actions/post.py
--- a/lib/galaxy/jobs/actions/post.py
+++ b/lib/galaxy/jobs/actions/post.py
@@ -12,7 +12,7 @@
form = """
if (pja.action_type == "%s"){
p_str = "<div class='pjaForm toolForm'><span class='action_tag' style='display:none'>"+ pja.action_type + pja.output_name + "</span><div class='toolFormTitle'> %s <br/> on " + pja.output_name + "\
- <div style='float: right;' class='buttons'><img src='/static/images/delete_icon.png'></div></div><div class='toolFormBody'>";
+ <div style='float: right;' class='buttons'><img src='/static/images/history-buttons/delete_icon.png'></div></div><div class='toolFormBody'>";
%s
p_str += "</div><div class='toolParamHelp'>%s</div></div>";
}""" % (action_type, title, content, help)
@@ -20,7 +20,7 @@
form = """
if (pja.action_type == "%s"){
p_str = "<div class='pjaForm toolForm'><span class='action_tag' style='display:none'>"+ pja.action_type + "</span><div class='toolFormTitle'> %s \
- <div style='float: right;' class='buttons'><img src='/static/images/delete_icon.png'></div></div><div class='toolFormBody'>";
+ <div style='float: right;' class='buttons'><img src='/static/images/history-buttons/delete_icon.png'></div></div><div class='toolFormBody'>";
%s
p_str += "</div><div class='toolParamHelp'>%s</div></div>";
}""" % (action_type, title, content, help)
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/command_factory.py
--- a/lib/galaxy/jobs/command_factory.py
+++ b/lib/galaxy/jobs/command_factory.py
@@ -1,8 +1,11 @@
from os import getcwd
from os.path import abspath
+CAPTURE_RETURN_CODE = "return_code=$?"
+YIELD_CAPTURED_CODE = 'sh -c "exit $return_code"'
-def build_command( job, job_wrapper, include_metadata=False, include_work_dir_outputs=True ):
+
+def build_command( runner, job_wrapper, include_metadata=False, include_work_dir_outputs=True, remote_command_params={} ):
"""
Compose the sequence of commands necessary to execute a job. This will
currently include:
@@ -13,64 +16,125 @@
- commands to set metadata (if include_metadata is True)
"""
- commands = job_wrapper.get_command_line()
+ commands_builder = CommandsBuilder(job_wrapper.get_command_line())
# All job runners currently handle this case which should never occur
- if not commands:
+ if not commands_builder.commands:
return None
- # Remove trailing semi-colon so we can start hacking up this command.
- # TODO: Refactor to compose a list and join with ';', would be more clean.
- commands = commands.rstrip("; ")
+ __handle_version_command(commands_builder, job_wrapper)
+ __handle_task_splitting(commands_builder, job_wrapper)
+ __handle_dependency_resolution(commands_builder, job_wrapper, remote_command_params)
+ if include_work_dir_outputs:
+ __handle_work_dir_outputs(commands_builder, job_wrapper, runner, remote_command_params)
+
+ if include_metadata and job_wrapper.requires_setting_metadata:
+ __handle_metadata(commands_builder, job_wrapper, runner, remote_command_params)
+
+ return commands_builder.build()
+
+
+def __handle_version_command(commands_builder, job_wrapper):
# Prepend version string
if job_wrapper.version_string_cmd:
- commands = "%s &> %s; " % ( job_wrapper.version_string_cmd, job_wrapper.get_version_string_path() ) + commands
+ version_command = "%s &> %s" % ( job_wrapper.version_string_cmd, job_wrapper.get_version_string_path() )
+ commands_builder.prepend_command(version_command)
+
+def __handle_task_splitting(commands_builder, job_wrapper):
# prepend getting input files (if defined)
- if hasattr(job_wrapper, 'prepare_input_files_cmds') and job_wrapper.prepare_input_files_cmds is not None:
- commands = "; ".join( job_wrapper.prepare_input_files_cmds + [ commands ] )
+ if getattr(job_wrapper, 'prepare_input_files_cmds', None):
+ commands_builder.prepend_commands(job_wrapper.prepare_input_files_cmds)
+
+
+def __handle_dependency_resolution(commands_builder, job_wrapper, remote_command_params):
+ local_dependency_resolution = remote_command_params.get("dependency_resolution", "local") == "local"
# Prepend dependency injection
- if job_wrapper.dependency_shell_commands:
- commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
+ if job_wrapper.dependency_shell_commands and local_dependency_resolution:
+ commands_builder.prepend_commands(job_wrapper.dependency_shell_commands)
- # Coping work dir outputs or setting metadata will mask return code of
- # tool command. If these are used capture the return code and ensure
- # the last thing that happens is an exit with return code.
- capture_return_code_command = "; return_code=$?"
- captured_return_code = False
+def __handle_work_dir_outputs(commands_builder, job_wrapper, runner, remote_command_params):
# Append commands to copy job outputs based on from_work_dir attribute.
- if include_work_dir_outputs:
- work_dir_outputs = job.get_work_dir_outputs( job_wrapper )
- if work_dir_outputs:
- if not captured_return_code:
- commands += capture_return_code_command
- captured_return_code = True
+ work_dir_outputs_kwds = {}
+ if 'working_directory' in remote_command_params:
+ work_dir_outputs_kwds['job_working_directory'] = remote_command_params['working_directory']
+ work_dir_outputs = runner.get_work_dir_outputs( job_wrapper, **work_dir_outputs_kwds )
+ if work_dir_outputs:
+ commands_builder.capture_return_code()
+ copy_commands = map(__copy_if_exists_command, work_dir_outputs)
+ commands_builder.append_commands(copy_commands)
- commands += "; " + "; ".join( [ "if [ -f %s ] ; then cp %s %s ; fi" %
- ( source_file, source_file, destination ) for ( source_file, destination ) in work_dir_outputs ] )
+def __handle_metadata(commands_builder, job_wrapper, runner, remote_command_params):
# Append metadata setting commands, we don't want to overwrite metadata
# that was copied over in init_meta(), as per established behavior
- if include_metadata and job_wrapper.requires_setting_metadata:
- metadata_command = job_wrapper.setup_external_metadata(
- exec_dir=abspath( getcwd() ),
- tmp_dir=job_wrapper.working_directory,
- dataset_files_path=job.app.model.Dataset.file_path,
- output_fnames=job_wrapper.get_output_fnames(),
- set_extension=False,
- kwds={ 'overwrite' : False }
- ) or ''
- metadata_command = metadata_command.strip()
- if metadata_command:
- if not captured_return_code:
- commands += capture_return_code_command
- captured_return_code = True
- commands += "; cd %s; %s" % (abspath( getcwd() ), metadata_command)
+ metadata_kwds = remote_command_params.get('metadata_kwds', {})
+ exec_dir = metadata_kwds.get( 'exec_dir', abspath( getcwd() ) )
+ tmp_dir = metadata_kwds.get( 'tmp_dir', job_wrapper.working_directory )
+ dataset_files_path = metadata_kwds.get( 'dataset_files_path', runner.app.model.Dataset.file_path )
+ output_fnames = metadata_kwds.get( 'output_fnames', job_wrapper.get_output_fnames() )
+ config_root = metadata_kwds.get( 'config_root', None )
+ config_file = metadata_kwds.get( 'config_file', None )
+ datatypes_config = metadata_kwds.get( 'datatypes_config', None )
+ metadata_command = job_wrapper.setup_external_metadata(
+ exec_dir=exec_dir,
+ tmp_dir=tmp_dir,
+ dataset_files_path=dataset_files_path,
+ output_fnames=output_fnames,
+ set_extension=False,
+ config_root=config_root,
+ config_file=config_file,
+ datatypes_config=datatypes_config,
+ kwds={ 'overwrite' : False }
+ ) or ''
+ metadata_command = metadata_command.strip()
+ if metadata_command:
+ commands_builder.capture_return_code()
+ commands_builder.append_command("cd %s; %s" % (exec_dir, metadata_command))
- if captured_return_code:
- commands += '; sh -c "exit $return_code"'
- return commands
+def __copy_if_exists_command(work_dir_output):
+ source_file, destination = work_dir_output
+ return "if [ -f %s ] ; then cp %s %s ; fi" % ( source_file, source_file, destination )
+
+
+class CommandsBuilder(object):
+
+ def __init__(self, initial_command):
+ # Remove trailing semi-colon so we can start hacking up this command.
+ # TODO: Refactor to compose a list and join with ';', would be more clean.
+ commands = initial_command.rstrip("; ")
+ self.commands = commands
+
+ # Copying work dir outputs or setting metadata will mask return code of
+ # tool command. If these are used capture the return code and ensure
+ # the last thing that happens is an exit with return code.
+ self.return_code_captured = False
+
+ def prepend_command(self, command):
+ self.commands = "%s; %s" % (command, self.commands)
+ return self
+
+ def prepend_commands(self, commands):
+ return self.prepend_command("; ".join(commands))
+
+ def append_command(self, command):
+ self.commands = "%s; %s" % (self.commands, command)
+
+ def append_commands(self, commands):
+ self.append_command("; ".join(commands))
+
+ def capture_return_code(self):
+ if not self.return_code_captured:
+ self.return_code_captured = True
+ self.append_command(CAPTURE_RETURN_CODE)
+
+ def build(self):
+ if self.return_code_captured:
+ self.append_command(YIELD_CAPTURED_CODE)
+ return self.commands
+
+__all__ = [build_command]
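Because the string splicing is easy to lose track of in diff form, here is a small usage sketch of the new CommandsBuilder showing how return-code capture brackets the extra commands (the tool command and file names are invented):

    from galaxy.jobs.command_factory import CommandsBuilder

    builder = CommandsBuilder( "echo 'hello world'; " )
    builder.prepend_commands( [ ". ./dependency_env.sh" ] )  # e.g. dependency injection
    builder.capture_return_code()                            # appends return_code=$?
    builder.append_command( "cp working/output.dat outputs/output.dat" )
    print builder.build()
    # . ./dependency_env.sh; echo 'hello world'; return_code=$?; cp working/output.dat outputs/output.dat; sh -c "exit $return_code"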
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -11,7 +11,8 @@
from sqlalchemy.sql.expression import and_, or_, select, func
from galaxy import model
-from galaxy.jobs import Sleeper, JobWrapper, TaskWrapper, JobDestination
+from galaxy.util.sleeper import Sleeper
+from galaxy.jobs import JobWrapper, TaskWrapper, JobDestination
log = logging.getLogger( __name__ )
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/manager.py
--- a/lib/galaxy/jobs/manager.py
+++ b/lib/galaxy/jobs/manager.py
@@ -11,7 +11,8 @@
from Queue import Empty, Queue
from galaxy import model
-from galaxy.jobs import handler, JobWrapper, NoopQueue, Sleeper
+from galaxy.util.sleeper import Sleeper
+from galaxy.jobs import handler, JobWrapper, NoopQueue
from galaxy.util.json import from_json_string
log = logging.getLogger( __name__ )
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -146,11 +146,13 @@
def build_command_line( self, job_wrapper, include_metadata=False, include_work_dir_outputs=True ):
return build_command( self, job_wrapper, include_metadata=include_metadata, include_work_dir_outputs=include_work_dir_outputs )
- def get_work_dir_outputs( self, job_wrapper ):
+ def get_work_dir_outputs( self, job_wrapper, job_working_directory=None ):
"""
Returns list of pairs (source_file, destination) describing path
to work_dir output file and ultimate destination.
"""
+ if not job_working_directory:
+ job_working_directory = os.path.abspath( job_wrapper.working_directory )
def in_directory( file, directory ):
"""
@@ -186,7 +188,7 @@
if hda_tool_output and hda_tool_output.from_work_dir:
# Copy from working dir to HDA.
# TODO: move instead of copy to save time?
- source_file = os.path.join( os.path.abspath( job_wrapper.working_directory ), hda_tool_output.from_work_dir )
+ source_file = os.path.join( job_working_directory, hda_tool_output.from_work_dir )
destination = job_wrapper.get_output_destination( output_paths[ dataset.dataset_id ] )
if in_directory( source_file, job_wrapper.working_directory ):
output_pairs.append( ( source_file, destination ) )
@@ -196,7 +198,7 @@
log.exception( "from_work_dir specified a location not in the working directory: %s, %s" % ( source_file, job_wrapper.working_directory ) )
return output_pairs
- def _handle_metadata_externally(self, job_wrapper):
+ def _handle_metadata_externally( self, job_wrapper, resolve_requirements=False ):
"""
Set metadata externally. Used by the local and lwr job runners where this
shouldn't be attached to command-line to execute.
@@ -210,6 +212,12 @@
tmp_dir=job_wrapper.working_directory,
#we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior
kwds={ 'overwrite' : False } )
+ if resolve_requirements:
+ dependency_shell_commands = self.app.datatypes_registry.set_external_metadata_tool.build_dependency_shell_commands()
+ if dependency_shell_commands:
+ if isinstance( dependency_shell_commands, list ):
+ dependency_shell_commands = "&&".join( dependency_shell_commands )
+ external_metadata_script = "%s&&%s" % ( dependency_shell_commands, external_metadata_script )
log.debug( 'executing external set_meta script for job %d: %s' % ( job_wrapper.job_id, external_metadata_script ) )
external_metadata_proc = subprocess.Popen( args=external_metadata_script,
shell=True,
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/cli_job/torque.py
--- a/lib/galaxy/jobs/runners/cli_job/torque.py
+++ b/lib/galaxy/jobs/runners/cli_job/torque.py
@@ -35,7 +35,8 @@
echo $? > %s
"""
-argmap = { 'Execution_Time' : '-a',
+argmap = { 'destination' : '-q',
+ 'Execution_Time' : '-a',
'Account_Name' : '-A',
'Checkpoint' : '-c',
'Error_Path' : '-e',
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -302,7 +302,15 @@
The external script will be run with sudo, and will setuid() to the specified user.
Effectively, will QSUB as a different user (then the one used by Galaxy).
"""
- p = subprocess.Popen([ '/usr/bin/sudo', '-E', self.external_runJob_script, str(username), jobtemplate_filename ],
+ script_parts = self.external_runJob_script.split()
+ script = script_parts[0]
+ command = [ '/usr/bin/sudo', '-E', script]
+ for script_argument in script_parts[1:]:
+ command.append(script_argument)
+
+ command.extend( [ str(username), jobtemplate_filename ] )
+ log.info("Running command %s" % command)
+ p = subprocess.Popen(command,
shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdoutdata, stderrdata) = p.communicate()
exitcode = p.returncode
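The drmaa change above simply lets the external run-job script carry its own arguments, which are split off before the sudo command is assembled. A trivial illustration with invented paths and username:

    external_runJob_script = "/usr/local/bin/run_job_as_user.sh --verbose"
    script_parts = external_runJob_script.split()
    command = [ '/usr/bin/sudo', '-E', script_parts[ 0 ] ] + script_parts[ 1: ] + [ 'alice', '/tmp/job_template.json' ]
    print command
    # ['/usr/bin/sudo', '-E', '/usr/local/bin/run_job_as_user.sh', '--verbose', 'alice', '/tmp/job_template.json']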
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -110,7 +110,7 @@
job_wrapper.fail( "failure running job", exception=True )
log.exception("failure running job %d" % job_wrapper.job_id)
return
- self._handle_metadata_externally( job_wrapper )
+ self._handle_metadata_externally( job_wrapper, resolve_requirements=True )
# Finish the job!
try:
job_wrapper.finish( stdout, stderr, exit_code )
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -3,7 +3,9 @@
from galaxy import model
from galaxy.jobs.runners import AsynchronousJobState, AsynchronousJobRunner
from galaxy.jobs import JobDestination
+from galaxy.jobs.command_factory import build_command
from galaxy.util import string_as_bool_or_none
+from galaxy.util.bunch import Bunch
import errno
from time import sleep
@@ -12,11 +14,15 @@
from .lwr_client import ClientManager, url_to_destination_params
from .lwr_client import finish_job as lwr_finish_job
from .lwr_client import submit_job as lwr_submit_job
+from .lwr_client import ClientJobDescription
log = logging.getLogger( __name__ )
__all__ = [ 'LwrJobRunner' ]
+NO_REMOTE_GALAXY_FOR_METADATA_MESSAGE = "LWR misconfiguration - LWR client configured to set metadata remotely, but remote LWR isn't properly configured with a galaxy_home directory."
+NO_REMOTE_DATATYPES_CONFIG = "LWR client is configured to use remote datatypes configuration when setting metadata externally, but LWR is not configured with this information. Defaulting to datatypes_conf.xml."
+
class LwrJobRunner( AsynchronousJobRunner ):
"""
@@ -54,40 +60,31 @@
return job_state
def queue_job(self, job_wrapper):
- command_line = ''
job_destination = job_wrapper.job_destination
- try:
- job_wrapper.prepare()
- if hasattr(job_wrapper, 'prepare_input_files_cmds') and job_wrapper.prepare_input_files_cmds is not None:
- for cmd in job_wrapper.prepare_input_files_cmds: # run the commands to stage the input files
- #log.debug( 'executing: %s' % cmd )
- if 0 != os.system(cmd):
- raise Exception('Error running file staging command: %s' % cmd)
- job_wrapper.prepare_input_files_cmds = None # prevent them from being used in-line
- command_line = self.build_command_line( job_wrapper, include_metadata=False, include_work_dir_outputs=False )
- except:
- job_wrapper.fail( "failure preparing job", exception=True )
- log.exception("failure running job %d" % job_wrapper.job_id)
- return
+ command_line, client, remote_job_config = self.__prepare_job( job_wrapper, job_destination )
- # If we were able to get a command line, run the job
if not command_line:
- job_wrapper.finish( '', '' )
return
try:
- client = self.get_client_from_wrapper(job_wrapper)
- output_files = self.get_output_files(job_wrapper)
- input_files = job_wrapper.get_input_fnames()
- working_directory = job_wrapper.working_directory
- tool = job_wrapper.tool
- config_files = job_wrapper.extra_filenames
- job_id = lwr_submit_job(client, tool, command_line, config_files, input_files, output_files, working_directory)
+ dependency_resolution = LwrJobRunner.__dependency_resolution( client )
+ remote_dependency_resolution = dependency_resolution == "remote"
+ requirements = job_wrapper.tool.requirements if remote_dependency_resolution else []
+ client_job_description = ClientJobDescription(
+ command_line=command_line,
+ output_files=self.get_output_files(job_wrapper),
+ input_files=job_wrapper.get_input_fnames(),
+ working_directory=job_wrapper.working_directory,
+ tool=job_wrapper.tool,
+ config_files=job_wrapper.extra_filenames,
+ requirements=requirements,
+ )
+ job_id = lwr_submit_job(client, client_job_description, remote_job_config)
log.info("lwr job submitted with job_id %s" % job_id)
job_wrapper.set_job_destination( job_destination, job_id )
job_wrapper.change_state( model.Job.states.QUEUED )
- except:
+ except Exception:
job_wrapper.fail( "failure running job", exception=True )
log.exception("failure running job %d" % job_wrapper.job_id)
return
@@ -100,6 +97,52 @@
lwr_job_state.job_destination = job_destination
self.monitor_job(lwr_job_state)
+ def __prepare_job(self, job_wrapper, job_destination):
+ """ Build command-line and LWR client for this job. """
+ command_line = None
+ client = None
+ remote_job_config = None
+ try:
+ job_wrapper.prepare()
+ self.__prepare_input_files_locally(job_wrapper)
+ client = self.get_client_from_wrapper(job_wrapper)
+ tool = job_wrapper.tool
+ remote_job_config = client.setup(tool.id, tool.version)
+ remote_metadata = LwrJobRunner.__remote_metadata( client )
+ remote_work_dir_copy = LwrJobRunner.__remote_work_dir_copy( client )
+ dependency_resolution = LwrJobRunner.__dependency_resolution( client )
+ metadata_kwds = self.__build_metadata_configuration(client, job_wrapper, remote_metadata, remote_job_config)
+ remote_command_params = dict(
+ working_directory=remote_job_config['working_directory'],
+ metadata_kwds=metadata_kwds,
+ dependency_resolution=dependency_resolution,
+ )
+ command_line = build_command(
+ self,
+ job_wrapper=job_wrapper,
+ include_metadata=remote_metadata,
+ include_work_dir_outputs=remote_work_dir_copy,
+ remote_command_params=remote_command_params,
+ )
+ except Exception:
+ job_wrapper.fail( "failure preparing job", exception=True )
+ log.exception("failure running job %d" % job_wrapper.job_id)
+
+ # If we were able to get a command line, run the job
+ if not command_line:
+ job_wrapper.finish( '', '' )
+
+ return command_line, client, remote_job_config
+
+ def __prepare_input_files_locally(self, job_wrapper):
+ """Run task splitting commands locally."""
+ prepare_input_files_cmds = getattr(job_wrapper, 'prepare_input_files_cmds', None)
+ if prepare_input_files_cmds is not None:
+ for cmd in prepare_input_files_cmds: # run the commands to stage the input files
+ if 0 != os.system(cmd):
+ raise Exception('Error running file staging command: %s' % cmd)
+ job_wrapper.prepare_input_files_cmds = None # prevent them from being used in-line
+
def get_output_files(self, job_wrapper):
output_fnames = job_wrapper.get_output_fnames()
return [ str( o ) for o in output_fnames ]
@@ -130,34 +173,42 @@
run_results = client.raw_check_complete()
stdout = run_results.get('stdout', '')
stderr = run_results.get('stderr', '')
-
+ working_directory_contents = run_results.get('working_directory_contents', [])
# Use LWR client code to transfer/copy files back
# and cleanup job if needed.
completed_normally = \
job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ]
cleanup_job = self.app.config.cleanup_job
- work_dir_outputs = self.get_work_dir_outputs( job_wrapper )
+ remote_work_dir_copy = LwrJobRunner.__remote_work_dir_copy( client )
+ if not remote_work_dir_copy:
+ work_dir_outputs = self.get_work_dir_outputs( job_wrapper )
+ else:
+ # They have already been copied over to look like regular outputs remotely,
+ # no need to handle them differently here.
+ work_dir_outputs = []
output_files = self.get_output_files( job_wrapper )
finish_args = dict( client=client,
working_directory=job_wrapper.working_directory,
job_completed_normally=completed_normally,
cleanup_job=cleanup_job,
work_dir_outputs=work_dir_outputs,
- output_files=output_files )
+ output_files=output_files,
+ working_directory_contents=working_directory_contents )
failed = lwr_finish_job( **finish_args )
if failed:
job_wrapper.fail("Failed to find or download one or more job outputs from remote server.", exception=True)
- except:
+ except Exception:
message = "Failed to communicate with remote job server."
job_wrapper.fail( message, exception=True )
log.exception("failure running job %d" % job_wrapper.job_id)
return
- self._handle_metadata_externally( job_wrapper )
+ if not LwrJobRunner.__remote_metadata( client ):
+ self._handle_metadata_externally( job_wrapper, resolve_requirements=True )
# Finish the job
try:
job_wrapper.finish( stdout, stderr )
- except:
+ except Exception:
log.exception("Job wrapper finish method failed")
job_wrapper.fail("Unable to finish job", exception=True)
@@ -225,3 +276,71 @@
job_state.old_state = True
job_state.running = state == model.Job.states.RUNNING
self.monitor_queue.put( job_state )
+
+ @staticmethod
+ def __dependency_resolution( lwr_client ):
+ dependency_resolution = lwr_client.destination_params.get( "dependency_resolution", "local" )
+ if dependency_resolution not in ["none", "local", "remote"]:
+ raise Exception("Unknown dependency_resolution value encountered %s" % dependency_resolution)
+ return dependency_resolution
+
+ @staticmethod
+ def __remote_metadata( lwr_client ):
+ remote_metadata = string_as_bool_or_none( lwr_client.destination_params.get( "remote_metadata", False ) )
+ return remote_metadata
+
+ @staticmethod
+ def __remote_work_dir_copy( lwr_client ):
+ # Right now remote metadata handling assumes from_work_dir outputs
+ # have been copied over before it runs, so do that remotely. This is
+ # not the default, though, because adding it to the command line is not
+ # cross-platform (no cp on Windows) and it is unneeded work outside
+ # the context of metadata setting (it is just as easy to download from
+ # either place).
+ return LwrJobRunner.__remote_metadata( lwr_client )
+
+ @staticmethod
+ def __use_remote_datatypes_conf( lwr_client ):
+ """ When setting remote metadata, use integrated datatypes from this
+ Galaxy instance or use the datatypes config configured via the remote
+ LWR.
+
+ Both options are broken in different ways for the same reason - datatypes
+ may not match. One can push the local datatypes config to the remote
+ server - but there is no guarantee these datatypes will be defined
+ there. Alternatively, one can use the remote datatype config - but
+ there is no guarantee that it will contain all the datatypes available
+ to this Galaxy.
+ """
+ use_remote_datatypes = string_as_bool_or_none( lwr_client.destination_params.get( "use_remote_datatypes", False ) )
+ return use_remote_datatypes
+
+ def __build_metadata_configuration(self, client, job_wrapper, remote_metadata, remote_job_config):
+ metadata_kwds = {}
+ if remote_metadata:
+ remote_system_properties = remote_job_config.get("system_properties", {})
+ remote_galaxy_home = remote_system_properties.get("galaxy_home", None)
+ if not remote_galaxy_home:
+ raise Exception(NO_REMOTE_GALAXY_FOR_METADATA_MESSAGE)
+ metadata_kwds['exec_dir'] = remote_galaxy_home
+ outputs_directory = remote_job_config['outputs_directory']
+ configs_directory = remote_job_config['configs_directory']
+ outputs = [Bunch(false_path=os.path.join(outputs_directory, os.path.basename(path)), real_path=path) for path in self.get_output_files(job_wrapper)]
+ metadata_kwds['output_fnames'] = outputs
+ metadata_kwds['config_root'] = remote_galaxy_home
+ default_config_file = os.path.join(remote_galaxy_home, 'universe_wsgi.ini')
+ metadata_kwds['config_file'] = remote_system_properties.get('galaxy_config_file', default_config_file)
+ metadata_kwds['dataset_files_path'] = remote_system_properties.get('galaxy_dataset_files_path', None)
+ if LwrJobRunner.__use_remote_datatypes_conf( client ):
+ remote_datatypes_config = remote_system_properties.get('galaxy_datatypes_config_file', None)
+ if not remote_datatypes_config:
+ log.warn(NO_REMOTE_DATATYPES_CONFIG)
+ remote_datatypes_config = os.path.join(remote_galaxy_home, 'datatypes_conf.xml')
+ metadata_kwds['datatypes_config'] = remote_datatypes_config
+ else:
+ integrates_datatypes_config = self.app.datatypes_registry.integrated_datatypes_configs
+ # Ensure this file gets pushed out to the remote config dir.
+ job_wrapper.extra_filenames.append(integrates_datatypes_config)
+
+ metadata_kwds['datatypes_config'] = os.path.join(configs_directory, os.path.basename(integrates_datatypes_config))
+ return metadata_kwds
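To make the new destination options above concrete, here is a small, self-contained sketch of how the three LWR destination parameters are read. The MockClient class, its parameter values, and the simplified string_as_bool_or_none helper are illustrative assumptions; only the parameter names and defaults mirror the static helpers in this hunk.

    # Illustrative sketch only: how "dependency_resolution", "remote_metadata" and
    # "use_remote_datatypes" are pulled out of an LWR client's destination_params.
    # MockClient and the chosen values are hypothetical.
    def string_as_bool_or_none(value):
        # Simplified stand-in for galaxy.util.string_as_bool_or_none.
        val = str(value).lower()
        if val in ("true", "yes", "1"):
            return True
        if val == "none":
            return None
        return False

    class MockClient(object):
        destination_params = {
            "dependency_resolution": "remote",   # must be one of "none", "local", "remote"
            "remote_metadata": "true",           # set metadata on the remote LWR side
            "use_remote_datatypes": "false",     # push the local datatypes config instead
        }

    client = MockClient()
    assert client.destination_params.get("dependency_resolution", "local") == "remote"
    assert string_as_bool_or_none(client.destination_params.get("remote_metadata", False)) is True
    assert string_as_bool_or_none(client.destination_params.get("use_remote_datatypes", False)) is False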
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -6,9 +6,9 @@
"""
-from .stager import submit_job, finish_job
+from .stager import submit_job, finish_job, ClientJobDescription
from .client import OutputNotFoundException
from .manager import ClientManager
from .destination import url_to_destination_params
-__all__ = [ClientManager, OutputNotFoundException, url_to_destination_params, finish_job, submit_job]
+__all__ = [ClientManager, OutputNotFoundException, url_to_destination_params, finish_job, submit_job, ClientJobDescription]
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/lwr_client/action_mapper.py
--- a/lib/galaxy/jobs/runners/lwr_client/action_mapper.py
+++ b/lib/galaxy/jobs/runners/lwr_client/action_mapper.py
@@ -21,7 +21,7 @@
>>> from tempfile import NamedTemporaryFile
>>> from os import unlink
>>> f = NamedTemporaryFile(delete=False)
- >>> f.write(json_string)
+ >>> write_result = f.write(json_string.encode('UTF-8'))
>>> f.close()
>>> class MockClient():
... default_file_action = 'none'
@@ -30,23 +30,23 @@
>>> mapper = FileActionMapper(MockClient())
>>> unlink(f.name)
>>> # Test first config line above, implicit path prefix mapper
- >>> mapper.action('/opt/galaxy/tools/filters/catWrapper.py', 'input')
- ('none',)
+ >>> mapper.action('/opt/galaxy/tools/filters/catWrapper.py', 'input')[0] == u'none'
+ True
>>> # Test another (2nd) mapper, this one with a different action
- >>> mapper.action('/galaxy/data/files/000/dataset_1.dat', 'input')
- ('transfer',)
+ >>> mapper.action('/galaxy/data/files/000/dataset_1.dat', 'input')[0] == u'transfer'
+ True
>>> # Always at least copy work_dir outputs.
- >>> mapper.action('/opt/galaxy/database/working_directory/45.sh', 'work_dir')
- ('copy',)
+ >>> mapper.action('/opt/galaxy/database/working_directory/45.sh', 'work_dir')[0] == u'copy'
+ True
>>> # Test glob mapper (matching test)
- >>> mapper.action('/cool/bamfiles/projectABC/study1/patient3.bam', 'input')
- ('copy',)
+ >>> mapper.action('/cool/bamfiles/projectABC/study1/patient3.bam', 'input')[0] == u'copy'
+ True
>>> # Test glob mapper (non-matching test)
- >>> mapper.action('/cool/bamfiles/projectABC/study1/patient3.bam.bai', 'input')
- ('none',)
+ >>> mapper.action('/cool/bamfiles/projectABC/study1/patient3.bam.bai', 'input')[0] == u'none'
+ True
>>> # Regex mapper test.
- >>> mapper.action('/old/galaxy/data/dataset_10245.dat', 'input')
- ('copy',)
+ >>> mapper.action('/old/galaxy/data/dataset_10245.dat', 'input')[0] == u'copy'
+ True
"""
def __init__(self, client):
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/lwr_client/client.py
--- a/lib/galaxy/jobs/runners/lwr_client/client.py
+++ b/lib/galaxy/jobs/runners/lwr_client/client.py
@@ -50,7 +50,7 @@
return "No remote output found for path %s" % self.path
-class Client(object):
+class JobClient(object):
"""
Objects of this client class perform low-level communication with a remote LWR server.
@@ -161,25 +161,23 @@
raise Exception("Unknown output_type returned from LWR server %s" % output_type)
return output_path
- def fetch_work_dir_output(self, source, working_directory, output_path, action='transfer'):
+ def fetch_work_dir_output(self, name, working_directory, output_path, action='transfer'):
"""
Download an output dataset specified with from_work_dir from the
remote server.
**Parameters**
- source : str
+ name : str
Path in job's working_directory to find output in.
working_directory : str
Local working_directory for the job.
output_path : str
Full path to output dataset.
"""
- output = open(output_path, "wb")
- name = os.path.basename(source)
if action == 'transfer':
- self.__raw_download_output(name, self.job_id, "work_dir", output)
- elif action == 'copy':
+ self.__raw_download_output(name, self.job_id, "work_dir", output_path)
+ else: # Even if action is none - LWR has a different work_dir so this needs to be copied.
lwr_path = self._output_path(name, self.job_id, 'work_dir')['path']
self._copy(lwr_path, output_path)
@@ -199,7 +197,7 @@
}
self._raw_execute("download_output", output_params, output_path=output_path)
- def launch(self, command_line):
+ def launch(self, command_line, requirements=[]):
"""
Run or queue up the execution of the supplied
`command_line` on the remote server.
@@ -213,6 +211,8 @@
submit_params = self._submit_params
if submit_params:
launch_params['params'] = dumps(submit_params)
+ if requirements:
+ launch_params['requirements'] = dumps([requirement.to_dict() for requirement in requirements])
return self._raw_execute("launch", launch_params)
def kill(self):
@@ -285,13 +285,13 @@
shutil.copyfile(source, destination)
-class InputCachingClient(Client):
+class InputCachingJobClient(JobClient):
"""
+ Beta client that caches staged files to prevent duplication.
"""
def __init__(self, destination_params, job_id, job_manager_interface, client_cacher):
- super(InputCachingClient, self).__init__(destination_params, job_id, job_manager_interface)
+ super(InputCachingJobClient, self).__init__(destination_params, job_id, job_manager_interface)
self.client_cacher = client_cacher
@parseJson()
@@ -326,3 +326,55 @@
@parseJson()
def file_available(self, path):
return self._raw_execute("file_available", {"path": path})
+
+
+class ObjectStoreClient(object):
+
+ def __init__(self, lwr_interface):
+ self.lwr_interface = lwr_interface
+
+ @parseJson()
+ def exists(self, **kwds):
+ return self._raw_execute("object_store_exists", args=self.__data(**kwds))
+
+ @parseJson()
+ def file_ready(self, **kwds):
+ return self._raw_execute("object_store_file_ready", args=self.__data(**kwds))
+
+ @parseJson()
+ def create(self, **kwds):
+ return self._raw_execute("object_store_create", args=self.__data(**kwds))
+
+ @parseJson()
+ def empty(self, **kwds):
+ return self._raw_execute("object_store_empty", args=self.__data(**kwds))
+
+ @parseJson()
+ def size(self, **kwds):
+ return self._raw_execute("object_store_size", args=self.__data(**kwds))
+
+ @parseJson()
+ def delete(self, **kwds):
+ return self._raw_execute("object_store_delete", args=self.__data(**kwds))
+
+ @parseJson()
+ def get_data(self, **kwds):
+ return self._raw_execute("object_store_get_data", args=self.__data(**kwds))
+
+ @parseJson()
+ def get_filename(self, **kwds):
+ return self._raw_execute("object_store_get_filename", args=self.__data(**kwds))
+
+ @parseJson()
+ def update_from_file(self, **kwds):
+ return self._raw_execute("object_store_update_from_file", args=self.__data(**kwds))
+
+ @parseJson()
+ def get_store_usage_percent(self):
+ return self._raw_execute("object_store_get_store_usage_percent", args={})
+
+ def __data(self, **kwds):
+ return kwds
+
+ def _raw_execute(self, command, args={}):
+ return self.lwr_interface.execute(command, args, data=None, input_path=None, output_path=None)
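As a rough usage sketch (not part of the changeset), the new ObjectStoreClient is meant to be obtained through the ObjectStoreClientManager added to manager.py below. The URL, the 'url' destination parameter, and the object_id keyword here are assumptions chosen for illustration; the keyword arguments are simply forwarded as the args of the corresponding object_store_* command.

    # Hypothetical usage of the new object store client. The destination
    # parameters and keyword arguments are placeholders; only the manager/client
    # classes and method names come from this changeset.
    from galaxy.jobs.runners.lwr_client.manager import ObjectStoreClientManager

    manager = ObjectStoreClientManager()                               # HTTP-backed by default
    client = manager.get_client({"url": "http://lwr.example.org/"})    # assumed destination params
    if client.exists(object_id=42):                                    # -> "object_store_exists"
        print(client.size(object_id=42))                               # -> "object_store_size"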
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/lwr_client/destination.py
--- a/lib/galaxy/jobs/runners/lwr_client/destination.py
+++ b/lib/galaxy/jobs/runners/lwr_client/destination.py
@@ -51,9 +51,10 @@
>>> destination_params = {"private_token": "12345", "submit_native_specification": "-q batch"}
>>> result = submit_params(destination_params)
- >>> result.items()
- [('native_specification', '-q batch')]
+ >>> result
+ {'native_specification': '-q batch'}
"""
- return dict([(key[len(SUBMIT_PREFIX):], value)
- for key, value in (destination_params or {}).iteritems()
+ destination_params = destination_params or {}
+ return dict([(key[len(SUBMIT_PREFIX):], destination_params[key])
+ for key in destination_params
if key.startswith(SUBMIT_PREFIX)])
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/lwr_client/manager.py
--- a/lib/galaxy/jobs/runners/lwr_client/manager.py
+++ b/lib/galaxy/jobs/runners/lwr_client/manager.py
@@ -5,10 +5,22 @@
from queue import Queue
from threading import Thread
from os import getenv
-from urllib import urlencode
-from StringIO import StringIO
+try:
+ from urllib import urlencode
+except ImportError:
+ from urllib.parse import urlencode
+try:
+ from StringIO import StringIO as BytesIO
+except ImportError:
+ from io import BytesIO
+try:
+ from six import text_type
+except ImportError:
+ from galaxy.util import unicodify as text_type
-from .client import Client, InputCachingClient
+from .client import JobClient
+from .client import InputCachingJobClient
+from .client import ObjectStoreClient
from .transport import get_transport
from .util import TransferEventManager
from .destination import url_to_destination_params
@@ -27,10 +39,10 @@
"""
def __init__(self, **kwds):
if 'job_manager' in kwds:
- self.job_manager_interface_class = LocalJobManagerInterface
+ self.job_manager_interface_class = LocalLwrInterface
self.job_manager_interface_args = dict(job_manager=kwds['job_manager'], file_cache=kwds['file_cache'])
else:
- self.job_manager_interface_class = HttpJobManagerInterface
+ self.job_manager_interface_class = HttpLwrInterface
transport_type = kwds.get('transport_type', None)
transport = get_transport(transport_type)
self.job_manager_interface_args = dict(transport=transport)
@@ -40,11 +52,11 @@
if cache:
log.info("Setting LWR client class to caching variant.")
self.client_cacher = ClientCacher(**kwds)
- self.client_class = InputCachingClient
+ self.client_class = InputCachingJobClient
self.extra_client_kwds = {"client_cacher": self.client_cacher}
else:
log.info("Setting LWR client class to standard, non-caching variant.")
- self.client_class = Client
+ self.client_class = JobClient
self.extra_client_kwds = {}
def get_client(self, destination_params, job_id):
@@ -55,11 +67,35 @@
return self.client_class(destination_params, job_id, job_manager_interface, **self.extra_client_kwds)
def __parse_destination_params(self, destination_params):
- if isinstance(destination_params, str) or isinstance(destination_params, unicode):
+ try:
+ unicode_type = unicode
+ except NameError:
+ unicode_type = str
+ if isinstance(destination_params, str) or isinstance(destination_params, unicode_type):
destination_params = url_to_destination_params(destination_params)
return destination_params
+class ObjectStoreClientManager(object):
+
+ def __init__(self, **kwds):
+ if 'object_store' in kwds:
+ self.interface_class = LocalLwrInterface
+ self.interface_args = dict(object_store=kwds['object_store'])
+ else:
+ self.interface_class = HttpLwrInterface
+ transport_type = kwds.get('transport_type', None)
+ transport = get_transport(transport_type)
+ self.interface_args = dict(transport=transport)
+ self.extra_client_kwds = {}
+
+ def get_client(self, client_params):
+ interface_class = self.interface_class
+ interface_args = dict(destination_params=client_params, **self.interface_args)
+ interface = interface_class(**interface_args)
+ return ObjectStoreClient(interface)
+
+
class JobManagerInteface(object):
"""
Abstract base class describes how client communicates with remote job
@@ -76,7 +112,7 @@
"""
-class HttpJobManagerInterface(object):
+class HttpLwrInterface(object):
def __init__(self, destination_params, transport):
self.transport = transport
@@ -92,16 +128,18 @@
def __build_url(self, command, args):
if self.private_key:
args["private_key"] = self.private_key
- data = urlencode(args)
+ arg_bytes = dict([(k, text_type(args[k]).encode('utf-8')) for k in args])
+ data = urlencode(arg_bytes)
url = self.remote_host + command + "?" + data
return url
-class LocalJobManagerInterface(object):
+class LocalLwrInterface(object):
- def __init__(self, destination_params, job_manager, file_cache):
+ def __init__(self, destination_params, job_manager=None, file_cache=None, object_store=None):
self.job_manager = job_manager
self.file_cache = file_cache
+ self.object_store = object_store
def __app_args(self):
## Arguments that would be specified from LwrApp if running
@@ -109,10 +147,12 @@
return {
'manager': self.job_manager,
'file_cache': self.file_cache,
+ 'object_store': self.object_store,
'ip': None
}
def execute(self, command, args={}, data=None, input_path=None, output_path=None):
+ # If data set, should be unicode (on Python 2) or str (on Python 3).
from lwr import routes
from lwr.framework import build_func_args
controller = getattr(routes, command)
@@ -129,9 +169,9 @@
def __build_body(self, data, input_path):
if data is not None:
- return StringIO(data)
+ return BytesIO(data.encode('utf-8'))
elif input_path is not None:
- return open(input_path, 'r')
+ return open(input_path, 'rb')
else:
return None
@@ -188,4 +228,4 @@
int_val = int(val)
return int_val
-__all__ = [ClientManager, HttpJobManagerInterface]
+__all__ = [ClientManager, ObjectStoreClientManager, HttpLwrInterface]
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/lwr_client/stager.py
--- a/lib/galaxy/jobs/runners/lwr_client/stager.py
+++ b/lib/galaxy/jobs/runners/lwr_client/stager.py
@@ -1,12 +1,21 @@
from os.path import abspath, basename, join, exists
from os import listdir, sep
from re import findall
+from re import compile
+from io import open
+from contextlib import contextmanager
from .action_mapper import FileActionMapper
from logging import getLogger
log = getLogger(__name__)
+# All output files marked with from_work_dir attributes will be copied or downloaded;
+# this pattern picks up additional files to copy back, such as those
+# associated with multiple outputs and metadata configuration. Set to .* to just
+# copy everything.
+COPY_FROM_WORKING_DIRECTORY_PATTERN = compile(r"primary_.*|galaxy.json|metadata_.*")
+
class JobInputs(object):
"""
@@ -24,22 +33,24 @@
>>> import tempfile
>>> tf = tempfile.NamedTemporaryFile()
>>> def setup_inputs(tf):
- ... open(tf.name, "w").write("world /path/to/input the rest")
- ... inputs = JobInputs("hello /path/to/input", [tf.name])
+ ... open(tf.name, "w").write(u"world /path/to/input the rest")
+ ... inputs = JobInputs(u"hello /path/to/input", [tf.name])
... return inputs
>>> inputs = setup_inputs(tf)
- >>> inputs.rewrite_paths("/path/to/input", 'C:\\input')
- >>> inputs.rewritten_command_line
- 'hello C:\\\\input'
- >>> inputs.rewritten_config_files[tf.name]
- 'world C:\\\\input the rest'
+ >>> inputs.rewrite_paths(u"/path/to/input", u'C:\\input')
+ >>> inputs.rewritten_command_line == u'hello C:\\\\input'
+ True
+ >>> inputs.rewritten_config_files[tf.name] == u'world C:\\\\input the rest'
+ True
>>> tf.close()
>>> tf = tempfile.NamedTemporaryFile()
>>> inputs = setup_inputs(tf)
- >>> inputs.find_referenced_subfiles('/path/to')
- ['/path/to/input']
+ >>> inputs.find_referenced_subfiles('/path/to') == [u'/path/to/input']
+ True
>>> inputs.path_referenced('/path/to')
True
+ >>> inputs.path_referenced(u'/path/to')
+ True
>>> inputs.path_referenced('/path/to/input')
True
>>> inputs.path_referenced('/path/to/notinput')
@@ -92,7 +103,7 @@
self.rewritten_command_line = self.rewritten_command_line.replace(local_path, remote_path)
def __rewrite_config_files(self, local_path, remote_path):
- for config_file, rewritten_contents in self.rewritten_config_files.iteritems():
+ for config_file, rewritten_contents in self.rewritten_config_files.items():
self.rewritten_config_files[config_file] = rewritten_contents.replace(local_path, remote_path)
def __items(self):
@@ -140,7 +151,7 @@
For each file that has been transferred and renamed, updated
command_line and configfiles to reflect that rewrite.
"""
- for local_path, remote_path in self.file_renames.iteritems():
+ for local_path, remote_path in self.file_renames.items():
self.job_inputs.rewrite_paths(local_path, remote_path)
def __action(self, path, type):
@@ -154,35 +165,24 @@
**Parameters**
- client : Client
+ client : JobClient
LWR client object.
- command_line : str
- The local command line to execute, this will be rewritten for the remote server.
- config_files : list
- List of Galaxy 'configfile's produced for this job. These will be rewritten and sent to remote server.
- input_files : list
- List of input files used by job. These will be transferred and references rewritten.
- output_files : list
- List of output_files produced by job.
- tool_dir : str
- Directory containing tool to execute (if a wrapper is used, it will be transferred to remote server).
- working_directory : str
- Local path created by Galaxy for running this job.
-
+ client_job_description : ClientJobDescription
+ Description of client view of job to stage and execute remotely.
"""
- def __init__(self, client, tool, command_line, config_files, input_files, output_files, working_directory):
+ def __init__(self, client, client_job_description, job_config):
"""
"""
self.client = client
- self.command_line = command_line
- self.config_files = config_files
- self.input_files = input_files
- self.output_files = output_files
- self.tool_id = tool.id
- self.tool_version = tool.version
- self.tool_dir = abspath(tool.tool_dir)
- self.working_directory = working_directory
+ self.command_line = client_job_description.command_line
+ self.config_files = client_job_description.config_files
+ self.input_files = client_job_description.input_files
+ self.output_files = client_job_description.output_files
+ self.tool_id = client_job_description.tool.id
+ self.tool_version = client_job_description.tool.version
+ self.tool_dir = abspath(client_job_description.tool.tool_dir)
+ self.working_directory = client_job_description.working_directory
# Setup job inputs, these will need to be rewritten before
# shipping off to remote LWR server.
@@ -190,7 +190,7 @@
self.transfer_tracker = TransferTracker(client, self.job_inputs)
- self.__handle_setup()
+ self.__handle_setup(job_config)
self.__initialize_referenced_tool_files()
self.__upload_tool_files()
self.__upload_input_files()
@@ -201,8 +201,9 @@
self.__handle_rewrites()
self.__upload_rewritten_config_files()
- def __handle_setup(self):
- job_config = self.client.setup(self.tool_id, self.tool_version)
+ def __handle_setup(self, job_config):
+ if not job_config:
+ job_config = self.client.setup(self.tool_id, self.tool_version)
self.new_working_directory = job_config['working_directory']
self.new_outputs_directory = job_config['outputs_directory']
@@ -283,7 +284,7 @@
self.transfer_tracker.rewrite_input_paths()
def __upload_rewritten_config_files(self):
- for config_file, new_config_contents in self.job_inputs.rewritten_config_files.iteritems():
+ for config_file, new_config_contents in self.job_inputs.rewritten_config_files.items():
self.client.put_file(config_file, input_type='config', contents=new_config_contents)
def get_rewritten_command_line(self):
@@ -294,32 +295,66 @@
return self.job_inputs.rewritten_command_line
-def finish_job(client, cleanup_job, job_completed_normally, working_directory, work_dir_outputs, output_files):
+def finish_job(client, cleanup_job, job_completed_normally, working_directory, work_dir_outputs, output_files, working_directory_contents=[]):
"""
"""
- action_mapper = FileActionMapper(client)
download_failure_exceptions = []
if job_completed_normally:
- for source_file, output_file in work_dir_outputs:
- try:
+ download_failure_exceptions = __download_results(client, working_directory, work_dir_outputs, output_files, working_directory_contents)
+ return __clean(download_failure_exceptions, cleanup_job, client)
+
+
+def __download_results(client, working_directory, work_dir_outputs, output_files, working_directory_contents):
+ action_mapper = FileActionMapper(client)
+ downloaded_working_directory_files = []
+ exception_tracker = DownloadExceptionTracker()
+
+ # Fetch explicit working directory outputs.
+ for source_file, output_file in work_dir_outputs:
+ name = basename(source_file)
+ with exception_tracker():
+ action = action_mapper.action(output_file, 'output')
+ client.fetch_work_dir_output(name, working_directory, output_file, action[0])
+ downloaded_working_directory_files.append(name)
+ # Remove from full output_files list so don't try to download directly.
+ output_files.remove(output_file)
+
+ # Fetch output files.
+ for output_file in output_files:
+ with exception_tracker():
+ action = action_mapper.action(output_file, 'output')
+ client.fetch_output(output_file, working_directory=working_directory, action=action[0])
+
+ # Fetch remaining working directory outputs of interest.
+ for name in working_directory_contents:
+ if name in downloaded_working_directory_files:
+ continue
+ if COPY_FROM_WORKING_DIRECTORY_PATTERN.match(name):
+ with exception_tracker():
+ output_file = join(working_directory, name)
action = action_mapper.action(output_file, 'output')
- client.fetch_work_dir_output(source_file, working_directory, output_file, action[0])
- except Exception, e:
- download_failure_exceptions.append(e)
- # Remove from full output_files list so don't try to download directly.
- output_files.remove(output_file)
- for output_file in output_files:
- try:
- action = action_mapper.action(output_file, 'output')
- client.fetch_output(output_file, working_directory=working_directory, action=action[0])
- except Exception, e:
- download_failure_exceptions.append(e)
- return __clean(download_failure_exceptions, cleanup_job, client)
+ client.fetch_work_dir_output(name, working_directory, output_file, action=action[0])
+ downloaded_working_directory_files.append(name)
+
+ return exception_tracker.download_failure_exceptions
+
+
+class DownloadExceptionTracker(object):
+
+ def __init__(self):
+ self.download_failure_exceptions = []
+
+ @contextmanager
+ def __call__(self):
+ try:
+ yield
+ except Exception as e:
+ self.download_failure_exceptions.append(e)
def __clean(download_failure_exceptions, cleanup_job, client):
failed = (len(download_failure_exceptions) > 0)
- if not failed or cleanup_job == "always":
+ if (not failed and cleanup_job != "never") or cleanup_job == "always":
try:
client.clean()
except:
@@ -327,25 +362,56 @@
return failed
-def submit_job(client, tool, command_line, config_files, input_files, output_files, working_directory):
+def submit_job(client, client_job_description, job_config=None):
"""
"""
- file_stager = FileStager(client, tool, command_line, config_files, input_files, output_files, working_directory)
+ file_stager = FileStager(client, client_job_description, job_config)
rebuilt_command_line = file_stager.get_rewritten_command_line()
job_id = file_stager.job_id
- client.launch(rebuilt_command_line)
+ client.launch(rebuilt_command_line, requirements=client_job_description.requirements)
return job_id
def _read(path):
"""
Utility method to quickly read small files (config files and tool
- wrappers) into memory as strings.
+ wrappers) into memory as bytes.
"""
- input = open(path, "r")
+ input = open(path, "r", encoding="utf-8")
try:
return input.read()
finally:
input.close()
-__all__ = [submit_job, finish_job]
+
+class ClientJobDescription(object):
+ """ A description of how client views job - command_line, inputs, etc..
+
+ **Parameters**
+
+ command_line : str
+ The local command line to execute, this will be rewritten for the remote server.
+ config_files : list
+ List of Galaxy 'configfile's produced for this job. These will be rewritten and sent to remote server.
+ input_files : list
+ List of input files used by job. These will be transferred and references rewritten.
+ output_files : list
+ List of output_files produced by job.
+ tool_dir : str
+ Directory containing tool to execute (if a wrapper is used, it will be transferred to remote server).
+ working_directory : str
+ Local path created by Galaxy for running this job.
+ requirements : list
+ List of requirements for tool execution.
+ """
+
+ def __init__(self, tool, command_line, config_files, input_files, output_files, working_directory, requirements):
+ self.tool = tool
+ self.command_line = command_line
+ self.config_files = config_files
+ self.input_files = input_files
+ self.output_files = output_files
+ self.working_directory = working_directory
+ self.requirements = requirements
+
+__all__ = [submit_job, ClientJobDescription, finish_job]
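For orientation, a sketch of how the runner side is expected to build the new ClientJobDescription and hand it to submit_job. The FakeTool stand-in and every literal path are assumptions for illustration; the constructor arguments match the class defined above, and in the real runner the client comes from get_client_from_wrapper and the requirements list from the tool.

    # Illustrative only; paths and the FakeTool stand-in are hypothetical.
    from galaxy.jobs.runners.lwr_client import ClientJobDescription, submit_job

    class FakeTool(object):
        id = "cat1"
        version = "1.0.0"
        tool_dir = "/opt/galaxy/tools/filters"

    description = ClientJobDescription(
        tool=FakeTool(),
        command_line="cat /galaxy/files/000/dataset_1.dat > /galaxy/files/000/dataset_2.dat",
        config_files=[],
        input_files=["/galaxy/files/000/dataset_1.dat"],
        output_files=["/galaxy/files/000/dataset_2.dat"],
        working_directory="/galaxy/job_working_directory/000/42",
        requirements=[],
    )
    # 'client' would come from the runner (get_client_from_wrapper); job_config may
    # be a previously obtained client.setup(...) result or None:
    #     job_id = submit_job(client, description, job_config=None)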
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/lwr_client/transport/curl.py
--- a/lib/galaxy/jobs/runners/lwr_client/transport/curl.py
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/curl.py
@@ -1,4 +1,7 @@
-from cStringIO import StringIO
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from io import StringIO
try:
from pycurl import Curl
except:
@@ -25,6 +28,8 @@
c.setopt(c.INFILESIZE, filesize)
if data:
c.setopt(c.POST, 1)
+ if type(data).__name__ == 'unicode':
+ data = data.encode('UTF-8')
c.setopt(c.POSTFIELDS, data)
c.perform()
if not output_path:
diff -r 0e9a8f32b5a9c54a91f2af08b49c944b679937c7 -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf lib/galaxy/jobs/runners/lwr_client/transport/standard.py
--- a/lib/galaxy/jobs/runners/lwr_client/transport/standard.py
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/standard.py
@@ -2,22 +2,33 @@
LWR HTTP Client layer based on Python Standard Library (urllib2)
"""
from __future__ import with_statement
+from os.path import getsize
import mmap
-import urllib2
+try:
+ from urllib2 import urlopen
+except ImportError:
+ from urllib.request import urlopen
+try:
+ from urllib2 import Request
+except ImportError:
+ from urllib.request import Request
class Urllib2Transport(object):
def _url_open(self, request, data):
- return urllib2.urlopen(request, data)
+ return urlopen(request, data)
def execute(self, url, data=None, input_path=None, output_path=None):
- request = urllib2.Request(url=url, data=data)
+ request = Request(url=url, data=data)
input = None
try:
if input_path:
- input = open(input_path, 'rb')
- data = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
+ if getsize(input_path):
+ input = open(input_path, 'rb')
+ data = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
+ else:
+ data = b""
response = self._url_open(request, data)
finally:
if input:
@@ -26,7 +37,7 @@
with open(output_path, 'wb') as output:
while True:
buffer = response.read(1024)
- if buffer == "":
+ if not buffer:
break
output.write(buffer)
return response
This diff is so big that we needed to truncate the remainder.
https://bitbucket.org/galaxy/galaxy-central/commits/c28eeb2c4324/
Changeset: c28eeb2c4324
Branch: page-api
User: Kyle Ellrott
Date: 2013-12-18 01:36:08
Summary: Filling out the page and page revision portions of the API.
Affected #: 5 files
diff -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf -r c28eeb2c4324639c38db4ca5f371d48e1d3084d8 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3178,7 +3178,7 @@
self.openid = openid
class Page( object, Dictifiable ):
- dict_element_visible_keys = [ 'id', 'title', 'latest_revision_id', 'slug' ]
+ dict_element_visible_keys = [ 'id', 'title', 'latest_revision_id', 'slug', 'published', 'importable' ]
def __init__( self ):
self.id = None
self.user = None
diff -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf -r c28eeb2c4324639c38db4ca5f371d48e1d3084d8 lib/galaxy/model/search.py
--- a/lib/galaxy/model/search.py
+++ b/lib/galaxy/model/search.py
@@ -476,6 +476,7 @@
DOMAIN = "page"
FIELDS = {
'id': ViewField('id', sqlalchemy_field=(Page, "id"), id_decode=True),
+ 'slug': ViewField('slug', sqlalchemy_field=(Page, "slug")),
'title': ViewField('title', sqlalchemy_field=(Page, "title")),
}
diff -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf -r c28eeb2c4324639c38db4ca5f371d48e1d3084d8 lib/galaxy/webapps/galaxy/api/page_revisions.py
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/page_revisions.py
@@ -0,0 +1,66 @@
+"""
+API for updating Galaxy Pages
+"""
+import logging
+from galaxy import web
+from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController, SharableMixin
+from galaxy.model.search import GalaxySearchEngine
+from galaxy.model.item_attrs import UsesAnnotations
+from galaxy.exceptions import ItemAccessibilityException
+from galaxy.util.sanitize_html import sanitize_html
+
+log = logging.getLogger( __name__ )
+
+class PageRevisionsController( BaseAPIController, SharableItemSecurityMixin, UsesAnnotations, SharableMixin ):
+
+ @web.expose_api
+ def index( self, trans, page_id, **kwd ):
+ r = trans.sa_session.query( trans.app.model.PageRevision ).filter_by( page_id=trans.security.decode_id(page_id) )
+ out = []
+ for page in r:
+ if self.security_check( trans, page, True, True ):
+ out.append( self.encode_all_ids( trans, page.to_dict(), True) )
+ return out
+
+
+ @web.expose_api
+ def create( self, trans, page_id, payload, **kwd ):
+ """
+ payload keys:
+ page_id
+ content
+ """
+ user = trans.get_user()
+ error_str = ""
+
+ if not page_id:
+ error_str = "page_id is required"
+ elif not payload.get("content", None):
+ error_str = "content is required"
+ else:
+
+ # Create the new stored page
+ page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id(page_id) )
+ if page is None:
+ return { "error" : "page not found"}
+
+ if not self.security_check( trans, page, True, True ):
+ return { "error" : "page not found"}
+
+ if 'title' in payload:
+ title = payload['title']
+ else:
+ title = page.title
+
+ page_revision = trans.app.model.PageRevision()
+ page_revision.title = title
+ page_revision.page = page
+ page.latest_revision = page_revision
+ page_revision.content = payload.get("content", "")
+ # Persist
+ session = trans.sa_session
+ session.flush()
+
+ return {"success" : "revision posted"}
+
+ return { "error" : error_str }
diff -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf -r c28eeb2c4324639c38db4ca5f371d48e1d3084d8 lib/galaxy/webapps/galaxy/api/pages.py
--- a/lib/galaxy/webapps/galaxy/api/pages.py
+++ b/lib/galaxy/webapps/galaxy/api/pages.py
@@ -1,15 +1,17 @@
"""
-API for searching Galaxy Datasets
+API for updating Galaxy Pages
"""
import logging
from galaxy import web
-from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController
+from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController, SharableMixin
from galaxy.model.search import GalaxySearchEngine
+from galaxy.model.item_attrs import UsesAnnotations
from galaxy.exceptions import ItemAccessibilityException
+from galaxy.util.sanitize_html import sanitize_html
log = logging.getLogger( __name__ )
-class PagesController( BaseAPIController, SharableItemSecurityMixin ):
+class PagesController( BaseAPIController, SharableItemSecurityMixin, UsesAnnotations, SharableMixin ):
@web.expose_api
def index( self, trans, deleted='False', **kwd ):
@@ -22,7 +24,65 @@
@web.expose_api
def create( self, trans, payload, **kwd ):
- return {}
+ """
+ payload keys:
+ slug
+ title
+ content
+ annotation
+ """
+ user = trans.get_user()
+ error_str = ""
+
+ if not payload.get("title", None):
+ error_str = "Page name is required"
+ elif not payload.get("slug", None):
+ error_str = "Page id is required"
+ elif not self._is_valid_slug( payload["slug"] ):
+ error_str = "Page identifier must consist of only lowercase letters, numbers, and the '-' character"
+ elif trans.sa_session.query( trans.app.model.Page ).filter_by( user=user, slug=payload["slug"], deleted=False ).first():
+ error_str = "Page id must be unique"
+ else:
+ # Create the new stored page
+ page = trans.app.model.Page()
+ page.title = payload['title']
+ page.slug = payload['slug']
+ page_annotation = sanitize_html( payload.get("annotation",""), 'utf-8', 'text/html' )
+ self.add_item_annotation( trans.sa_session, trans.get_user(), page, page_annotation )
+ page.user = user
+ # And the first (empty) page revision
+ page_revision = trans.app.model.PageRevision()
+ page_revision.title = payload['title']
+ page_revision.page = page
+ page.latest_revision = page_revision
+ page_revision.content = payload.get("content", "")
+ # Persist
+ session = trans.sa_session
+ session.add( page )
+ session.flush()
+
+ rval = self.encode_all_ids( trans, page.to_dict(), True)
+ return rval
+
+ return { "error" : error_str }
+
+
+ @web.expose_api
+ def delete( self, trans, id, **kwd ):
+ page_id = id
+ try:
+ page = trans.sa_session.query(self.app.model.Page).get(trans.security.decode_id(page_id))
+ except Exception, e:
+ return { "error" : "Page with ID='%s' can not be found\n Exception: %s" % (page_id, str( e )) }
+
+ # Check that the user has permission to modify the selected page.
+ if page.user != trans.user and not trans.user_is_admin():
+ return { "error" : "Page is not owned by or shared with current user" }
+
+ # Mark the page as deleted
+ page.deleted = True
+ trans.sa_session.flush()
+ return {"success" : "Deleted", "id" : page_id}
@web.expose_api
def show( self, trans, id, deleted='False', **kwd ):
diff -r 0e7881a4d1cf8dee171a46c60088cfe82df1c9cf -r c28eeb2c4324639c38db4ca5f371d48e1d3084d8 lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -171,6 +171,10 @@
#webapp.mapper.connect( 'run_workflow', '/api/workflow/{workflow_id}/library/{library_id}', controller='workflows', action='run', workflow_id=None, library_id=None, conditions=dict(method=["GET"]) )
webapp.mapper.resource( 'search', 'search', path_prefix='/api' )
webapp.mapper.resource( 'page', 'pages', path_prefix="/api")
+ webapp.mapper.resource( 'revision', 'revisions',
+ path_prefix='/api/pages/:page_id',
+ controller='page_revisions',
+ parent_resources=dict( member_name='page', collection_name='pages' ) )
# add as a non-ATOM API call to support the notion of a 'current/working' history unique to the history resource
webapp.mapper.connect( "set_as_current", "/api/histories/{id}/set_as_current",
https://bitbucket.org/galaxy/galaxy-central/commits/aa016b917225/
Changeset: aa016b917225
Branch: page-api
User: Kyle Ellrott
Date: 2013-12-18 23:27:20
Summary: Adding more documentation to newly added API calls
Affected #: 2 files
diff -r c28eeb2c4324639c38db4ca5f371d48e1d3084d8 -r aa016b91722549d96e2b2a7d5bfc855cb20b1a6e lib/galaxy/webapps/galaxy/api/page_revisions.py
--- a/lib/galaxy/webapps/galaxy/api/page_revisions.py
+++ b/lib/galaxy/webapps/galaxy/api/page_revisions.py
@@ -15,6 +15,16 @@
@web.expose_api
def index( self, trans, page_id, **kwd ):
+ """
+ index( self, trans, page_id, **kwd )
+ * GET /api/pages/{page_id}/revisions
+ return a list of Page revisions
+
+ :param page_id: Display the revisions of Page with ID=page_id
+
+ :rtype: list
+ :returns: dictionaries containing different revisions of the page
+ """
r = trans.sa_session.query( trans.app.model.PageRevision ).filter_by( page_id=trans.security.decode_id(page_id) )
out = []
for page in r:
@@ -26,9 +36,17 @@
@web.expose_api
def create( self, trans, page_id, payload, **kwd ):
"""
- payload keys:
- page_id
- content
+ create( self, trans, page_id, payload, **kwd )
+ * POST /api/pages/{page_id}/revisions
+ Create a new revision for a page
+
+ :param page_id: Add revision to Page with ID=page_id
+ :param payload: A dictionary containing::
+ 'title' = New title of the page
+ 'content' = New content of the page
+
+ :rtype: dictionary
+ :returns: Dictionary with 'success' or 'error' element to indicate the result of the request
"""
user = trans.get_user()
error_str = ""
diff -r c28eeb2c4324639c38db4ca5f371d48e1d3084d8 -r aa016b91722549d96e2b2a7d5bfc855cb20b1a6e lib/galaxy/webapps/galaxy/api/pages.py
--- a/lib/galaxy/webapps/galaxy/api/pages.py
+++ b/lib/galaxy/webapps/galaxy/api/pages.py
@@ -15,7 +15,19 @@
@web.expose_api
def index( self, trans, deleted='False', **kwd ):
+ """
+ index( self, trans, deleted='False', **kwd )
+ * GET /api/pages
+ return a list of Pages viewable by the user
+
+ :param deleted: Display deleted pages
+
+ :rtype: list
+ :returns: dictionaries containing summary or detailed Page information
+ """
r = trans.sa_session.query( trans.app.model.Page )
+ if not deleted:
+ r = r.filter_by(deleted=False)
out = []
for row in r:
out.append( self.encode_all_ids( trans, row.to_dict(), True) )
@@ -25,11 +37,18 @@
@web.expose_api
def create( self, trans, payload, **kwd ):
"""
- payload keys:
- slug
- title
- content
- annotation
+ create( self, trans, payload, **kwd )
+ * POST /api/pages
+ Create a page and return a dictionary containing the Page summary
+
+ :param payload: dictionary structure containing::
+ 'slug' = The title slug for the page URL, must be unique
+ 'title' = Title of the page
+ 'content' = HTML contents of the page
+ 'annotation' = Annotation that will be attached to the page
+
+ :rtype: dict
+ :returns: Dictionary return of the Page.to_dict call
"""
user = trans.get_user()
error_str = ""
@@ -69,6 +88,16 @@
@web.expose_api
def delete( self, trans, id, **kwd ):
+ """
+ delete( self, trans, id, **kwd )
+ * DELETE /api/pages/{id}
+ Delete a page and return a dictionary indicating success or error
+
+ :param id: ID of page to be deleted
+
+ :rtype: dict
+ :returns: Dictionary with 'success' or 'error' element to indicate the result of the request
+ """
page_id = id
try:
page = trans.sa_session.query(self.app.model.Page).get(trans.security.decode_id(page_id))
@@ -85,7 +114,17 @@
return {"success" : "Deleted", "id" : page_id}
@web.expose_api
- def show( self, trans, id, deleted='False', **kwd ):
+ def show( self, trans, id, **kwd ):
+ """
+ show( self, trans, id, **kwd )
+ * GET /api/pages/{id}
+ View a page summary and the content of the latest revision
+
+ :param id: ID of page to be displayed
+
+ :rtype: dict
+ :returns: Dictionary return of the Page.to_dict call with the 'content' field populated by the most recent revision
+ """
page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id( id ) )
rval = self.encode_all_ids( trans, page.to_dict(), True)
rval['content'] = page.latest_revision.content
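Similarly, a hedged end-to-end sketch of the Pages API documented above: create a page, list pages, then fetch the latest content. Host, API key, and payload values are placeholders, and the JSON-body convention is assumed to match other Galaxy API endpoints.

    # Usage sketch only; host, key and payload values are placeholders.
    import requests

    galaxy_url = "http://localhost:8080"
    api_key = "YOUR_API_KEY"

    # POST /api/pages - create a page (slug must be unique for the user).
    page = requests.post("%s/api/pages" % galaxy_url,
                         params={"key": api_key},
                         json={"slug": "analysis-notes",
                               "title": "Analysis notes",
                               "content": "<p>First draft</p>",
                               "annotation": "Notes for the RNA-seq run"}).json()

    # GET /api/pages - list pages viewable by the user.
    pages = requests.get("%s/api/pages" % galaxy_url, params={"key": api_key}).json()

    # GET /api/pages/{id} - summary plus the content of the latest revision.
    detail = requests.get("%s/api/pages/%s" % (galaxy_url, page["id"]),
                          params={"key": api_key}).json()
    print(detail.get("content"))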
https://bitbucket.org/galaxy/galaxy-central/commits/05ff06be8f05/
Changeset: 05ff06be8f05
Branch: page-api
User: Kyle Ellrott
Date: 2013-12-18 23:31:42
Summary: Adding ability to select Pages using the 'deleted' field
Affected #: 1 file
diff -r aa016b91722549d96e2b2a7d5bfc855cb20b1a6e -r 05ff06be8f055d4d52b798615df007e5f0094ecb lib/galaxy/model/search.py
--- a/lib/galaxy/model/search.py
+++ b/lib/galaxy/model/search.py
@@ -478,6 +478,7 @@
'id': ViewField('id', sqlalchemy_field=(Page, "id"), id_decode=True),
'slug': ViewField('slug', sqlalchemy_field=(Page, "slug")),
'title': ViewField('title', sqlalchemy_field=(Page, "title")),
+ 'deleted': ViewField('deleted', sqlalchemy_field=(Page, "deleted"))
}
def search(self, trans):
https://bitbucket.org/galaxy/galaxy-central/commits/65a083eee933/
Changeset: 65a083eee933
Branch: page-api
User: Kyle Ellrott
Date: 2013-12-20 20:31:11
Summary: Default merge
Affected #: 84 files
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f doc/source/lib/galaxy.webapps.galaxy.api.rst
--- a/doc/source/lib/galaxy.webapps.galaxy.api.rst
+++ b/doc/source/lib/galaxy.webapps.galaxy.api.rst
@@ -302,6 +302,14 @@
:undoc-members:
:show-inheritance:
+:mod:`lda_datasets` Module
+--------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.lda_datasets
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
:mod:`libraries` Module
-----------------------
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f install_and_test_tool_shed_repositories.sh
--- a/install_and_test_tool_shed_repositories.sh
+++ b/install_and_test_tool_shed_repositories.sh
@@ -2,14 +2,20 @@
# A good place to look for nose info: http://somethingaboutorange.com/mrl/projects/nose/
-# The test/install_and_test_tool_shed_repositories/functional_tests.py can not be executed directly, because it must have certain functional test definitions
-# in sys.argv. Running it through this shell script is the best way to ensure that it has the required definitions.
+# The test/install_and_test_tool_shed_repositories/functional_tests.py cannot be executed directly because it must
+# have certain functional test definitions in sys.argv. Running it through this shell script is the best way to
+# ensure that it has the required definitions.
-# This script requires the following environment variables:
+# This script requires setting of the following environment variables:
# GALAXY_INSTALL_TEST_TOOL_SHED_API_KEY - must be set to the API key for the tool shed that is being checked.
# GALAXY_INSTALL_TEST_TOOL_SHED_URL - must be set to a URL that the tool shed is listening on.
-# If the tool shed url is not specified in tool_sheds_conf.xml, GALAXY_INSTALL_TEST_TOOL_SHEDS_CONF must be set to a tool sheds configuration file
-# that does specify that url, otherwise repository installation will fail.
+
+# If the tool shed url is not specified in tool_sheds_conf.xml, GALAXY_INSTALL_TEST_TOOL_SHEDS_CONF must be set to
+# a tool sheds configuration file that does specify that url or repository installation will fail.
+
+# This script accepts the command line option -w to select which set of tests to run. The default behavior is to test
+# tool_dependency_definition repositories first and then repositories with tools. Provide the value 'tool_dependency_definitions'
+# to test only tool_dependency_definition repositories or 'repositories_with_tools' to test only repositories with tools.
if [ -z $GALAXY_INSTALL_TEST_TOOL_SHED_API_KEY ] ; then
echo "This script requires the GALAXY_INSTALL_TEST_TOOL_SHED_API_KEY environment variable to be set and non-empty."
@@ -37,7 +43,45 @@
fi
fi
-python test/install_and_test_tool_shed_repositories/functional_tests.py $* -v --with-nosehtml --html-report-file \
- test/install_and_test_tool_shed_repositories/run_functional_tests.html \
- test/install_and_test_tool_shed_repositories/functional/test_install_repositories.py \
- test/functional/test_toolbox.py
+test_tool_dependency_definitions () {
+ # Test installation of repositories of type tool_dependency_definition.
+ python test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py $* -v --with-nosehtml --html-report-file \
+ test/install_and_test_tool_shed_repositories/tool_dependency_definitions/run_functional_tests.html \
+ test/install_and_test_tool_shed_repositories/functional/test_install_repositories.py \
+ test/functional/test_toolbox.py
+}
+
+test_repositories_with_tools () {
+ # Test installation of repositories that contain valid tools with defined functional tests and a test-data directory containing test files.
+ python test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py $* -v --with-nosehtml --html-report-file \
+ test/install_and_test_tool_shed_repositories/repositories_with_tools/run_functional_tests.html \
+ test/install_and_test_tool_shed_repositories/functional/test_install_repositories.py \
+ test/functional/test_toolbox.py
+}
+
+which='both'
+
+while getopts "w:" arg; do
+ case $arg in
+ w)
+ which=$OPTARG
+ ;;
+ esac
+done
+
+case $which in
+ # Use "-w tool_dependency_definitions" when you want to test repositories of type tool_dependency_definition.
+ tool_dependency_definitions)
+ test_tool_dependency_definitions
+ ;;
+ # Use "-w repositories_with_tools" parameter when you want to test repositories that contain tools.
+ repositories_with_tools)
+ test_repositories_with_tools
+ ;;
+ # No received parameters or any received parameter not in [ tool_dependency_definitions, repositories_with_tools ]
+ # will execute both scripts.
+ *)
+ test_tool_dependency_definitions
+ test_repositories_with_tools
+ ;;
+esac
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -19,7 +19,6 @@
import socket
import time
from string import Template
-from itertools import ifilter
import galaxy.datatypes
import galaxy.datatypes.registry
@@ -44,11 +43,6 @@
# Default Value Required for unit tests
datatypes_registry.load_datatypes()
-# When constructing filters with in for a fixed set of ids, maximum
-# number of items to place in the IN statement. Different databases
-# are going to have different limits so it is likely best to not let
-# this be unlimited - filter in Python if over this limit.
-MAX_IN_FILTER_LENGTH = 100
class NoConverterException(Exception):
def __init__(self, value):
@@ -899,32 +893,6 @@
rval = galaxy.datatypes.data.nice_size( rval )
return rval
- def contents_iter( self, **kwds ):
- """
- Fetch filtered list of contents of history.
- """
- python_filter = None
- db_session = object_session( self )
- assert db_session != None
- query = db_session.query( HistoryDatasetAssociation ).filter( HistoryDatasetAssociation.table.c.history_id == self.id )
- deleted = galaxy.util.string_as_bool_or_none( kwds.get( 'deleted', None ) )
- if deleted is not None:
- query = query.filter( HistoryDatasetAssociation.deleted == bool( kwds['deleted'] ) )
- visible = galaxy.util.string_as_bool_or_none( kwds.get( 'visible', None ) )
- if visible is not None:
- query = query.filter( HistoryDatasetAssociation.visible == bool( kwds['visible'] ) )
- if 'ids' in kwds:
- ids = kwds['ids']
- max_in_filter_length = kwds.get('max_in_filter_length', MAX_IN_FILTER_LENGTH)
- if len(ids) < max_in_filter_length:
- query = query.filter( HistoryDatasetAssociation.id.in_(ids) )
- else:
- python_filter = lambda hda: hda.id in ids
- if python_filter:
- return ifilter(python_filter, query)
- else:
- return query
-
def copy_tags_from(self,target_user,source_history):
for src_shta in source_history.tags:
new_shta = src_shta.copy()
@@ -1859,7 +1827,7 @@
class Library( object, Dictifiable ):
permitted_actions = get_permitted_actions( filter='LIBRARY' )
dict_collection_visible_keys = ( 'id', 'name' )
- dict_element_visible_keys = ( 'id', 'deleted', 'name', 'description', 'synopsis' )
+ dict_element_visible_keys = ( 'id', 'deleted', 'name', 'description', 'synopsis', 'root_folder_id' )
def __init__( self, name=None, description=None, synopsis=None, root_folder=None ):
self.name = name or "Unnamed library"
self.description = description
@@ -1926,7 +1894,7 @@
return name
class LibraryFolder( object, Dictifiable ):
- dict_element_visible_keys = ( 'id', 'parent_id', 'name', 'description', 'item_count', 'genome_build' )
+ dict_element_visible_keys = ( 'id', 'parent_id', 'name', 'description', 'item_count', 'genome_build', 'update_time' )
def __init__( self, name=None, description=None, item_count=0, order_id=None ):
self.name = name or "Unnamed folder"
self.description = description
@@ -2092,6 +2060,7 @@
genome_build = ldda.dbkey,
misc_info = ldda.info,
misc_blurb = ldda.blurb,
+ peek = ( lambda ldda: ldda.display_peek() if ldda.peek and ldda.peek != 'no peek' else None )( ldda ),
template_data = template_data )
if ldda.dataset.uuid is None:
rval['uuid'] = None
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1852,8 +1852,9 @@
table = self.table
trans = conn.begin()
try:
- next_hid = select( [table.c.hid_counter], table.c.id == self.id, for_update=True ).scalar()
- table.update( table.c.id == self.id ).execute( hid_counter = ( next_hid + 1 ) )
+ current_hid = select( [table.c.hid_counter], table.c.id == self.id, for_update=True ).scalar()
+ next_hid = current_hid + 1
+ table.update( table.c.id == self.id ).execute( hid_counter = ( next_hid ) )
trans.commit()
return next_hid
except:
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/model/tool_shed_install/__init__.py
--- a/lib/galaxy/model/tool_shed_install/__init__.py
+++ b/lib/galaxy/model/tool_shed_install/__init__.py
@@ -265,8 +265,7 @@
"""Return the repository's tool dependencies that are currently installed, but possibly in an error state."""
installed_dependencies = []
for tool_dependency in self.tool_dependencies:
- if tool_dependency.status in [ ToolDependency.installation_status.INSTALLED,
- ToolDependency.installation_status.ERROR ]:
+ if tool_dependency.status in [ ToolDependency.installation_status.INSTALLED ]:
installed_dependencies.append( tool_dependency )
return installed_dependencies
@@ -442,6 +441,16 @@
return dependencies_being_installed
@property
+ def tool_dependencies_installed_or_in_error( self ):
+ """Return the repository's tool dependencies that are currently installed, but possibly in an error state."""
+ installed_dependencies = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.status in [ ToolDependency.installation_status.INSTALLED,
+ ToolDependency.installation_status.ERROR ]:
+ installed_dependencies.append( tool_dependency )
+ return installed_dependencies
+
+ @property
def tool_dependencies_missing_or_being_installed( self ):
dependencies_missing_or_being_installed = []
for tool_dependency in self.tool_dependencies:
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2733,7 +2733,7 @@
def build_dependency_shell_commands( self ):
"""Return a list of commands to be run to populate the current environment to include this tools requirements."""
if self.tool_shed_repository:
- installed_tool_dependencies = self.tool_shed_repository.installed_tool_dependencies
+ installed_tool_dependencies = self.tool_shed_repository.tool_dependencies_installed_or_in_error
else:
installed_tool_dependencies = None
return self.app.toolbox.dependency_manager.dependency_shell_commands( self.requirements,
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/util/streamball.py
--- a/lib/galaxy/util/streamball.py
+++ b/lib/galaxy/util/streamball.py
@@ -3,6 +3,7 @@
"""
import os
import logging, tarfile
+from galaxy.exceptions import ObjectNotFound
log = logging.getLogger( __name__ )
@@ -14,8 +15,12 @@
self.mode = mode
self.wsgi_status = None
self.wsgi_headeritems = None
- def add( self, file, relpath ):
- self.members[file] = relpath
+ def add( self, file, relpath, check_file=False):
+ if check_file and len(file)>0:
+ if not os.path.isfile(file):
+ raise ObjectNotFound
+ else:
+ self.members[file] = relpath
def stream( self, environ, start_response ):
response_write = start_response( self.wsgi_status, self.wsgi_headeritems )
class tarfileobj:
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -3,6 +3,8 @@
"""
import logging, sys, os, time
+
+from operator import itemgetter
from cgi import escape
from galaxy.util import restore_text, relpath, nice_size, unicodify
from galaxy.web import url_for
@@ -212,6 +214,7 @@
ctime=time.strftime( "%m/%d/%Y %I:%M:%S %p", time.localtime( statinfo.st_ctime ) ) ) )
if not uploads:
rval += '<tr><td colspan="4"><em>Your FTP upload directory contains no files.</em></td></tr>'
+ uploads = sorted(uploads, key=itemgetter("path"))
for upload in uploads:
rval += FTPFileField.trow % ( prefix, self.name, upload['path'], upload['path'], upload['size'], upload['ctime'] )
rval += FTPFileField.tfoot
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -1,5 +1,5 @@
"""
-API operations on the contents of a dataset.
+API operations on the contents of a history dataset.
"""
from galaxy import web
from galaxy.visualization.data_providers.genome import FeatureLocationIndexDataProvider
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/galaxy/api/folder_contents.py
--- a/lib/galaxy/webapps/galaxy/api/folder_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/folder_contents.py
@@ -1,5 +1,5 @@
"""
-API operations on the contents of a library.
+API operations on the contents of a folder.
"""
import logging, os, string, shutil, urllib, re, socket
from cgi import escape, FieldStorage
@@ -11,67 +11,122 @@
log = logging.getLogger( __name__ )
class FolderContentsController( BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems ):
+ """
+ Class controls retrieval, creation and updating of folder contents.
+ """
+
+ def load_folder_contents( self, trans, folder ):
+ """
+ Loads all contents of the folder (folders and datasets), but only the first level.
+ """
+ current_user_roles = trans.get_current_user_roles()
+ is_admin = trans.user_is_admin()
+ content_items = []
+ for subfolder in folder.active_folders:
+ if not is_admin:
+ can_access, folder_ids = trans.app.security_agent.check_folder_contents( trans.user, current_user_roles, subfolder )
+ if (is_admin or can_access) and not subfolder.deleted:
+ subfolder.api_type = 'folder'
+ content_items.append( subfolder )
+ for dataset in folder.datasets:
+ if not is_admin:
+ can_access = trans.app.security_agent.can_access_dataset( current_user_roles, dataset.library_dataset_dataset_association.dataset )
+ if (is_admin or can_access) and not dataset.deleted:
+ dataset.api_type = 'file'
+ content_items.append( dataset )
+ return content_items
@web.expose_api
def index( self, trans, folder_id, **kwd ):
"""
GET /api/folders/{encoded_folder_id}/contents
Displays a collection (list) of a folder's contents (files and folders).
- The /api/library_contents/{encoded_library_id}/contents
- lists everything in a library recursively, which is not what
- we want here. We could add a parameter to use the recursive
- style, but this is meant to act similar to an "ls" directory listing.
+ The encoded folder ID is prefixed with 'F' to distinguish folders from datasets, whose ids carry no prefix.
+ The full path is provided as a separate object in the response to support breadcrumb building.
"""
- rval = []
+ folder_container = []
current_user_roles = trans.get_current_user_roles()
- def traverse( folder ):
- admin = trans.user_is_admin()
- rval = []
- for subfolder in folder.active_folders:
- if not admin:
- can_access, folder_ids = trans.app.security_agent.check_folder_contents( trans.user, current_user_roles, subfolder )
- if (admin or can_access) and not subfolder.deleted:
- subfolder.api_type = 'folder'
- rval.append( subfolder )
- for ld in folder.datasets:
- if not admin:
- can_access = trans.app.security_agent.can_access_dataset( current_user_roles, ld.library_dataset_dataset_association.dataset )
- if (admin or can_access) and not ld.deleted:
- ld.api_type = 'file'
- rval.append( ld )
- return rval
-
- try:
- decoded_folder_id = trans.security.decode_id( folder_id[-16:] )
- except TypeError:
- trans.response.status = 400
- return "Malformed folder id ( %s ) specified, unable to decode." % str( folder_id )
+ if ( folder_id.startswith( 'F' ) ):
+ try:
+ decoded_folder_id = trans.security.decode_id( folder_id[1:] )
+ except TypeError:
+ trans.response.status = 400
+ return "Malformed folder id ( %s ) specified, unable to decode." % str( folder_id )
try:
folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( decoded_folder_id )
- parent_library = folder.parent_library
except:
folder = None
- log.error( "FolderContentsController.index: Unable to retrieve folder %s"
- % folder_id )
+ log.error( "FolderContentsController.index: Unable to retrieve folder with ID: %s" % folder_id )
- # TODO: Find the API's path to this folder if necessary.
- # This was needed in recursive descent, but it's not needed
- # for "ls"-style content checking:
- if not folder or not ( trans.user_is_admin() or trans.app.security_agent.can_access_library_item( current_user_roles, folder, trans.user ) ):
+ # We didn't find the folder, or the user does not have access to it.
+ if not folder:
trans.response.status = 400
return "Invalid folder id ( %s ) specified." % str( folder_id )
+
+ if not ( trans.user_is_admin() or trans.app.security_agent.can_access_library_item( current_user_roles, folder, trans.user ) ):
+ log.warning( "SECURITY: User (id: %s) without proper access rights is trying to load folder with ID of %s" % ( trans.user.id, folder.id ) )
+ trans.response.status = 400
+ return "Invalid folder id ( %s ) specified." % str( folder_id )
+
+ path_to_root = []
+ def build_path ( folder ):
+ """
+ Recursively walk up the folder hierarchy, collecting ( encoded id, name ) pairs for breadcrumb building.
+ """
+ path_to_root = []
+ # This folder has no parent, so it is the library root.
+ if folder.parent_id is None:
+ path_to_root.append( ( 'F' + trans.security.encode_id( folder.id ), folder.name ) )
+ else:
+ # We add the current folder and traverse up one folder.
+ path_to_root.append( ( 'F' + trans.security.encode_id( folder.id ), folder.name ) )
+ upper_folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( folder.parent_id )
+ path_to_root.extend( build_path( upper_folder ) )
+ return path_to_root
+
+ # Return the reversed path so it starts with the library node.
+ full_path = build_path( folder )[::-1]
+ folder_container.append( dict( full_path = full_path ) )
+
+ folder_contents = []
+ time_updated = ''
+ time_created = ''
+ # Go through every item in the folder and include its meta-data.
+ for content_item in self.load_folder_contents( trans, folder ):
+# rval = content_item.to_dict()
+ return_item = {}
+ encoded_id = trans.security.encode_id( content_item.id )
+ time_updated = content_item.update_time.strftime( "%Y-%m-%d %I:%M %p" )
+ time_created = content_item.create_time.strftime( "%Y-%m-%d %I:%M %p" )
+
+ # For folders, also return hierarchy values
+ if content_item.api_type == 'folder':
+ encoded_id = 'F' + encoded_id
+# time_updated = content_item.update_time.strftime( "%Y-%m-%d %I:%M %p" )
+ return_item.update ( dict ( item_count = content_item.item_count ) )
- for content in traverse( folder ):
- encoded_id = trans.security.encode_id( content.id )
- if content.api_type == 'folder':
- encoded_id = 'F' + encoded_id
- rval.append( dict( id = encoded_id,
- type = content.api_type,
- name = content.name,
- url = url_for( 'folder_contents', folder_id=encoded_id ) ) )
- return rval
+ if content_item.api_type == 'file':
+ library_dataset_dict = content_item.to_dict()
+ library_dataset_dict['data_type']
+ library_dataset_dict['file_size']
+ library_dataset_dict['date_uploaded']
+ return_item.update ( dict ( data_type = library_dataset_dict['data_type'],
+ file_size = library_dataset_dict['file_size'],
+ date_uploaded = library_dataset_dict['date_uploaded'] ) )
+
+ # For every item, also return the default meta-data
+ return_item.update( dict( id = encoded_id,
+ type = content_item.api_type,
+ name = content_item.name,
+ time_updated = time_updated,
+ time_created = time_created
+ ) )
+ folder_contents.append( return_item )
+ # Put the data in the container
+ folder_container.append( dict( folder_contents = folder_contents ) )
+ return folder_container
@web.expose_api
def show( self, trans, id, library_id, **kwd ):
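For context, the reworked index() above returns a two-element container; a hypothetical response for GET /api/folders/F<encoded_id>/contents might look roughly like this (ids, names and timestamps are invented):

    example_folder_container = [
        { 'full_path': [ ( 'F0123456789abcdef', 'My Library' ),
                         ( 'Ffedcba9876543210', 'sequencing runs' ) ] },
        { 'folder_contents': [ { 'id': 'F1111111111111111', 'type': 'folder', 'name': 'raw data',
                                 'item_count': 3, 'time_updated': '2013-12-01 10:15 AM',
                                 'time_created': '2013-11-30 09:00 AM' },
                               { 'id': '2222222222222222', 'type': 'file', 'name': 'reads.fastq',
                                 'data_type': 'fastq', 'file_size': '3.2 Mb', 'date_uploaded': '2013-11-30',
                                 'time_updated': '2013-12-01 10:20 AM', 'time_created': '2013-11-30 09:05 AM' } ] } ]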
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -51,28 +51,47 @@
else:
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=True )
- contents_kwds = {}
+ # if ids, return _FULL_ data (as show) for each id passed
if ids:
- ids = map( lambda id: trans.security.decode_id( id ), ids.split( ',' ) )
- contents_kwds[ 'ids' ] = ids
- # If explicit ids given, always used detailed result.
- details = 'all'
+ ids = ids.split( ',' )
+ for index, hda in enumerate( history.datasets ):
+ encoded_hda_id = trans.security.encode_id( hda.id )
+ if encoded_hda_id in ids:
+ #TODO: share code with show
+ rval.append( self._detailed_hda_dict( trans, hda ) )
+
+ # if no ids passed, return a _SUMMARY_ of _all_ datasets in the history
else:
- contents_kwds[ 'deleted' ] = kwd.get( 'deleted', None )
- contents_kwds[ 'visible' ] = kwd.get( 'visible', None )
# details param allows a mixed set of summary and detailed hdas
#TODO: this is getting convoluted due to backwards compat
details = kwd.get( 'details', None ) or []
if details and details != 'all':
details = util.listify( details )
- for hda in history.contents_iter( **contents_kwds ):
- encoded_hda_id = trans.security.encode_id( hda.id )
- detailed = details == 'all' or ( encoded_hda_id in details )
- if detailed:
- rval.append( self._detailed_hda_dict( trans, hda ) )
- else:
- rval.append( self._summary_hda_dict( trans, history_id, hda ) )
+ # by default return all datasets - even if deleted or hidden (defaulting the next switches to None)
+ # if specified return those datasets that match the setting
+ # backwards compat
+ return_deleted = util.string_as_bool_or_none( kwd.get( 'deleted', None ) )
+ return_visible = util.string_as_bool_or_none( kwd.get( 'visible', None ) )
+
+ for hda in history.datasets:
+ # if either return_ setting has been requested (!= None), skip hdas that don't match the request
+ if return_deleted is not None:
+ if( ( return_deleted and not hda.deleted )
+ or ( not return_deleted and hda.deleted ) ):
+ continue
+ if return_visible is not None:
+ if( ( return_visible and not hda.visible )
+ or ( not return_visible and hda.visible ) ):
+ continue
+
+ encoded_hda_id = trans.security.encode_id( hda.id )
+ if( ( encoded_hda_id in details )
+ or ( details == 'all' ) ):
+ rval.append( self._detailed_hda_dict( trans, hda ) )
+ else:
+ rval.append( self._summary_hda_dict( trans, history_id, hda ) )
+
except Exception, e:
# for errors that are not specific to one hda (history lookup or summary list)
rval = "Error in history API at listing contents: " + str( e )
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/galaxy/api/lda_datasets.py
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/lda_datasets.py
@@ -0,0 +1,261 @@
+"""
+API operations on the datasets from library.
+"""
+import glob
+import logging
+import operator
+import os
+import os.path
+import string
+import sys
+import tarfile
+import tempfile
+import urllib
+import urllib2
+import zipfile
+from paste.httpexceptions import HTTPBadRequest
+from galaxy.exceptions import ItemAccessibilityException, MessageException, ItemDeletionException, ObjectNotFound
+from galaxy.security import Action
+from galaxy import util, web
+from galaxy.util.streamball import StreamBall
+from galaxy.web.base.controller import BaseAPIController, UsesVisualizationMixin
+
+import logging
+log = logging.getLogger( __name__ )
+
+class LibraryDatasetsController( BaseAPIController, UsesVisualizationMixin ):
+
+ @web.expose_api
+ def show( self, trans, id, **kwd ):
+ """
+ show( self, trans, id, **kwd )
+ * GET /api/libraries/datasets/{encoded_dataset_id}:
+ Displays information about the dataset identified by the encoded id.
+
+
+ :type id: an encoded id string
+ :param id: the encoded id of the dataset to query
+
+ :rtype: dictionary
+ :returns: detailed dataset information from
+ :func:`galaxy.web.base.controller.UsesVisualizationMixin.get_library_dataset.to_dict()`
+ """
+ # Get dataset.
+ try:
+ dataset = self.get_library_dataset( trans, id = id, check_ownership=False, check_accessible=True )
+ except Exception, e:
+ trans.response.status = 500
+ return str( e )
+ try:
+ # Default: return dataset as dict.
+ rval = dataset.to_dict()
+ except Exception, e:
+ rval = "Error in dataset API at listing contents: " + str( e )
+ log.error( rval + ": %s" % str(e), exc_info=True )
+ trans.response.status = 500
+ return "Error in dataset API at listing contents: " + str( e )
+
+ rval['id'] = trans.security.encode_id(rval['id']);
+ rval['ldda_id'] = trans.security.encode_id(rval['ldda_id']);
+ rval['folder_id'] = 'f' + trans.security.encode_id(rval['folder_id'])
+ trans.response.status = 200
+ return rval
+
+ @web.expose
+ def download( self, trans, format, **kwd ):
+ """
+ download( self, trans, format, **kwd )
+ * GET /api/libraries/datasets/download/{format}
+
+ .. code-block::
+ example:
+ GET localhost:8080/api/libraries/datasets/download/tbz?ldda_ids%255B%255D=a0d84b45643a2678&ldda_ids%255B%255D=fe38c84dcd46c828
+
+ :type format: string
+ :param format: string representing requested archive format
+
+ .. note:: supported formats are: zip, tgz, tbz, uncompressed
+
+ :type lddas[]: an array
+ :param lddas[]: an array of encoded ids
+
+ :rtype: file
+ :returns: either archive with the requested datasets packed inside or a single uncompressed dataset
+
+ :raises: MessageException, ItemDeletionException, ItemAccessibilityException, HTTPBadRequest, OSError, IOError, ObjectNotFound
+ """
+ lddas = []
+ datasets_to_download = kwd['ldda_ids%5B%5D']
+
+ if ( datasets_to_download != None ):
+ datasets_to_download = util.listify( datasets_to_download )
+ for dataset_id in datasets_to_download:
+ try:
+ ldda = self.get_hda_or_ldda( trans, hda_ldda='ldda', dataset_id=dataset_id )
+ lddas.append( ldda )
+ except ItemAccessibilityException:
+ trans.response.status = 403
+ return 'Insufficient rights to access library dataset with id: (%s)' % str( dataset_id )
+ except MessageException:
+ trans.response.status = 400
+ return 'Wrong library dataset id: (%s)' % str( dataset_id )
+ except ItemDeletionException:
+ trans.response.status = 400
+ return 'The item with library dataset id: (%s) is deleted' % str( dataset_id )
+ except HTTPBadRequest, e:
+ return 'http bad request' + str( e.err_msg )
+ except Exception, e:
+ trans.response.status = 500
+ return 'error of unknown kind' + str( e )
+
+ if format in [ 'zip','tgz','tbz' ]:
+ # error = False
+ killme = string.punctuation + string.whitespace
+ trantab = string.maketrans(killme,'_'*len(killme))
+ try:
+ outext = 'zip'
+ if format == 'zip':
+ # Can't use mkstemp - the file must not exist first
+ tmpd = tempfile.mkdtemp()
+ util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, self.app.config.gid )
+ tmpf = os.path.join( tmpd, 'library_download.' + format )
+ if trans.app.config.upstream_gzip:
+ archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True )
+ else:
+ archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
+ archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
+ elif format == 'tgz':
+ if trans.app.config.upstream_gzip:
+ archive = StreamBall( 'w|' )
+ outext = 'tar'
+ else:
+ archive = StreamBall( 'w|gz' )
+ outext = 'tgz'
+ elif format == 'tbz':
+ archive = StreamBall( 'w|bz2' )
+ outext = 'tbz2'
+ except ( OSError, zipfile.BadZipfile ):
+ log.exception( "Unable to create archive for download" )
+ trans.response.status = 500
+ return "Unable to create archive for download, please report this error"
+ except:
+ log.exception( "Unexpected error %s in create archive for download" % sys.exc_info()[0] )
+ trans.response.status = 500
+ return "Unable to create archive for download, please report - %s" % sys.exc_info()[0]
+ composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
+ seen = []
+ for ldda in lddas:
+ ext = ldda.extension
+ is_composite = ext in composite_extensions
+ path = ""
+ parent_folder = ldda.library_dataset.folder
+ while parent_folder is not None:
+ # Exclude the now-hidden "root folder"
+ if parent_folder.parent is None:
+ path = os.path.join( parent_folder.library_root[0].name, path )
+ break
+ path = os.path.join( parent_folder.name, path )
+ parent_folder = parent_folder.parent
+ path += ldda.name
+ while path in seen:
+ path += '_'
+ seen.append( path )
+ zpath = os.path.split(path)[-1] # comes as base_name/fname
+ outfname,zpathext = os.path.splitext(zpath)
+ if is_composite: # need to add all the components from the extra_files_path to the zip
+ if zpathext == '':
+ zpath = '%s.html' % zpath # fake the real nature of the html file
+ try:
+ if format=='zip':
+ archive.add( ldda.dataset.file_name, zpath ) # add the primary of a composite set
+ else:
+ archive.add( ldda.dataset.file_name, zpath, check_file=True ) # add the primary of a composite set
+ except IOError:
+ log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name)
+ trans.response.status = 500
+ return "Unable to create archive for download, please report this error"
+ except ObjectNotFound:
+ log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+ trans.response.status = 500
+ return "Requested dataset does not exist on the host."
+ except:
+ trans.response.status = 500
+ return "Unknown error, please report this error"
+ flist = glob.glob(os.path.join(ldda.dataset.extra_files_path,'*.*')) # glob returns full paths
+ for fpath in flist:
+ efp,fname = os.path.split(fpath)
+ if fname > '':
+ fname = fname.translate(trantab)
+ try:
+ if format=='zip':
+ archive.add( fpath,fname )
+ else:
+ archive.add( fpath,fname, check_file=True )
+ except IOError:
+ log.exception( "Unable to add %s to temporary library download archive %s" % (fname,outfname))
+ trans.response.status = 500
+ return "Unable to create archive for download, please report this error"
+ except ObjectNotFound:
+ log.exception( "Requested dataset %s does not exist on the host." % fpath )
+ trans.response.status = 500
+ return "Requested dataset does not exist on the host."
+ except:
+ trans.response.status = 500
+ return "Unknown error, please report this error"
+ else: # simple case
+ try:
+ if format=='zip':
+ archive.add( ldda.dataset.file_name, path )
+ else:
+ archive.add( ldda.dataset.file_name, path, check_file=True )
+ except IOError:
+ log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name)
+ trans.response.status = 500
+ return "Unable to create archive for download, please report this error"
+ except ObjectNotFound:
+ log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+ trans.response.status = 500
+ return "Requested dataset does not exist on the host."
+ except:
+ trans.response.status = 500
+ return "Unknown error, please report this error"
+ lname = 'selected_dataset'
+ fname = lname.replace( ' ', '_' ) + '_files'
+ if format == 'zip':
+ archive.close()
+ trans.response.set_content_type( "application/octet-stream" )
+ trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % (fname,outext)
+ archive = util.streamball.ZipBall(tmpf, tmpd)
+ archive.wsgi_status = trans.response.wsgi_status()
+ archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+ trans.response.status = 200
+ return archive.stream
+ else:
+ trans.response.set_content_type( "application/x-tar" )
+ trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % (fname,outext)
+ archive.wsgi_status = trans.response.wsgi_status()
+ archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+ trans.response.status = 200
+ return archive.stream
+ elif format == 'uncompressed':
+ if len(lddas) != 1:
+ trans.response.status = 400
+ return 'Wrong request'
+ else:
+ single_dataset = lddas[0]
+ trans.response.set_content_type( single_dataset.get_mime() )
+ fStat = os.stat( ldda.file_name )
+ trans.response.headers[ 'Content-Length' ] = int( fStat.st_size )
+ valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ fname = ldda.name
+ fname = ''.join( c in valid_chars and c or '_' for c in fname )[ 0:150 ]
+ trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s"' % fname
+ try:
+ trans.response.status = 200
+ return open( single_dataset.file_name )
+ except:
+ trans.response.status = 500
+ return 'This dataset contains no content'
+ else:
+ trans.response.status = 400
+ return 'Wrong format parameter specified';
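For illustration, a client-side sketch of the new download endpoint, mirroring the example URL from the docstring above (host and encoded ids are placeholders; authentication handling is omitted):

    import urllib2

    url = ( 'http://localhost:8080/api/libraries/datasets/download/tbz'
            '?ldda_ids%255B%255D=a0d84b45643a2678'
            '&ldda_ids%255B%255D=fe38c84dcd46c828' )
    # The response body is the archive stream; write it to disk.
    with open( 'selected_dataset_files.tbz', 'wb' ) as out:
        out.write( urllib2.urlopen( url ).read() )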
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/galaxy/api/libraries.py
--- a/lib/galaxy/webapps/galaxy/api/libraries.py
+++ b/lib/galaxy/webapps/galaxy/api/libraries.py
@@ -49,9 +49,10 @@
trans.model.Library.table.c.id.in_( accessible_restricted_library_ids ) ) )
rval = []
for library in query:
- item = library.to_dict()
+ item = library.to_dict( view='element' )
item['url'] = url_for( route, id=trans.security.encode_id( library.id ) )
- item['id'] = trans.security.encode_id( item['id'] )
+ item['id'] = 'F' + trans.security.encode_id( item['id'] )
+ item['root_folder_id'] = 'F' + trans.security.encode_id( item['root_folder_id'] )
rval.append( item )
return rval
@@ -131,6 +132,9 @@
rval['name'] = name
rval['id'] = encoded_id
return rval
+
+ def edit( self, trans, payload, **kwd ):
+ return "Not implemented yet"
@web.expose_api
def delete( self, trans, id, **kwd ):
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -1,7 +1,7 @@
import urllib
from galaxy import web, util
-from galaxy.web.base.controller import BaseAPIController, UsesHistoryDatasetAssociationMixin, UsesVisualizationMixin
+from galaxy.web.base.controller import BaseAPIController, UsesHistoryDatasetAssociationMixin, UsesVisualizationMixin, UsesHistoryMixin
from galaxy.visualization.genomes import GenomeRegion
from galaxy.util.json import to_json_string, from_json_string
from galaxy.visualization.data_providers.genome import *
@@ -10,7 +10,7 @@
log = logging.getLogger( __name__ )
-class ToolsController( BaseAPIController, UsesVisualizationMixin ):
+class ToolsController( BaseAPIController, UsesVisualizationMixin, UsesHistoryMixin ):
"""
RESTful controller for interactions with tools.
"""
@@ -86,8 +86,7 @@
# dataset upload.
history_id = payload.get("history_id", None)
if history_id:
- target_history = trans.sa_session.query(trans.app.model.History).get(
- trans.security.decode_id(history_id))
+ target_history = self.get_history( trans, history_id )
else:
target_history = None
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -112,8 +112,6 @@
# ------------------------------------------------------------------------------- #
-
-
if 'workflow_id' not in payload:
# create new
if 'installed_repository_file' in payload:
@@ -241,11 +239,15 @@
visit_input_values( tool.inputs, step.state.inputs, callback )
job, out_data = tool.execute( trans, step.state.inputs, history=history)
outputs[ step.id ] = out_data
+
+ # Do post-job actions.
+ replacement_params = payload.get('replacement_params', {})
for pja in step.post_job_actions:
if pja.action_type in ActionBox.immediate_actions:
- ActionBox.execute(self.app, trans.sa_session, pja, job, replacement_dict=None)
+ ActionBox.execute(trans.app, trans.sa_session, pja, job, replacement_dict=replacement_params)
else:
job.add_post_job_action(pja)
+
for v in out_data.itervalues():
rval['outputs'].append(trans.security.encode_id(v.id))
else:
@@ -278,6 +280,10 @@
return("Workflow is not owned by or shared with current user")
ret_dict = self._workflow_to_dict( trans, stored_workflow );
+ if not ret_dict:
+ #This workflow has a tool that's missing from the distribution
+ trans.response.status = 400
+ return "Workflow cannot be exported due to missing tools."
return ret_dict
@web.expose_api
@@ -453,6 +459,8 @@
for step in workflow.steps:
# Load from database representation
module = module_factory.from_workflow_step( trans, step )
+ if not module:
+ return None
### ----------------------------------- ###
## RPARK EDIT ##
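A hypothetical POST /api/workflows payload showing how the new replacement_params setting feeds immediate post-job actions (all ids and names are placeholders; the payload layout is assumed from the surrounding run code):

    payload = {
        'workflow_id': 'ebfb8f50c6abde6d',
        'history': 'hist_id=f2db41e1fa331b3e',
        'ds_map': { '1': { 'src': 'hda', 'id': '0123456789abcdef' } },
        # Substituted into immediate post-job actions (e.g. a rename action) when the workflow runs.
        'replacement_params': { 'output_name': 'filtered_reads' },
    }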
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -46,12 +46,6 @@
atexit.register( app.shutdown )
# Create the universe WSGI application
webapp = GalaxyWebApplication( app, session_cookie='galaxysession', name='galaxy' )
- # Handle displaying tool help images and README file images contained in repositories installed from the tool shed.
- webapp.add_route( '/admin_toolshed/static/images/:repository_id/:image_file',
- controller='admin_toolshed',
- action='display_image_in_repository',
- repository_id=None,
- image_file=None )
webapp.add_ui_controllers( 'galaxy.webapps.galaxy.controllers', app )
# Force /history to go to /root/history -- needed since the tests assume this
webapp.add_route( '/history', controller='root', action='history' )
@@ -75,22 +69,12 @@
webapp.add_route( '/u/:username/v/:slug', controller='visualization', action='display_by_username_and_slug' )
webapp.add_route( '/search', controller='search', action='index' )
- # Add the web API
+ # ================
+ # ===== API =====
+ # ================
+
webapp.add_api_controllers( 'galaxy.webapps.galaxy.api', app )
- # The /folders section is experimental at this point:
- log.debug( "app.config.api_folders: %s" % app.config.api_folders )
- webapp.mapper.resource( 'folder', 'folders', path_prefix='/api' )
- webapp.mapper.resource( 'content', 'contents',
- controller='folder_contents',
- name_prefix='folder_',
- path_prefix='/api/folders/:folder_id',
- parent_resources=dict( member_name='folder', collection_name='folders' ) )
- webapp.mapper.resource( 'content',
- 'contents',
- controller='library_contents',
- name_prefix='library_',
- path_prefix='/api/libraries/:library_id',
- parent_resources=dict( member_name='library', collection_name='libraries' ) )
+
webapp.mapper.resource( 'content',
'contents',
controller='history_contents',
@@ -102,10 +86,6 @@
controller="datasets",
action="display",
conditions=dict(method=["GET"]))
- webapp.mapper.resource( 'permission',
- 'permissions',
- path_prefix='/api/libraries/:library_id',
- parent_resources=dict( member_name='library', collection_name='libraries' ) )
webapp.mapper.resource( 'user',
'users',
controller='group_users',
@@ -127,11 +107,6 @@
_add_item_tags_controller( webapp,
name_prefix="workflow_",
path_prefix='/api/workflows/:workflow_id' )
-
- _add_item_extended_metadata_controller( webapp,
- name_prefix="library_dataset_",
- path_prefix='/api/libraries/:library_id/contents/:library_content_id' )
-
_add_item_annotation_controller( webapp,
name_prefix="history_content_",
path_prefix='/api/histories/:history_id/contents/:history_content_id' )
@@ -141,7 +116,6 @@
_add_item_annotation_controller( webapp,
name_prefix="workflow_",
path_prefix='/api/workflows/:workflow_id' )
-
_add_item_provenance_controller( webapp,
name_prefix="history_content_",
path_prefix='/api/histories/:history_id/contents/:history_content_id' )
@@ -193,6 +167,64 @@
webapp.mapper.connect("workflow_dict", '/api/workflows/{workflow_id}/download', controller='workflows', action='workflow_dict', conditions=dict(method=['GET']))
# Preserve the following download route for now for dependent applications -- deprecate at some point
webapp.mapper.connect("workflow_dict", '/api/workflows/download/{workflow_id}', controller='workflows', action='workflow_dict', conditions=dict(method=['GET']))
+
+ # =======================
+ # ===== LIBRARY API =====
+ # =======================
+
+ webapp.mapper.connect( 'show_lda_item',
+ '/api/libraries/datasets/:id',
+ controller='lda_datasets',
+ action='show',
+ conditions=dict( method=[ "GET" ] ) )
+
+ webapp.mapper.connect( 'download_lda_items',
+ '/api/libraries/datasets/download/:format',
+ controller='lda_datasets',
+ action='download',
+ conditions=dict( method=[ "POST", "GET" ] ) )
+
+ webapp.mapper.resource_with_deleted( 'library',
+ 'libraries',
+ path_prefix='/api' )
+ webapp.mapper.resource( 'folder',
+ 'folders',
+ path_prefix='/api' )
+
+ webapp.mapper.resource( 'content',
+ 'contents',
+ controller='folder_contents',
+ name_prefix='folder_',
+ path_prefix='/api/folders/:folder_id',
+ parent_resources=dict( member_name='folder', collection_name='folders' ) )
+
+ webapp.mapper.resource( 'content',
+ 'contents',
+ controller='library_contents',
+ name_prefix='library_',
+ path_prefix='/api/libraries/:library_id',
+ parent_resources=dict( member_name='library', collection_name='libraries' ) )
+
+ webapp.mapper.resource( 'permission',
+ 'permissions',
+ path_prefix='/api/libraries/:library_id',
+ parent_resources=dict( member_name='library', collection_name='libraries' ) )
+
+ _add_item_extended_metadata_controller( webapp,
+ name_prefix="library_dataset_",
+ path_prefix='/api/libraries/:library_id/contents/:library_content_id' )
+
+ # ====================
+ # ===== TOOLSHED =====
+ # ====================
+
+ # Handle displaying tool help images and README file images contained in repositories installed from the tool shed.
+ webapp.add_route( '/admin_toolshed/static/images/:repository_id/:image_file',
+ controller='admin_toolshed',
+ action='display_image_in_repository',
+ repository_id=None,
+ image_file=None )
+
# Galaxy API for tool shed features.
webapp.mapper.resource( 'tool_shed_repository',
'tool_shed_repositories',
@@ -206,6 +238,7 @@
path_prefix='/api',
new={ 'install_repository_revision' : 'POST' },
parent_resources=dict( member_name='tool_shed_repository', collection_name='tool_shed_repositories' ) )
+
# Connect logger from app
if app.trace_logger:
webapp.trace_logger = app.trace_logger
@@ -226,7 +259,7 @@
galaxy.model.mapping.metadata.engine.connection_provider._pool.dispose()
except:
pass
- # Close any pooled database connections before forking
+ # Close any pooled database connections before forking
try:
galaxy.model.tool_shed_install.mapping.metadata.engine.connection_provider._pool.dispose()
except:
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -255,7 +255,7 @@
tool_shed_repository.uninstalled = True
# Remove all installed tool dependencies and tool dependencies stuck in the INSTALLING state, but don't touch any
# repository dependencies.
- tool_dependencies_to_uninstall = tool_shed_repository.installed_tool_dependencies
+ tool_dependencies_to_uninstall = tool_shed_repository.tool_dependencies_installed_or_in_error
tool_dependencies_to_uninstall.extend( tool_shed_repository.tool_dependencies_being_installed )
for tool_dependency in tool_dependencies_to_uninstall:
uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans.app, tool_dependency )
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/galaxy/controllers/library.py
--- a/lib/galaxy/webapps/galaxy/controllers/library.py
+++ b/lib/galaxy/webapps/galaxy/controllers/library.py
@@ -76,6 +76,17 @@
library_list_grid = LibraryListGrid()
+
+ @web.expose
+ def list( self, trans, **kwd ):
+ params = util.Params( kwd )
+ # define app configuration for generic mako template
+ app = {
+ 'jscript' : "galaxy.library"
+ }
+ # fill template
+ return trans.fill_template('galaxy.panels.mako', config = {'app' : app})
+
@web.expose
def index( self, trans, **kwd ):
params = util.Params( kwd )
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/galaxy/controllers/library_common.py
--- a/lib/galaxy/webapps/galaxy/controllers/library_common.py
+++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py
@@ -18,7 +18,7 @@
from galaxy.util import inflector
from galaxy.util.json import to_json_string, from_json_string
from galaxy.util.streamball import StreamBall
-from galaxy.web.base.controller import BaseUIController, UsesFormDefinitionsMixin, UsesExtendedMetadataMixin
+from galaxy.web.base.controller import BaseUIController, UsesFormDefinitionsMixin, UsesExtendedMetadataMixin, UsesLibraryMixinItems
from galaxy.web.form_builder import AddressField, CheckboxField, SelectField, build_select_field
from galaxy.model.orm import and_, eagerload_all
@@ -65,7 +65,7 @@
except:
pass
-class LibraryCommon( BaseUIController, UsesFormDefinitionsMixin, UsesExtendedMetadataMixin ):
+class LibraryCommon( BaseUIController, UsesFormDefinitionsMixin, UsesExtendedMetadataMixin, UsesLibraryMixinItems ):
@web.json
def library_item_updates( self, trans, ids=None, states=None ):
# Avoid caching
@@ -1750,7 +1750,8 @@
ldda_ids = util.listify( ldda_ids )
for ldda_id in ldda_ids:
try:
- ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( ldda_id ) )
+ # Load the requested ldda and check whether the user has access to it
+ ldda = self.get_library_dataset_dataset_association( trans, ldda_id )
assert not ldda.dataset.purged
lddas.append( ldda )
except:
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/galaxy/controllers/user.py
--- a/lib/galaxy/webapps/galaxy/controllers/user.py
+++ b/lib/galaxy/webapps/galaxy/controllers/user.py
@@ -826,19 +826,22 @@
if email is None or activation_token is None:
# We don't have the email or activation_token, show error.
- return trans.show_error_message( "You are using wrong activation link. Try to log-in and we will send you a new activation email.<br><a href='%s'>Go to login page.</a>" ) % web.url_for( controller="root", action="index" )
+ return trans.show_error_message( "You are using wrong activation link. Try to log-in and we will send you a new activation email. <br><a href='%s'>Go to login page.</a>" ) % web.url_for( controller="root", action="index" )
else:
# Find the user
user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email==email ).first()
+ # If the user is active already don't try to activate
+ if user.active == True:
+ return trans.show_ok_message( "Your account is already active. Nothing has changed. <br><a href='%s'>Go to login page.</a>" ) % web.url_for( controller='root', action='index' )
if user.activation_token == activation_token:
user.activation_token = None
user.active = True
trans.sa_session.add(user)
trans.sa_session.flush()
- return trans.show_ok_message( "Your account has been successfully activated!<br><a href='%s'>Go to login page.</a>" ) % web.url_for( controller='root', action='index' )
+ return trans.show_ok_message( "Your account has been successfully activated! <br><a href='%s'>Go to login page.</a>" ) % web.url_for( controller='root', action='index' )
else:
# Tokens don't match. Activation is denied.
- return trans.show_error_message( "You are using wrong activation link. Try to log in and we will send you a new activation email.<br><a href='%s'>Go to login page.</a>" ) % web.url_for( controller='root', action='index' )
+ return trans.show_error_message( "You are using wrong activation link. Try to log in and we will send you a new activation email. <br><a href='%s'>Go to login page.</a>" ) % web.url_for( controller='root', action='index' )
return
def __get_user_type_form_definition( self, trans, user=None, **kwd ):
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/tool_shed/api/repository_revisions.py
--- a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
+++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
@@ -9,6 +9,11 @@
from tool_shed.util import export_util
import tool_shed.util.shed_util_common as suc
+from galaxy import eggs
+eggs.require( 'mercurial' )
+
+from mercurial import hg
+
log = logging.getLogger( __name__ )
@@ -66,6 +71,40 @@
return message
@web.expose_api_anonymous
+ def repository_dependencies( self, trans, id, **kwd ):
+ """
+ GET /api/repository_revisions/{encoded repository_metadata id}/repository_dependencies
+ Displays information about a repository_metadata record in the Tool Shed.
+
+ :param id: the encoded id of the `RepositoryMetadata` object
+ """
+ # Example URL: http://localhost:9009/api/repository_revisions/repository_dependencies/bb12…
+ value_mapper = { 'id' : trans.security.encode_id,
+ 'user_id' : trans.security.encode_id }
+ repository_dependencies_dicts = []
+ try:
+ repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
+ metadata = repository_metadata.metadata
+ if metadata and 'repository_dependencies' in metadata:
+ rd_tups = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
+ for rd_tup in rd_tups:
+ tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ]
+ repository_dependency = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ repository_dependency_dict = repository_dependency.to_dict( view='element', value_mapper=value_mapper )
+ # We have to add the changeset_revision of the repository dependency.
+ repository_dependency_dict[ 'changeset_revision' ] = changeset_revision
+ repository_dependency_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=trans.security.encode_id( repository_dependency.id ) )
+ repository_dependencies_dicts.append( repository_dependency_dict )
+ return repository_dependencies_dicts
+ except Exception, e:
+ message = "Error in the Tool Shed repository_revisions API in repository_dependencies: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+
+ @web.expose_api_anonymous
def index( self, trans, **kwd ):
"""
GET /api/repository_revisions
@@ -116,7 +155,7 @@
try:
query = trans.sa_session.query( trans.app.model.RepositoryMetadata ) \
.filter( and_( *clause_list ) ) \
- .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id ) \
+ .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ) \
.all()
for repository_metadata in query:
repository_metadata_dict = repository_metadata.to_dict( view='collection',
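A hypothetical request against the new repository_dependencies endpoint (host and encoded repository_metadata id are placeholders):

    import json
    import urllib2

    url = 'http://localhost:9009/api/repository_revisions/bb125606ff9ea620/repository_dependencies'
    for dependency in json.loads( urllib2.urlopen( url ).read() ):
        # Each entry is a repository dict with 'changeset_revision' and 'url' added by the controller.
        print( dependency[ 'name' ] + ' @ ' + dependency[ 'changeset_revision' ] )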
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/galaxy/webapps/tool_shed/buildapp.py
--- a/lib/galaxy/webapps/tool_shed/buildapp.py
+++ b/lib/galaxy/webapps/tool_shed/buildapp.py
@@ -94,7 +94,8 @@
parent_resources=dict( member_name='repository', collection_name='repositories' ) )
webapp.mapper.resource( 'repository_revision',
'repository_revisions',
- member={ 'export' : 'POST' },
+ member={ 'repository_dependencies' : 'GET',
+ 'export' : 'POST' },
controller='repository_revisions',
name_prefix='repository_revision_',
path_prefix='/api',
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -244,21 +244,20 @@
def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
if self.shed_config_dict.get( 'tool_path' ):
- relative_install_dir = os.path.join( self.shed_config_dict['tool_path'], relative_install_dir )
- found = False
+ relative_install_dir = os.path.join( self.shed_config_dict[ 'tool_path' ], relative_install_dir )
+ tool_config_filename = suc.strip_path( tool_config )
for root, dirs, files in os.walk( relative_install_dir ):
if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
if '.hg' in dirs:
dirs.remove( '.hg' )
for name in files:
- if name == tool_config:
- found = True
- break
- if found:
- break
- full_path = str( os.path.abspath( os.path.join( root, name ) ) )
- tool = self.toolbox.load_tool( full_path )
- return suc.generate_tool_guid( repository_clone_url, tool )
+ filename = suc.strip_path( name )
+ if filename == tool_config_filename:
+ full_path = str( os.path.abspath( os.path.join( root, name ) ) )
+ tool = self.toolbox.load_tool( full_path )
+ return suc.generate_tool_guid( repository_clone_url, tool )
+ # Not quite sure what should happen here, throw an exception or what?
+ return None
def get_prior_install_required_dict( self, tool_shed_repositories, repository_dependencies_dict ):
"""
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/tool_shed/scripts/check_tool_dependency_definition_repositories.py
--- a/lib/tool_shed/scripts/check_tool_dependency_definition_repositories.py
+++ b/lib/tool_shed/scripts/check_tool_dependency_definition_repositories.py
@@ -89,8 +89,9 @@
now = strftime( "%Y-%m-%d %H:%M:%S" )
print "#############################################################################"
- print "# %s - Validating repositories of type %s on %s..." % ( now, TOOL_DEPENDENCY_DEFINITION, config_parser.get( config_section, 'host' ) )
- print "# This tool shed is configured to listen on %s:%s" % ( config_parser.get( config_section, 'host' ), config_parser.get( config_section, 'port' ) )
+ print "# %s - Validating repositories of type %s" % ( now, TOOL_DEPENDENCY_DEFINITION )
+ print "# This tool shed is configured to listen on %s:%s" % ( config_parser.get( config_section, 'host' ),
+ config_parser.get( config_section, 'port' ) )
app = RepositoriesApplication( config )
@@ -196,7 +197,16 @@
test_environment_dict[ 'tool_shed_mercurial_version' ] = __version__.version
test_environment_dict[ 'tool_shed_revision' ] = get_repository_current_revision( os.getcwd() )
tool_test_results_dict[ 'test_environment' ] = test_environment_dict
- repository_metadata.tool_test_results = tool_test_results_dict
+ # Store only the configured number of test runs.
+ num_tool_test_results_saved = int( app.config.num_tool_test_results_saved )
+ if len( tool_test_results_dicts ) >= num_tool_test_results_saved:
+ test_results_index = num_tool_test_results_saved - 1
+ new_tool_test_results_dicts = tool_test_results_dicts[ :test_results_index ]
+ else:
+ new_tool_test_results_dicts = [ d for d in tool_test_results_dicts ]
+ # Insert the new element into the first position in the list.
+ new_tool_test_results_dicts.insert( 0, tool_test_results_dict )
+ repository_metadata.tool_test_results = new_tool_test_results_dicts
app.sa_session.add( repository_metadata )
app.sa_session.flush()
stop = time.time()
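The capping logic above, in isolation, behaves as in this minimal sketch (values are invented):

    num_tool_test_results_saved = 5
    tool_test_results_dicts = [ { 'run': n } for n in range( 6 ) ]  # existing runs, newest first

    if len( tool_test_results_dicts ) >= num_tool_test_results_saved:
        new_tool_test_results_dicts = tool_test_results_dicts[ :num_tool_test_results_saved - 1 ]
    else:
        new_tool_test_results_dicts = [ d for d in tool_test_results_dicts ]
    new_tool_test_results_dicts.insert( 0, { 'run': 'latest' } )
    # len( new_tool_test_results_dicts ) == 5: the new run plus the four most recent older runs.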
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/tool_shed/scripts/clean_up_tool_dependency_directory.py
--- a/lib/tool_shed/scripts/clean_up_tool_dependency_directory.py
+++ b/lib/tool_shed/scripts/clean_up_tool_dependency_directory.py
@@ -4,55 +4,27 @@
import shutil
def main( args ):
- if not os.path.exists( args.basepath ):
- print 'Tool dependency path %s does not exist.' % str( args.basepath )
- return 1
- if args.delete:
- print 'Deleting contents of tool dependency path %s.' % args.basepath
- for node in os.listdir( args.basepath ):
- path = os.path.join( args.basepath, node )
- if os.path.isdir( path ):
- try:
- shutil.rmtree( path )
- print 'Deleted directory %s and all its contents.' % path
- except Exception, e:
- print 'Error deleting directory %s: %s' % ( path, str( e ) )
- pass
- elif os.path.isfile( path ):
- try:
- os.remove( path )
- print 'Deleted file %s.' % path
- except Exception, e:
- print 'Error deleting file %s: %s' % ( path, str( e ) )
- pass
- elif os.path.islink( path ):
- print 'Deleting symlink %s with target %s.' % ( path, os.path.realpath( path ) )
- try:
- os.remove( path )
- except Exception, e:
- print 'Error deleting symlink %s: %s' % ( path, str( e ) )
- pass
+ if not os.path.exists( args.tool_dependency_dir ):
+ print 'Tool dependency base path %s does not exist, creating.' % str( args.tool_dependency_dir )
+ os.mkdir( args.tool_dependency_dir )
+ return 0
else:
- print 'Tool dependency path %s contains the following files and directories:' % args.basepath
- for element in os.listdir( args.basepath ):
- print element
- return 0
+ for content in os.listdir( args.tool_dependency_dir ):
+ print 'Deleting directory %s from %s.' % ( content, args.tool_dependency_dir )
+ full_path = os.path.join( args.tool_dependency_dir, content )
+ if os.path.isdir( full_path ):
+ shutil.rmtree( full_path )
+ else:
+ os.remove( full_path )
if __name__ == '__main__':
- description = 'Clean out or list the contents of the provided tool dependency path. Remove if '
- description += 'the --delete command line argument is provided.'
+ description = 'Clean out the configured tool dependency path, creating it if it does not exist.'
parser = argparse.ArgumentParser( description=description )
- parser.add_argument( '--delete',
- dest='delete',
- required=False,
- action='store_true',
- default=False,
- help='Whether to delete all folders and files or list them on exit.' )
- parser.add_argument( '--basepath',
- dest='basepath',
+ parser.add_argument( '--tool_dependency_dir',
+ dest='tool_dependency_dir',
required=True,
action='store',
metavar='name',
- help='The base path where tool dependencies are installed.' )
+ help='The base path where tool dependencies will be installed.' )
args = parser.parse_args()
sys.exit( main( args ) )
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/tool_shed/scripts/show_tool_dependency_installation_dir_contents.py
--- /dev/null
+++ b/lib/tool_shed/scripts/show_tool_dependency_installation_dir_contents.py
@@ -0,0 +1,75 @@
+import argparse
+import os
+import sys
+
+new_path = [ os.path.join( os.getcwd(), "lib" ) ]
+new_path.extend( sys.path[ 1: ] )
+sys.path = new_path
+
+from galaxy import eggs
+eggs.require( "SQLAlchemy >= 0.4" )
+
+import galaxy.model
+import galaxy.model.tool_shed_install.mapping as install_mapper
+import galaxy.config as galaxy_config
+
+
+class CleanUpDependencyApplication( object ):
+ """Application that enables querying the database using the tool_shed_install model."""
+
+ def __init__( self, config ):
+ self.config = config
+ # Setup the database engine and ORM
+ self.model = install_mapper.init( self.config.database_connection, engine_options={}, create_tables=False )
+
+ @property
+ def sa_session( self ):
+ """Returns a SQLAlchemy session."""
+ return self.model.context.current
+
+ def shutdown( self ):
+ pass
+
+def main( args, app ):
+ if not os.path.exists( args.basepath ):
+ print 'Tool dependency base path %s does not exist.' % str( args.basepath )
+ return
+ print 'Checking tool dependency path %s' % args.basepath
+ tool_dependency_dirs = get_tool_dependency_dirs( app )
+ for tool_dependency_dir in tool_dependency_dirs:
+ path = os.path.join( args.basepath, tool_dependency_dir )
+ if os.path.exists( path ):
+ path_contents = os.listdir( path )
+ if len( path_contents ) > 0:
+ print 'Found non-empty tool dependency installation directory %s.' % path
+ print 'Directory has the following contents: \n %s' % '\n '.join( path_contents )
+
+def get_tool_dependency_dirs( app ):
+ dependency_paths = []
+ for tool_dependency in app.sa_session.query( galaxy.model.tool_shed_install.ToolDependency ).all():
+ dependency_paths.append( tool_dependency.installation_directory( app ) )
+ return dependency_paths
+
+if __name__ == '__main__':
+ description = 'List the contents of any tool dependency installation directory under the '
+ description += 'provided tool dependency path, reporting any directories that are not empty.'
+ parser = argparse.ArgumentParser( description=description )
+ parser.add_argument( '--basepath',
+ dest='basepath',
+ required=True,
+ action='store',
+ metavar='name',
+ help='The base path where tool dependencies are installed.' )
+ parser.add_argument( '--dburi',
+ dest='dburi',
+ required=True,
+ action='store',
+ metavar='dburi',
+ help='The database URI to connect to.' )
+ args = parser.parse_args()
+ database_connection = args.dburi
+ config_dict = dict( database_connection=database_connection, tool_dependency_dir=args.basepath )
+ config = galaxy_config.Configuration( **config_dict )
+ app = CleanUpDependencyApplication( config )
+ sys.exit( main( args, app ) )
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/tool_shed/util/container_util.py
--- a/lib/tool_shed/util/container_util.py
+++ b/lib/tool_shed/util/container_util.py
@@ -21,26 +21,29 @@
self.key = key
self.label = label
self.parent = parent
+ self.current_repository_installation_errors = []
+ self.current_repository_successful_installations = []
self.description = None
self.datatypes = []
+ self.failed_tests = []
self.folders = []
+ self.invalid_data_managers = []
self.invalid_repository_dependencies = []
self.invalid_tool_dependencies = []
self.invalid_tools = []
- self.current_repository_installation_errors = []
- self.repository_installation_errors = []
- self.tool_dependency_installation_errors = []
- self.valid_tools = []
- self.valid_data_managers = []
- self.invalid_data_managers = []
- self.tool_dependencies = []
- self.failed_tests = []
self.missing_test_components = []
self.not_tested = []
self.passed_tests = []
+ self.readme_files = []
+ self.repository_dependencies = []
+ self.repository_installation_errors = []
+ self.repository_successful_installations = []
self.test_environments = []
- self.repository_dependencies = []
- self.readme_files = []
+ self.tool_dependencies = []
+ self.tool_dependency_installation_errors = []
+ self.tool_dependency_successful_installations = []
+ self.valid_tools = []
+ self.valid_data_managers = []
self.workflows = []
def contains_folder( self, folder ):
@@ -230,6 +233,17 @@
self.error_message = error_message
+class RepositorySuccessfulInstallation( object ):
+ """Repository installation object"""
+
+ def __init__( self, id=None, tool_shed=None, name=None, owner=None, changeset_revision=None ):
+ self.id = id
+ self.tool_shed = tool_shed
+ self.name = name
+ self.owner = owner
+ self.changeset_revision = changeset_revision
+
+
class TestEnvironment( object ):
"""Tool test environment object"""
@@ -294,6 +308,16 @@
self.error_message = error_message
+class ToolDependencySuccessfulInstallation( object ):
+ """Tool dependency installation object"""
+
+ def __init__( self, id=None, type=None, name=None, version=None, installation_directory=None ):
+ self.id = id
+ self.type = type
+ self.name = name
+ self.version = version
+ self.installation_directory = installation_directory
+
class Workflow( object ):
"""Workflow object."""
@@ -1097,7 +1121,8 @@
# {'python_version': '2.7.4', 'tool_shed_mercurial_version': '2.2.3', 'system': 'Linux 3.8.0-30-generic',
# 'tool_shed_database_version': 21, 'architecture': 'x86_64', 'galaxy_revision': '11573:a62c54ddbe2a',
# 'galaxy_database_version': 117, 'time_tested': '2013-12-03 09:11:48', 'tool_shed_revision': '11556:228156daa575'},
- # 'installation_errors': {'current_repository': [], 'repository_dependencies': [], 'tool_dependencies': []}
+ # 'installation_errors': {'current_repository': [], 'repository_dependencies': [], 'tool_dependencies': []},
+ # 'successful_installations': {'current_repository': [], 'repository_dependencies': [], 'tool_dependencies': []}
# }
test_environment_dict = tool_test_results_dict.get( 'test_environment', None )
if test_environment_dict is None:
@@ -1335,6 +1360,82 @@
version=td_version,
error_message=td_error_message )
tool_dependencies_folder.tool_dependency_installation_errors.append( tool_dependency_installation_error )
+ successful_installation_dict = tool_test_results_dict.get( 'successful_installations', {} )
+ if len( successful_installation_dict ) > 0:
+ # 'successful_installation':
+ # {'current_repository': [],
+ # 'repository_dependencies': [],
+ # 'tool_dependencies':
+ # [{'installation_directory': 'some path' 'type': 'package', 'name': 'MIRA', 'version': '4.0'}]
+ # }
+ current_repository_successful_installation_dicts = successful_installation_dict.get( 'current_repository', [] )
+ repository_dependency_successful_installation_dicts = successful_installation_dict.get( 'repository_dependencies', [] )
+ tool_dependency_successful_installation_dicts = successful_installation_dict.get( 'tool_dependencies', [] )
+ if len( current_repository_successful_installation_dicts ) > 0 or \
+ len( repository_dependency_successful_installation_dicts ) > 0 or \
+ len( tool_dependency_successful_installation_dicts ) > 0:
+ repository_installation_success_id = 0
+ folder_id += 1
+ successful_installation_base_folder = Folder( id=folder_id,
+ key='successful_installations',
+ label='Successful installations',
+ parent=containing_folder )
+ containing_folder.folders.append( successful_installation_base_folder )
+ # Displaying the successful installation of the current repository is not really necessary, so we'll skip it.
+ if len( repository_dependency_successful_installation_dicts ) > 0:
+ folder_id += 1
+ repository_dependencies_folder = Folder( id=folder_id,
+ key='repository_dependency_successful_installations',
+ label='Repository dependencies',
+ parent=successful_installation_base_folder )
+ successful_installation_base_folder.folders.append( repository_dependencies_folder )
+ for repository_dependency_successful_installation_dict in repository_dependency_successful_installation_dicts:
+ repository_installation_success_id += 1
+ try:
+ rd_tool_shed = str( repository_dependency_successful_installation_dict.get( 'tool_shed', '' ) )
+ rd_name = str( repository_dependency_successful_installation_dict.get( 'name', '' ) )
+ rd_owner = str( repository_dependency_successful_installation_dict.get( 'owner', '' ) )
+ rd_changeset_revision = str( repository_dependency_successful_installation_dict.get( 'changeset_revision', '' ) )
+ except Exception, e:
+ rd_tool_shed = 'unknown'
+ rd_name = 'unknown'
+ rd_owner = 'unknown'
+ rd_changeset_revision = 'unknown'
+ repository_installation_success = \
+ RepositoryInstallationSuccess( id=repository_installation_success_id,
+ tool_shed=rd_tool_shed,
+ name=rd_name,
+ owner=rd_owner,
+ changeset_revision=rd_changeset_revision )
+ repository_dependencies_folder.repository_successful_installations.append( repository_installation_success )
+ if len( tool_dependency_successful_installation_dicts ) > 0:
+ # [{'installation_directory': 'some path' 'type': 'package', 'name': 'MIRA', 'version': '4.0'}]
+ folder_id += 1
+ tool_dependencies_folder = Folder( id=folder_id,
+ key='tool_dependency_successful_installations',
+ label='Tool dependencies',
+ parent=successful_installation_base_folder )
+ successful_installation_base_folder.folders.append( tool_dependencies_folder )
+ tool_dependency_error_id = 0
+ for tool_dependency_successful_installation_dict in tool_dependency_successful_installation_dicts:
+ tool_dependency_error_id += 1
+ try:
+ td_type = str( tool_dependency_successful_installation_dict.get( 'type', '' ) )
+ td_name = str( tool_dependency_successful_installation_dict.get( 'name', '' ) )
+ td_version = str( tool_dependency_successful_installation_dict.get( 'version', '' ) )
+ td_installation_directory = tool_dependency_successful_installation_dict.get( 'installation_directory', '' )
+ except Exception, e:
+ td_type = 'unknown'
+ td_name = 'unknown'
+ td_version = 'unknown'
+ td_installation_directory = str( e )
+ tool_dependency_successful_installation = \
+ ToolDependencySuccessfulInstallation( id=tool_dependency_error_id,
+ type=td_type,
+ name=td_name,
+ version=td_version,
+ installation_directory=td_installation_directory )
+ tool_dependencies_folder.tool_dependency_successful_installations.append( tool_dependency_successful_installation )
else:
tool_test_results_root_folder = None
return folder_id, tool_test_results_root_folder
diff -r 05ff06be8f055d4d52b798615df007e5f0094ecb -r 65a083eee9332b2d178126588d536ee4c66fd66f lib/tool_shed/util/tool_util.py
--- a/lib/tool_shed/util/tool_util.py
+++ b/lib/tool_shed/util/tool_util.py
@@ -247,12 +247,16 @@
bold_start = ''
bold_end = ''
message = ''
+ if trans.webapp.name == 'galaxy':
+ tip_rev = str( repository.changeset_revision )
+ else:
+ tip_rev = str( repository.tip( trans.app ) )
if not displaying_invalid_tool:
if metadata_dict:
- message += "Metadata may have been defined for some items in revision '%s'. " % str( repository.tip( trans.app ) )
+ message += "Metadata may have been defined for some items in revision '%s'. " % tip_rev
message += "Correct the following problems if necessary and reset metadata.%s" % new_line
else:
- message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip( trans.app ) )
+ message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % tip_rev
message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.%s" % new_line
for itc_tup in invalid_file_tups:
tool_file, exception_msg = itc_tup
This diff is so big that we needed to truncate the remainder.
https://bitbucket.org/galaxy/galaxy-central/commits/fe71eca6e5b1/
Changeset: fe71eca6e5b1
Branch: page-api
User: Kyle Ellrott
Date: 2013-12-20 21:02:33
Summary: Adding some more page api security checks and fixing deletion filtering
Affected #: 2 files
diff -r 65a083eee9332b2d178126588d536ee4c66fd66f -r fe71eca6e5b19ecbe0be362c9ec84d72ab06e658 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3147,7 +3147,7 @@
self.openid = openid
class Page( object, Dictifiable ):
- dict_element_visible_keys = [ 'id', 'title', 'latest_revision_id', 'slug', 'published', 'importable' ]
+ dict_element_visible_keys = [ 'id', 'title', 'latest_revision_id', 'slug', 'published', 'importable', 'deleted' ]
def __init__( self ):
self.id = None
self.user = None
diff -r 65a083eee9332b2d178126588d536ee4c66fd66f -r fe71eca6e5b19ecbe0be362c9ec84d72ab06e658 lib/galaxy/webapps/galaxy/api/pages.py
--- a/lib/galaxy/webapps/galaxy/api/pages.py
+++ b/lib/galaxy/webapps/galaxy/api/pages.py
@@ -14,9 +14,9 @@
class PagesController( BaseAPIController, SharableItemSecurityMixin, UsesAnnotations, SharableMixin ):
@web.expose_api
- def index( self, trans, deleted='False', **kwd ):
+ def index( self, trans, deleted=False, **kwd ):
"""
- index( self, trans, deleted='False', **kwd )
+ index( self, trans, deleted=False, **kwd )
* GET /api/pages
return a list of Pages viewable by the user
@@ -25,12 +25,27 @@
:rtype: list
:returns: dictionaries containing summary or detailed Page information
"""
- r = trans.sa_session.query( trans.app.model.Page )
- if not deleted:
- r = r.filter_by(deleted=False)
out = []
- for row in r:
- out.append( self.encode_all_ids( trans, row.to_dict(), True) )
+
+ if trans.user_is_admin():
+ r = trans.sa_session.query( trans.app.model.Page )
+ if not deleted:
+ r = r.filter_by(deleted=False)
+ for row in r:
+ out.append( self.encode_all_ids( trans, row.to_dict(), True) )
+ else:
+ user = trans.get_user()
+ r = trans.sa_session.query( trans.app.model.Page ).filter_by( user=user )
+ if not deleted:
+ r = r.filter_by(deleted=False)
+ for row in r:
+ out.append( self.encode_all_ids( trans, row.to_dict(), True) )
+ r = trans.sa_session.query( trans.app.model.Page ).filter( trans.app.model.Page.user != user ).filter_by(published=True)
+ if not deleted:
+ r = r.filter_by(deleted=False)
+ for row in r:
+ out.append( self.encode_all_ids( trans, row.to_dict(), True) )
+
return out
@@ -126,6 +141,7 @@
:returns: Dictionary return of the Page.to_dict call with the 'content' field populated by the most recent revision
"""
page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id( id ) )
+ self.security_check( trans, page, check_ownership=False, check_accessible=True)
rval = self.encode_all_ids( trans, page.to_dict(), True)
rval['content'] = page.latest_revision.content
return rval
\ No newline at end of file
https://bitbucket.org/galaxy/galaxy-central/commits/6a42cefbe721/
Changeset: 6a42cefbe721
Branch: page-api
User: kellrott
Date: 2013-12-27 06:59:40
Summary: Sanitizing incoming page content.
Affected #: 2 files
diff -r fe71eca6e5b19ecbe0be362c9ec84d72ab06e658 -r 6a42cefbe7211bf5fcc0eab68f5c858fb88f3658 lib/galaxy/webapps/galaxy/api/page_revisions.py
--- a/lib/galaxy/webapps/galaxy/api/page_revisions.py
+++ b/lib/galaxy/webapps/galaxy/api/page_revisions.py
@@ -70,11 +70,14 @@
else:
title = page.title
+ content = payload.get("content", "")
+ content = sanitize_html( content, 'utf-8', 'text/html' )
+
page_revision = trans.app.model.PageRevision()
page_revision.title = title
page_revision.page = page
page.latest_revision = page_revision
- page_revision.content = payload.get("content", "")
+ page_revision.content = content
# Persist
session = trans.sa_session
session.flush()
diff -r fe71eca6e5b19ecbe0be362c9ec84d72ab06e658 -r 6a42cefbe7211bf5fcc0eab68f5c858fb88f3658 lib/galaxy/webapps/galaxy/api/pages.py
--- a/lib/galaxy/webapps/galaxy/api/pages.py
+++ b/lib/galaxy/webapps/galaxy/api/pages.py
@@ -77,6 +77,10 @@
elif trans.sa_session.query( trans.app.model.Page ).filter_by( user=user, slug=payload["slug"], deleted=False ).first():
error_str = "Page id must be unique"
else:
+
+ content = payload.get("content", "")
+ content = sanitize_html( content, 'utf-8', 'text/html' )
+
# Create the new stored page
page = trans.app.model.Page()
page.title = payload['title']
@@ -89,7 +93,7 @@
page_revision.title = payload['title']
page_revision.page = page
page.latest_revision = page_revision
- page_revision.content = payload.get("content", "")
+ page_revision.content = content
# Persist
session = trans.sa_session
session.add( page )
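The sanitize_html call added in both controllers is the same helper the web controllers already use. A minimal sketch of the call shape follows; it only runs inside a Galaxy checkout, and the sample markup is made up:

    from galaxy.util.sanitize_html import sanitize_html

    raw = '<p onclick="alert(1)">Hello</p><script>alert(2)</script>'
    # Same positional arguments as in the API controllers: content, encoding, mime type.
    clean = sanitize_html( raw, 'utf-8', 'text/html' )
    print clean  # script tags and inline event handlers are expected to be stripped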
https://bitbucket.org/galaxy/galaxy-central/commits/8e1a2e81499c/
Changeset: 8e1a2e81499c
User: dannon
Date: 2013-12-31 16:04:06
Summary: Merged in kellrott/galaxy-central/page-api (pull request #277)
Page API
Affected #: 5 files
diff -r 84478d0ec6c426180a286a410f733eff63d2f28b -r 8e1a2e81499c780acf08c3f195717a3523f4e55d lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3150,7 +3150,7 @@
self.openid = openid
class Page( object, Dictifiable ):
- dict_element_visible_keys = [ 'id', 'title', 'latest_revision_id', 'slug' ]
+ dict_element_visible_keys = [ 'id', 'title', 'latest_revision_id', 'slug', 'published', 'importable', 'deleted' ]
def __init__( self ):
self.id = None
self.user = None
diff -r 84478d0ec6c426180a286a410f733eff63d2f28b -r 8e1a2e81499c780acf08c3f195717a3523f4e55d lib/galaxy/model/search.py
--- a/lib/galaxy/model/search.py
+++ b/lib/galaxy/model/search.py
@@ -495,7 +495,9 @@
DOMAIN = "page"
FIELDS = {
'id': ViewField('id', sqlalchemy_field=(Page, "id"), id_decode=True),
+ 'slug': ViewField('slug', sqlalchemy_field=(Page, "slug")),
'title': ViewField('title', sqlalchemy_field=(Page, "title")),
+ 'deleted': ViewField('deleted', sqlalchemy_field=(Page, "deleted"))
}
def search(self, trans):
diff -r 84478d0ec6c426180a286a410f733eff63d2f28b -r 8e1a2e81499c780acf08c3f195717a3523f4e55d lib/galaxy/webapps/galaxy/api/page_revisions.py
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/page_revisions.py
@@ -0,0 +1,87 @@
+"""
+API for updating Galaxy Pages
+"""
+import logging
+from galaxy import web
+from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController, SharableMixin
+from galaxy.model.search import GalaxySearchEngine
+from galaxy.model.item_attrs import UsesAnnotations
+from galaxy.exceptions import ItemAccessibilityException
+from galaxy.util.sanitize_html import sanitize_html
+
+log = logging.getLogger( __name__ )
+
+class PageRevisionsController( BaseAPIController, SharableItemSecurityMixin, UsesAnnotations, SharableMixin ):
+
+ @web.expose_api
+ def index( self, trans, page_id, **kwd ):
+ """
+ index( self, trans, page_id, **kwd )
+ * GET /api/pages/{page_id}/revisions
+ return a list of Page revisions
+
+ :param page_id: Display the revisions of Page with ID=page_id
+
+ :rtype: list
+ :returns: dictionaries containing different revisions of the page
+ """
+ r = trans.sa_session.query( trans.app.model.PageRevision ).filter_by( page_id=trans.security.decode_id(page_id) )
+ out = []
+ for page in r:
+ if self.security_check( trans, page, True, True ):
+ out.append( self.encode_all_ids( trans, page.to_dict(), True) )
+ return out
+
+
+ @web.expose_api
+ def create( self, trans, page_id, payload, **kwd ):
+ """
+ create( self, trans, page_id, payload **kwd )
+ * POST /api/pages/{page_id}/revisions
+ Create a new revision for a page
+
+ :param page_id: Add revision to Page with ID=page_id
+ :param payload: A dictionary containing::
+ 'title' = New title of the page
+ 'content' = New content of the page
+
+ :rtype: dictionary
+ :returns: Dictionary with 'success' or 'error' element to indicate the result of the request
+ """
+ user = trans.get_user()
+ error_str = ""
+
+ if not page_id:
+ error_str = "page_id is required"
+ elif not payload.get("content", None):
+ error_str = "content is required"
+ else:
+
+ # Create the new stored page
+ page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id(page_id) )
+ if page is None:
+ return { "error" : "page not found"}
+
+ if not self.security_check( trans, page, True, True ):
+ return { "error" : "page not found"}
+
+ if 'title' in payload:
+ title = payload['title']
+ else:
+ title = page.title
+
+ content = payload.get("content", "")
+ content = sanitize_html( content, 'utf-8', 'text/html' )
+
+ page_revision = trans.app.model.PageRevision()
+ page_revision.title = title
+ page_revision.page = page
+ page.latest_revision = page_revision
+ page_revision.content = content
+ # Persist
+ session = trans.sa_session
+ session.flush()
+
+ return {"success" : "revision posted"}
+
+ return { "error" : error_str }
diff -r 84478d0ec6c426180a286a410f733eff63d2f28b -r 8e1a2e81499c780acf08c3f195717a3523f4e55d lib/galaxy/webapps/galaxy/api/pages.py
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/pages.py
@@ -0,0 +1,151 @@
+"""
+API for updating Galaxy Pages
+"""
+import logging
+from galaxy import web
+from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController, SharableMixin
+from galaxy.model.search import GalaxySearchEngine
+from galaxy.model.item_attrs import UsesAnnotations
+from galaxy.exceptions import ItemAccessibilityException
+from galaxy.util.sanitize_html import sanitize_html
+
+log = logging.getLogger( __name__ )
+
+class PagesController( BaseAPIController, SharableItemSecurityMixin, UsesAnnotations, SharableMixin ):
+
+ @web.expose_api
+ def index( self, trans, deleted=False, **kwd ):
+ """
+ index( self, trans, deleted=False, **kwd )
+ * GET /api/pages
+ return a list of Pages viewable by the user
+
+ :param deleted: Display deleted pages
+
+ :rtype: list
+ :returns: dictionaries containing summary or detailed Page information
+ """
+ out = []
+
+ if trans.user_is_admin():
+ r = trans.sa_session.query( trans.app.model.Page )
+ if not deleted:
+ r = r.filter_by(deleted=False)
+ for row in r:
+ out.append( self.encode_all_ids( trans, row.to_dict(), True) )
+ else:
+ user = trans.get_user()
+ r = trans.sa_session.query( trans.app.model.Page ).filter_by( user=user )
+ if not deleted:
+ r = r.filter_by(deleted=False)
+ for row in r:
+ out.append( self.encode_all_ids( trans, row.to_dict(), True) )
+ r = trans.sa_session.query( trans.app.model.Page ).filter( trans.app.model.Page.user != user ).filter_by(published=True)
+ if not deleted:
+ r = r.filter_by(deleted=False)
+ for row in r:
+ out.append( self.encode_all_ids( trans, row.to_dict(), True) )
+
+ return out
+
+
+ @web.expose_api
+ def create( self, trans, payload, **kwd ):
+ """
+ create( self, trans, payload, **kwd )
+ * POST /api/pages
+ Create a page and return dictionary containing Page summary
+
+ :param payload: dictionary structure containing::
+ 'slug' = The title slug for the page URL, must be unique
+ 'title' = Title of the page
+ 'content' = HTML contents of the page
+ 'annotation' = Annotation that will be attached to the page
+
+ :rtype: dict
+ :returns: Dictionary return of the Page.to_dict call
+ """
+ user = trans.get_user()
+ error_str = ""
+
+ if not payload.get("title", None):
+ error_str = "Page name is required"
+ elif not payload.get("slug", None):
+ error_str = "Page id is required"
+ elif not self._is_valid_slug( payload["slug"] ):
+ error_str = "Page identifier must consist of only lowercase letters, numbers, and the '-' character"
+ elif trans.sa_session.query( trans.app.model.Page ).filter_by( user=user, slug=payload["slug"], deleted=False ).first():
+ error_str = "Page id must be unique"
+ else:
+
+ content = payload.get("content", "")
+ content = sanitize_html( content, 'utf-8', 'text/html' )
+
+ # Create the new stored page
+ page = trans.app.model.Page()
+ page.title = payload['title']
+ page.slug = payload['slug']
+ page_annotation = sanitize_html( payload.get("annotation",""), 'utf-8', 'text/html' )
+ self.add_item_annotation( trans.sa_session, trans.get_user(), page, page_annotation )
+ page.user = user
+ # And the first (empty) page revision
+ page_revision = trans.app.model.PageRevision()
+ page_revision.title = payload['title']
+ page_revision.page = page
+ page.latest_revision = page_revision
+ page_revision.content = content
+ # Persist
+ session = trans.sa_session
+ session.add( page )
+ session.flush()
+
+ rval = self.encode_all_ids( trans, page.to_dict(), True)
+ return rval
+
+ return { "error" : error_str }
+
+
+ @web.expose_api
+ def delete( self, trans, id, **kwd ):
+ """
+ delete( self, trans, id, **kwd )
+ * DELETE /api/pages/{id}
+ Create a page and return dictionary containing Page summary
+
+ :param id: ID of page to be deleted
+
+ :rtype: dict
+ :returns: Dictionary with 'success' or 'error' element to indicate the result of the request
+ """
+ page_id = id;
+ try:
+ page = trans.sa_session.query(self.app.model.Page).get(trans.security.decode_id(page_id))
+ except Exception, e:
+ return { "error" : "Page with ID='%s' can not be found\n Exception: %s" % (page_id, str( e )) }
+
+ # check to see if user has permissions to selected workflow
+ if page.user != trans.user and not trans.user_is_admin():
+ return { "error" : "Workflow is not owned by or shared with current user" }
+
+ #Mark a workflow as deleted
+ page.deleted = True
+ trans.sa_session.flush()
+ return {"success" : "Deleted", "id" : page_id}
+
+ @web.expose_api
+ def show( self, trans, id, **kwd ):
+ """
+ show( self, trans, id, **kwd )
+ * GET /api/pages/{id}
+ View a page summary and the content of the latest revision
+
+ :param id: ID of page to be displayed
+
+ :rtype: dict
+ :returns: Dictionary return of the Page.to_dict call with the 'content' field populated by the most recent revision
+ """
+ page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id( id ) )
+ self.security_check( trans, page, check_ownership=False, check_accessible=True)
+ rval = self.encode_all_ids( trans, page.to_dict(), True)
+ rval['content'] = page.latest_revision.content
+ return rval
\ No newline at end of file
diff -r 84478d0ec6c426180a286a410f733eff63d2f28b -r 8e1a2e81499c780acf08c3f195717a3523f4e55d lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -144,7 +144,11 @@
parent_resources=dict( member_name='datatype', collection_name='datatypes' ) )
#webapp.mapper.connect( 'run_workflow', '/api/workflow/{workflow_id}/library/{library_id}', controller='workflows', action='run', workflow_id=None, library_id=None, conditions=dict(method=["GET"]) )
webapp.mapper.resource( 'search', 'search', path_prefix='/api' )
-
+ webapp.mapper.resource( 'page', 'pages', path_prefix="/api")
+ webapp.mapper.resource( 'revision', 'revisions',
+ path_prefix='/api/pages/:page_id',
+ controller='page_revisions',
+ parent_resources=dict( member_name='page', collection_name='pages' ) )
# add as a non-ATOM API call to support the notion of a 'current/working' history unique to the history resource
webapp.mapper.connect( "set_as_current", "/api/histories/{id}/set_as_current",
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0befff261a71/
Changeset: 0befff261a71
User: nsoranzo
Date: 2013-12-20 17:48:04
Summary: Log a failed chmod action when installing tool dependencies.
Affected #: 1 file
diff -r 0de3c7e7d6327e1af295fc4d81419efd8b5f41af -r 0befff261a714eccb452821c8bf7463a0a8d3fc9 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -698,6 +698,8 @@
for target_file, mode in action_dict[ 'change_modes' ]:
if os.path.exists( target_file ):
os.chmod( target_file, mode )
+ else:
+ log.error( 'Invalid file %s specified, ignoring %s action.', target_file, action_type )
elif action_type == 'download_binary':
url = action_dict[ 'url' ]
target_directory = action_dict.get( 'target_directory', None )
https://bitbucket.org/galaxy/galaxy-central/commits/fa0531f4ee00/
Changeset: fa0531f4ee00
User: dannon
Date: 2013-12-31 15:56:46
Summary: Merged in nsoranzo/galaxy-central (pull request #284)
Log a failed chmod action when installing tool dependencies.
Affected #: 1 file
diff -r c304bf90e8cffa8c39eba2d14d940ac24a67457a -r fa0531f4ee00ed81f9229e02ae847d84b7e0bdf0 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -698,6 +698,8 @@
for target_file, mode in action_dict[ 'change_modes' ]:
if os.path.exists( target_file ):
os.chmod( target_file, mode )
+ else:
+ log.error( 'Invalid file %s specified, ignoring %s action.', target_file, action_type )
elif action_type == 'download_binary':
url = action_dict[ 'url' ]
target_directory = action_dict.get( 'target_directory', None )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5a264a194528/
Changeset: 5a264a194528
User: Gian...(a)crs4.it
Date: 2013-12-20 10:58:08
Summary: Add <version_command>.
Affected #: 2 files
diff -r 3e2b61de5a2322563c1ffa49e03c118144f5068c -r 5a264a194528efdffd6aec050b96a38701a7563e tools/sr_assembly/velvetg.xml
--- a/tools/sr_assembly/velvetg.xml
+++ b/tools/sr_assembly/velvetg.xml
@@ -1,5 +1,6 @@
<tool id="velvetg" name="velvetg" version="1.0.0"><description>Velvet sequence assembler for very short reads</description>
+ <version_command>velvetg 2>&1 | grep "Version" | sed -e 's/Version //'</version_command><command interpreter="python">
velvetg_wrapper.py
'$input.extra_files_path'
diff -r 3e2b61de5a2322563c1ffa49e03c118144f5068c -r 5a264a194528efdffd6aec050b96a38701a7563e tools/sr_assembly/velveth.xml
--- a/tools/sr_assembly/velveth.xml
+++ b/tools/sr_assembly/velveth.xml
@@ -1,5 +1,6 @@
<tool id="velveth" name="velveth" version="1.0.0"><description>Prepare a dataset for the Velvet velvetg Assembler</description>
+ <version_command>velveth 2>&1 | grep "Version" | sed -e 's/Version //'</version_command><command interpreter="python">
velveth_wrapper.py
'$out_file1' '$out_file1.extra_files_path'
https://bitbucket.org/galaxy/galaxy-central/commits/c304bf90e8cf/
Changeset: c304bf90e8cf
User: jmchilton
Date: 2013-12-26 15:41:39
Summary: Merged in gmauro/galaxy-central (pull request #283)
Add <version_command>.
Affected #: 2 files
diff -r 2c64d7fe6a7827b8d2b870c557ffc4d7acc24eca -r c304bf90e8cffa8c39eba2d14d940ac24a67457a tools/sr_assembly/velvetg.xml
--- a/tools/sr_assembly/velvetg.xml
+++ b/tools/sr_assembly/velvetg.xml
@@ -1,5 +1,6 @@
<tool id="velvetg" name="velvetg" version="1.0.0"><description>Velvet sequence assembler for very short reads</description>
+ <version_command>velvetg 2>&1 | grep "Version" | sed -e 's/Version //'</version_command><command interpreter="python">
velvetg_wrapper.py
'$input.extra_files_path'
diff -r 2c64d7fe6a7827b8d2b870c557ffc4d7acc24eca -r c304bf90e8cffa8c39eba2d14d940ac24a67457a tools/sr_assembly/velveth.xml
--- a/tools/sr_assembly/velveth.xml
+++ b/tools/sr_assembly/velveth.xml
@@ -1,5 +1,6 @@
<tool id="velveth" name="velveth" version="1.0.0"><description>Prepare a dataset for the Velvet velvetg Assembler</description>
+ <version_command>velveth 2>&1 | grep "Version" | sed -e 's/Version //'</version_command><command interpreter="python">
velveth_wrapper.py
'$out_file1' '$out_file1.extra_files_path'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4961f4e87947/
Changeset: 4961f4e87947
User: jgoecks
Date: 2013-12-23 17:54:46
Summary: Workflows API, run workflow: enable multiple parameter values to be set for a single step.
Affected #: 1 file
diff -r 31f4f354b66105d5c2bedc36d7fd80b37327b75e -r 4961f4e8794729f899a2901caf613201c7bb9f7a lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -207,15 +207,18 @@
# Update step parameters as directed by payload's parameter mapping.
if step.tool_id in param_map:
- # Get parameter settings.
- change_param = param_map[step.tool_id]['param'];
- change_value = param_map[step.tool_id]['value'];
- step_id = param_map[step.tool_id].get('step_id', '')
+ param_dict = param_map[ step.tool_id ]
+ step_id = param_dict.get( 'step_id', '' )
+
+ # Backward compatibility: convert param/value dict to new 'name': 'value' format.
+ if 'param' in param_dict and 'value' in param_dict:
+ param_dict[ param_dict['param'] ] = param_dict['value']
# Update step if there's no step id (i.e. all steps with tool are
# updated) or update if step ids match.
if not step_id or ( step_id and int( step_id ) == step.id ):
- step.state.inputs[change_param] = change_value
+ for name, value in param_dict.items():
+ step.state.inputs[ name ] = value
if step.tool_errors:
trans.response.status = 400
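To make the payload change concrete: the 'parameters' entry of a run-workflow request can now carry several values per tool, optionally pinned to a single step. The tool id, parameter names and step id below are only illustrative:

    # Old style, still accepted: exactly one param/value pair per tool id.
    parameters = { 'Cut1': { 'param': 'columnList', 'value': 'c1,c2' } }

    # New style: any number of 'name': 'value' entries, plus an optional 'step_id'
    # so that only the matching step is updated rather than every step using that tool.
    parameters = { 'Cut1': { 'step_id': '7', 'columnList': 'c1,c2', 'delimiter': 'T' } }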
https://bitbucket.org/galaxy/galaxy-central/commits/2c64d7fe6a78/
Changeset: 2c64d7fe6a78
User: jgoecks
Date: 2013-12-24 02:26:25
Summary: Automated merge.
Affected #: 2 files
diff -r 4961f4e8794729f899a2901caf613201c7bb9f7a -r 2c64d7fe6a7827b8d2b870c557ffc4d7acc24eca lib/tool_shed/util/container_util.py
--- a/lib/tool_shed/util/container_util.py
+++ b/lib/tool_shed/util/container_util.py
@@ -1368,11 +1368,11 @@
# 'tool_dependencies':
# [{'installation_directory': 'some path' 'type': 'package', 'name': 'MIRA', 'version': '4.0'}]
# }
- current_repository_successful_installation_dicts = successful_installation_dict.get( 'current_repository', [] )
+ # We won't display the current repository in this container. If it is not displaying installation errors,
+ # then it must be a successful installation.
repository_dependency_successful_installation_dicts = successful_installation_dict.get( 'repository_dependencies', [] )
tool_dependency_successful_installation_dicts = successful_installation_dict.get( 'tool_dependencies', [] )
- if len( current_repository_successful_installation_dicts ) > 0 or \
- len( repository_dependency_successful_installation_dicts ) > 0 or \
+ if len( repository_dependency_successful_installation_dicts ) > 0 or \
len( tool_dependency_successful_installation_dicts ) > 0:
repository_installation_success_id = 0
folder_id += 1
diff -r 4961f4e8794729f899a2901caf613201c7bb9f7a -r 2c64d7fe6a7827b8d2b870c557ffc4d7acc24eca lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -56,7 +56,7 @@
if str( tool_dependency.status ) != str( status ):
debug_msg = 'Updating an existing record for version %s of tool dependency %s for revision %s of repository %s ' % \
( str( version ), str( name ), str( tool_shed_repository.changeset_revision ), str( tool_shed_repository.name ) )
- debug_msg += 'by updating the status from %s to %s.' % str( tool_dependency.status ), str( status )
+ debug_msg += 'by updating the status from %s to %s.' % ( str( tool_dependency.status ), str( status ) )
log.debug( debug_msg )
tool_dependency.status = status
context.add( tool_dependency )
@@ -65,7 +65,7 @@
# Create a new tool_dependency record for the tool_shed_repository.
debug_msg = 'Creating a new record for version %s of tool dependency %s for revision %s of repository %s. ' % \
( str( version ), str( name ), str( tool_shed_repository.changeset_revision ), str( tool_shed_repository.name ) )
- debug_msg += 'The statis is being set to %s.' % str( status )
+ debug_msg += 'The status is being set to %s.' % str( status )
log.debug( debug_msg )
tool_dependency = app.install_model.ToolDependency( tool_shed_repository.id, name, version, type, status )
context.add( tool_dependency )
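The tool_dependency_util change above fixes an operator-precedence slip: '%' binds tighter than the comma, so the unparenthesized form only passed the first value to the format string and failed at runtime. A short illustration:

    old, new = 'Installing', 'Installed'
    debug_msg = 'Updating an existing tool dependency record '

    # Broken form (pre-fix): '... %s to %s.' % str( old ) raises
    # "not enough arguments for format string"; even with a single placeholder the
    # trailing comma would make the right-hand side a tuple, so the += would still fail.
    # debug_msg += 'by updating the status from %s to %s.' % str( old ), str( new )

    # Fixed form, as in the changeset: pass both values as one tuple.
    debug_msg += 'by updating the status from %s to %s.' % ( str( old ), str( new ) )
    print debug_msg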
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Minor fixes for the tool shed's install and test framework.
by commits-noreply@bitbucket.org 23 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ee86b02e26ab/
Changeset: ee86b02e26ab
User: greg
Date: 2013-12-23 13:15:57
Summary: Minor fixes for the tool shed's install and test framework.
Affected #: 2 files
diff -r 31f4f354b66105d5c2bedc36d7fd80b37327b75e -r ee86b02e26ab788122ec940f1f28c51208385cc2 lib/tool_shed/util/container_util.py
--- a/lib/tool_shed/util/container_util.py
+++ b/lib/tool_shed/util/container_util.py
@@ -1368,11 +1368,11 @@
# 'tool_dependencies':
# [{'installation_directory': 'some path' 'type': 'package', 'name': 'MIRA', 'version': '4.0'}]
# }
- current_repository_successful_installation_dicts = successful_installation_dict.get( 'current_repository', [] )
+ # We won't display the current repository in this container. If it is not displaying installation errors,
+ # then it must be a successful installation.
repository_dependency_successful_installation_dicts = successful_installation_dict.get( 'repository_dependencies', [] )
tool_dependency_successful_installation_dicts = successful_installation_dict.get( 'tool_dependencies', [] )
- if len( current_repository_successful_installation_dicts ) > 0 or \
- len( repository_dependency_successful_installation_dicts ) > 0 or \
+ if len( repository_dependency_successful_installation_dicts ) > 0 or \
len( tool_dependency_successful_installation_dicts ) > 0:
repository_installation_success_id = 0
folder_id += 1
diff -r 31f4f354b66105d5c2bedc36d7fd80b37327b75e -r ee86b02e26ab788122ec940f1f28c51208385cc2 lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -56,7 +56,7 @@
if str( tool_dependency.status ) != str( status ):
debug_msg = 'Updating an existing record for version %s of tool dependency %s for revision %s of repository %s ' % \
( str( version ), str( name ), str( tool_shed_repository.changeset_revision ), str( tool_shed_repository.name ) )
- debug_msg += 'by updating the status from %s to %s.' % str( tool_dependency.status ), str( status )
+ debug_msg += 'by updating the status from %s to %s.' % ( str( tool_dependency.status ), str( status ) )
log.debug( debug_msg )
tool_dependency.status = status
context.add( tool_dependency )
@@ -65,7 +65,7 @@
# Create a new tool_dependency record for the tool_shed_repository.
debug_msg = 'Creating a new record for version %s of tool dependency %s for revision %s of repository %s. ' % \
( str( version ), str( name ), str( tool_shed_repository.changeset_revision ), str( tool_shed_repository.name ) )
- debug_msg += 'The statis is being set to %s.' % str( status )
+ debug_msg += 'The status is being set to %s.' % str( status )
log.debug( debug_msg )
tool_dependency = app.install_model.ToolDependency( tool_shed_repository.id, name, version, type, status )
context.add( tool_dependency )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Workflows API, running workflows: (a) improve code documentation and (b) add option to specify step_id when setting parameters so that parameters are set for a single step rather than all steps with a given tool id.
by commits-noreply@bitbucket.org 23 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/31f4f354b661/
Changeset: 31f4f354b661
User: jgoecks
Date: 2013-12-23 01:24:37
Summary: Workflows API, running workflows: (a) improve code documentation and (b) add option to specify step_id when setting parameters so that parameters are set for a single step rather than all steps with a given tool id.
Affected #: 1 file
diff -r 9264df2bd5f015e7d8c011683b4f89e6beeacb4a -r 31f4f354b66105d5c2bedc36d7fd80b37327b75e lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -104,15 +104,15 @@
However, we will import them if installed_repository_file is specified
"""
- # ------------------------------------------------------------------------------- #
- ### RPARK: dictionary containing which workflows to change and edit ###
- param_map = {};
- if (payload.has_key('parameters') ):
- param_map = payload['parameters'];
- # ------------------------------------------------------------------------------- #
+ # Pull parameters out of payload.
+ workflow_id = payload['workflow_id']
+ param_map = payload.get('parameters', {})
+ ds_map = payload['ds_map']
+ add_to_history = 'no_add_to_history' not in payload
+ history_param = payload['history']
-
- if 'workflow_id' not in payload:
+ # Get/create workflow.
+ if not workflow_id:
# create new
if 'installed_repository_file' in payload:
workflow_controller = trans.webapp.controllers[ 'workflow' ]
@@ -125,26 +125,31 @@
if 'installed_repository_file' in payload:
trans.response.status = 403
return "installed_repository_file may not be specified with workflow_id"
+
+ # Get workflow + accessibility check.
stored_workflow = trans.sa_session.query(self.app.model.StoredWorkflow).get(
- trans.security.decode_id(payload['workflow_id']))
+ trans.security.decode_id(workflow_id))
if stored_workflow.user != trans.user and not trans.user_is_admin():
if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
trans.response.status = 400
return("Workflow is not owned by or shared with current user")
workflow = stored_workflow.latest_workflow
- if payload['history'].startswith('hist_id='):
+
+ # Get target history.
+ if history_param.startswith('hist_id='):
#Passing an existing history to use.
history = trans.sa_session.query(self.app.model.History).get(
- trans.security.decode_id(payload['history'][8:]))
+ trans.security.decode_id(history_param[8:]))
if history.user != trans.user and not trans.user_is_admin():
trans.response.status = 400
return "Invalid History specified."
else:
- history = self.app.model.History(name=payload['history'], user=trans.user)
+ # Send workflow outputs to new history.
+ history = self.app.model.History(name=history_param, user=trans.user)
trans.sa_session.add(history)
trans.sa_session.flush()
- ds_map = payload['ds_map']
- add_to_history = 'no_add_to_history' not in payload
+
+ # Set workflow inputs.
for k in ds_map:
try:
if ds_map[k]['src'] == 'ldda':
@@ -172,6 +177,8 @@
except AssertionError:
trans.response.status = 400
return "Invalid Dataset '%s' Specified" % ds_map[k]['id']
+
+ # Sanity checks.
if not workflow:
trans.response.status = 400
return "Workflow not found."
@@ -184,6 +191,7 @@
if workflow.has_errors:
trans.response.status = 400
return "Workflow cannot be run because of validation errors in some steps"
+
# Build the state for each step
rval = {}
for step in workflow.steps:
@@ -197,16 +205,18 @@
step.module.add_dummy_datasets( connections=step.input_connections )
step.state = step.module.state
- ####################################################
- ####################################################
- # RPARK: IF TOOL_NAME IN PARAMETER MAP #
+ # Update step parameters as directed by payload's parameter mapping.
if step.tool_id in param_map:
+ # Get parameter settings.
change_param = param_map[step.tool_id]['param'];
change_value = param_map[step.tool_id]['value'];
- step.state.inputs[change_param] = change_value;
- ####################################################
- ####################################################
+ step_id = param_map[step.tool_id].get('step_id', '')
+ # Update step if there's no step id (i.e. all steps with tool are
+ # updated) or update if step ids match.
+ if not step_id or ( step_id and int( step_id ) == step.id ):
+ step.state.inputs[change_param] = change_value
+
if step.tool_errors:
trans.response.status = 400
return "Workflow cannot be run because of validation errors in some steps: %s" % step_errors
@@ -221,6 +231,7 @@
step.module = module_factory.from_workflow_step( trans, step )
step.state = step.module.get_runtime_state()
step.input_connections_by_name = dict( ( conn.input_name, conn ) for conn in step.input_connections )
+
# Run each step, connecting outputs to inputs
workflow_invocation = self.app.model.WorkflowInvocation()
workflow_invocation.workflow = workflow
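For reference, a POST /api/workflows body built from the keys this handler now reads up front might look like the following; every id and name here is a placeholder:

    payload = {
        'workflow_id': 'ebfb8f50c6abde6d',       # encoded StoredWorkflow id
        'history': 'hist_id=1cd8e2f6b131e891',   # or a plain name to create a new history
        'ds_map': {
            # input step id -> dataset source ('hda', 'ldda', ...) and encoded dataset id
            '1': { 'src': 'hda', 'id': 'a799d38679e985db' },
        },
        'parameters': { 'Cut1': { 'columnList': 'c1,c2' } },
        # include a 'no_add_to_history' key (its presence is what is checked) to skip
        # copying input datasets into the target history
    }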
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Improve logging when creating or updating tool dependency records installed into Galaxy.
by commits-noreply@bitbucket.org 22 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9264df2bd5f0/
Changeset: 9264df2bd5f0
User: greg
Date: 2013-12-22 14:13:19
Summary: Improve logging when creating or updating tool dependency records installed into Galaxy.
Affected #: 2 files
diff -r 37ddaa4b6a112c776b5ac011464b680c9cb50545 -r 9264df2bd5f015e7d8c011683b4f89e6beeacb4a lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -88,23 +88,29 @@
# the path defined by required_tool_dependency_env_file_path. It doesn't matter if the required env.sh
# file currently exists..
required_tool_dependency_env_file_path = \
- tool_dependency_util.get_required_repository_package_env_sh_path( app, package_name, package_version, required_repository )
+ tool_dependency_util.get_required_repository_package_env_sh_path( app,
+ package_name,
+ package_version,
+ required_repository )
env_file_builder = fabric_util.EnvFileBuilder( tool_dependency.installation_directory( app ) )
env_file_builder.append_line( action="source", value=required_tool_dependency_env_file_path )
return_code = env_file_builder.return_code
if return_code:
- error_message = 'Error defining env.sh file for package %s, return_code: %s' % ( str( package_name ), str( return_code ) )
- tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app,
- tool_dependency,
- error_message,
- remove_installation_path=False )
+ error_message = 'Error defining env.sh file for package %s, return_code: %s' % \
+ ( str( package_name ), str( return_code ) )
+ tool_dependency = \
+ tool_dependency_util.handle_tool_dependency_installation_error( app,
+ tool_dependency,
+ error_message,
+ remove_installation_path=False )
elif required_tool_dependency is not None and required_tool_dependency.in_error_state:
error_message = "This tool dependency's required tool dependency %s version %s has status %s." % \
( str( required_tool_dependency.name ), str( required_tool_dependency.version ), str( required_tool_dependency.status ) )
- tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app,
- tool_dependency,
- error_message,
- remove_installation_path=False )
+ tool_dependency = \
+ tool_dependency_util.handle_tool_dependency_installation_error( app,
+ tool_dependency,
+ error_message,
+ remove_installation_path=False )
else:
tool_dependency = \
tool_dependency_util.set_tool_dependency_attributes( app,
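For context on the env.sh handling being re-wrapped above, here is a rough sketch of how the EnvFileBuilder calls fit together; the paths are placeholders and this only makes sense inside a Galaxy checkout where fabric_util is importable:

    from tool_shed.galaxy_install.tool_dependencies import fabric_util

    installation_directory = '/deps/MIRA/4.0/iuc/package_mira_4_0/abc123'                 # placeholder
    required_env_file_path = '/deps/samtools/0.1.19/iuc/package_samtools/def456/env.sh'   # placeholder

    env_file_builder = fabric_util.EnvFileBuilder( installation_directory )
    # Append a 'source' line so this dependency inherits the required package's settings.
    env_file_builder.append_line( action='source', value=required_env_file_path )
    if env_file_builder.return_code:
        # A non-zero return code means env.sh could not be written; install_util then
        # records an installation error for the tool dependency instead of proceeding.
        print 'Error defining env.sh file, return_code: %s' % env_file_builder.return_code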
diff -r 37ddaa4b6a112c776b5ac011464b680c9cb50545 -r 9264df2bd5f015e7d8c011683b4f89e6beeacb4a lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -51,18 +51,25 @@
else:
tool_dependency = get_tool_dependency_by_name_type_repository( app, tool_shed_repository, name, type )
if tool_dependency:
- log.debug( 'Updating an existing record for tool dependency %s version %s for tool shed repository %s with changeset revision %s.' %
- ( str( name ), str( version ), str( tool_shed_repository.name ), str( tool_shed_repository.changeset_revision ) ) )
# In some cases we should not override the current status of an existing tool_dependency, so do so only if set_status is True.
if set_status:
+ if str( tool_dependency.status ) != str( status ):
+ debug_msg = 'Updating an existing record for version %s of tool dependency %s for revision %s of repository %s ' % \
+ ( str( version ), str( name ), str( tool_shed_repository.changeset_revision ), str( tool_shed_repository.name ) )
+ debug_msg += 'by updating the status from %s to %s.' % str( tool_dependency.status ), str( status )
+ log.debug( debug_msg )
tool_dependency.status = status
+ context.add( tool_dependency )
+ context.flush()
else:
# Create a new tool_dependency record for the tool_shed_repository.
- log.debug( 'Creating a new record for tool dependency %s version %s for tool shed repository %s with changeset revision %s.' %
- ( str( name ), str( version ), str( tool_shed_repository.name ), str( tool_shed_repository.changeset_revision ) ) )
+ debug_msg = 'Creating a new record for version %s of tool dependency %s for revision %s of repository %s. ' % \
+ ( str( version ), str( name ), str( tool_shed_repository.changeset_revision ), str( tool_shed_repository.name ) )
+ debug_msg += 'The statis is being set to %s.' % str( status )
+ log.debug( debug_msg )
tool_dependency = app.install_model.ToolDependency( tool_shed_repository.id, name, version, type, status )
- context.add( tool_dependency )
- context.flush()
+ context.add( tool_dependency )
+ context.flush()
return tool_dependency
def create_tool_dependency_objects( app, tool_shed_repository, relative_install_dir, set_status=True ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Log the location of the exclude list file for the tool shed's install and test framework.
by commits-noreply@bitbucket.org 22 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/37ddaa4b6a11/
Changeset: 37ddaa4b6a11
User: greg
Date: 2013-12-22 05:01:55
Summary: Log the location of the exclude list file for the tool shed's install and test framework.
Affected #: 2 files
diff -r 12cbce6f1a20d8eecabc4ef50ca4d44c89eea505 -r 37ddaa4b6a112c776b5ac011464b680c9cb50545 test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
@@ -64,7 +64,7 @@
# the database, new repositories, etc.
galaxy_test_tmp_dir = os.path.join( test_home_directory, 'tmp' )
# File containing information about problematic repositories to exclude from test runs.
-exclude_list_file = os.path.join( test_home_directory, 'exclude.xml' )
+exclude_list_file = os.path.abspath( os.path.join( test_home_directory, 'exclude.xml' ) )
default_galaxy_locales = 'en'
default_galaxy_test_file_dir = "test-data"
os.environ[ 'GALAXY_INSTALL_TEST_TMP_DIR' ] = galaxy_test_tmp_dir
@@ -145,6 +145,7 @@
if error_message:
return None, error_message
# Handle repositories not to be tested.
+ log.debug( 'The exclude list file is defined as %s' % str( exclude_list_file ) )
if os.path.exists( exclude_list_file ):
log.debug( 'Loading the list of repositories excluded from testing from the file %s...' % str( exclude_list_file ) )
# The following exclude_list will look something like this:
@@ -153,6 +154,7 @@
# ( name, owner, changeset_revision if changeset_revision else None )]}]
exclude_list_dicts = install_and_test_base_util.parse_exclude_list( exclude_list_file )
else:
+ log.debug( 'The exclude list file %s does not exist, so no repositories will be excluded from testing.' % str( exclude_list_file ) )
exclude_list_dicts = []
# Generate a test method that will use Twill to install each repository into the embedded Galaxy application that was
# started up, installing repository and tool dependencies. Upon successful installation, generate a test case for each
diff -r 12cbce6f1a20d8eecabc4ef50ca4d44c89eea505 -r 37ddaa4b6a112c776b5ac011464b680c9cb50545 test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
@@ -62,7 +62,7 @@
# the database, new repositories, etc.
galaxy_test_tmp_dir = os.path.join( test_home_directory, 'tmp' )
# File containing information about problematic repositories to exclude from test runs.
-exclude_list_file = os.path.join( test_home_directory, 'exclude.xml' )
+exclude_list_file = os.path.abspath( os.path.join( test_home_directory, 'exclude.xml' ) )
default_galaxy_locales = 'en'
default_galaxy_test_file_dir = "test-data"
os.environ[ 'GALAXY_INSTALL_TEST_TMP_DIR' ] = galaxy_test_tmp_dir
@@ -83,6 +83,7 @@
install_and_test_base_util.get_repositories_to_install( install_and_test_base_util.galaxy_tool_shed_url, test_framework )
if error_message:
return None, error_message
+ log.debug( 'The exclude list file is defined as %s' % str( exclude_list_file ) )
if os.path.exists( exclude_list_file ):
log.debug( 'Loading the list of repositories excluded from testing from the file %s...' % str( exclude_list_file ) )
# The following exclude_list will look something like this:
@@ -91,6 +92,7 @@
# ( name, owner, changeset_revision if changeset_revision else None )]}]
exclude_list_dicts = install_and_test_base_util.parse_exclude_list( exclude_list_file )
else:
+ log.debug( 'The exclude list file %s does not exist, so no repositories will be excluded from testing.' % str( exclude_list_file ) )
exclude_list_dicts = []
# Generate a test method that will use Twill to install each repository into the embedded Galaxy application that was
# started up, installing repository and tool dependencies. Upon successful installation, generate a test case for each
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/12cbce6f1a20/
Changeset: 12cbce6f1a20
User: greg
Date: 2013-12-22 02:39:36
Summary: Fix for my last commit.
Affected #: 1 file
diff -r 23d547cfc2d1007e7c82e8f018d087c18642dc49 -r 12cbce6f1a20d8eecabc4ef50ca4d44c89eea505 test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
@@ -197,7 +197,7 @@
( changeset_revision, name, owner ) )
else:
# See if the repository was installed in a previous test.
- repository = install_and_test_base_utilget_repository( name, owner, changeset_revision )
+ repository = install_and_test_base_util.get_repository( name, owner, changeset_revision )
if repository is None:
# The repository was not previously installed, so install it now.
tool_test_results_dict = install_and_test_base_util.initialize_tool_tests_results_dict( app, tool_test_results_dict )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fix prep scripts for the tool shed's install and test framework.
by commits-noreply@bitbucket.org 22 Dec '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/23d547cfc2d1/
Changeset: 23d547cfc2d1
User: greg
Date: 2013-12-22 02:31:18
Summary: Fix prep scripts for the tool shed's install and test framework.
Affected #: 2 files
diff -r 0f70f0b461678cd8224861857055f3eaf7646b05 -r 23d547cfc2d1007e7c82e8f018d087c18642dc49 test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
@@ -197,7 +197,7 @@
( changeset_revision, name, owner ) )
else:
# See if the repository was installed in a previous test.
- repository = get_repository( name, owner, changeset_revision )
+ repository = install_and_test_base_utilget_repository( name, owner, changeset_revision )
if repository is None:
# The repository was not previously installed, so install it now.
tool_test_results_dict = install_and_test_base_util.initialize_tool_tests_results_dict( app, tool_test_results_dict )
diff -r 0f70f0b461678cd8224861857055f3eaf7646b05 -r 23d547cfc2d1007e7c82e8f018d087c18642dc49 test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
@@ -135,7 +135,7 @@
( changeset_revision, name, owner ) )
else:
# See if the repository was installed in a previous test.
- repository = get_repository( name, owner, changeset_revision )
+ repository = install_and_test_base_util.get_repository( name, owner, changeset_revision )
if repository is None:
# The repository was not previously installed, so install it now.
tool_test_results_dict = install_and_test_base_util.initialize_tool_tests_results_dict( app, tool_test_results_dict )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/