Dear all,

I'm getting the following error when trying to follow the instructions in the API README file. universe_wsgi.ini is pasted below.

[galaxy@zeus api]$ ./display.py c43c7605ba60633a2ed60f46c12f354e http://zeus.ex.ac.uk:4000/api/libraries
<urlopen error (111, 'Connection refused')>

Any suggestions would be very welcome.

All the very best,

Konrad

Dr Konrad Paszkiewicz
Exeter Sequencing Service & Exeter Bioinformatics Service
Biosciences, College of Life and Environmental Sciences
University of Exeter
Exeter EX4 4QD
http://biosciences.exeter.ac.uk/facilities/sequencing/illumina/
http://biosciences.exeter.ac.uk/facilities/exbiss/

#
# Galaxy is configured by default to be useable in a single-user development
# environment. To tune the application for a multi-user production
# environment, see the documentation at:
#
# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/ProductionServer
#
# Throughout this sample configuration file, except where stated otherwise,
# uncommented values override the default if left unset, whereas commented
# values are set to the default value.
# Examples of many of these options are explained in more detail in the wiki:
#
# Config hackers are encouraged to check there before asking for help.

# ---- HTTP Server ----------------------------------------------------------

# Configuration of the internal HTTP server.

[server:main]

# The internal HTTP server to use. Currently only Paste is provided. This
# option is required.
use = egg:Paste#http

# The port on which to listen.
port = 4000
#port = 80

# The address on which to listen. By default, only listen to localhost (Galaxy
# will not be accessible over the network). Use '0.0.0.0' to listen on all
# available network interfaces.
host = 144.173.143.28

# Use a threadpool for the web server instead of creating a thread for each
# request.
use_threadpool = True

# Number of threads in the web server thread pool.
#threadpool_workers = 10

# ---- Filters --------------------------------------------------------------

# Filters sit between Galaxy and the HTTP server.

# These filters are disabled by default. They can be enabled with
# 'filter-with' in the [app:main] section below.

# Define the gzip filter.
[filter:gzip]
use = egg:Paste#gzip

# Define the proxy-prefix filter.
[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
prefix = /galaxy

# ---- Galaxy ---------------------------------------------------------------

# Configuration of the Galaxy application.

[app:main]

# -- Application and filtering

# The factory for the WSGI application. This should not be changed.
paste.app_factory = galaxy.web.buildapp:app_factory

# If not running behind a proxy server, you may want to enable gzip compression
# to decrease the size of data transferred over the network. If using a proxy
# server, please enable gzip compression there instead.
#filter-with = gzip

# If running behind a proxy server and Galaxy is served from a subdirectory,
# enable the proxy-prefix filter and set the prefix in the
# [filter:proxy-prefix] section above.
#filter-with =

# If proxy-prefix is enabled and you're running more than one Galaxy instance
# behind one hostname, you will want to set this to the same path as the prefix
# in the filter above. This value becomes the "path" attribute set in the
# cookie so the cookies from each instance will not clobber each other.
#cookie_path = None

# -- Database

# By default, Galaxy uses a SQLite database at 'database/universe.sqlite'.
# You may use a SQLAlchemy connection string to specify an external database
# instead. This string takes many options which are explained in detail in the
# config file documentation.
database_connection = mysql://galaxy@localhost/galaxy?unix_socket=/var/lib/mysql/mysql.sock
#database_connection = sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE

# If the server logs errors about not having enough database pool connections,
# you will want to increase these values, or consider running more Galaxy
# processes.
#database_engine_option_pool_size = 5
#database_engine_option_max_overflow = 10

# If using MySQL and the server logs the error "MySQL server has gone away",
# you will want to set this to some positive value (7200 should work).
#database_engine_option_pool_recycle = -1

# If large database query results are causing memory or response time issues in
# the Galaxy process, leave the result on the server instead. This option is
# only available for PostgreSQL and is highly recommended.
#database_engine_option_server_side_cursors = False

# Create only one connection to the database per thread, to reduce the
# connection overhead. Recommended when not using SQLite:
#database_engine_option_strategy = threadlocal

# Log all database transactions, can be useful for debugging and performance
# profiling. Logging is done via Python's 'logging' module under the qualname
# 'galaxy.model.orm.logging_connection_proxy'
#database_query_profiling_proxy = False

# -- Files and directories

# Dataset files are stored in this directory.
#file_path = database/files

# Temporary files are stored in this directory.
#new_file_path = database/tmp

# Tool config file, defines what tools are available in Galaxy.
#tool_config_file = tool_conf.xml

# Path to the directory containing the tools defined in the config.
#tool_path = tools

# Directory where data used by tools is located, see the samples in that
# directory and the wiki for help:
# http://bitbucket.org/galaxy/galaxy-central/wiki/DataIntegration
#tool_data_path = tool-data

# Datatypes config file, defines what data (file) types are available in
# Galaxy.
#datatypes_config_file = datatypes_conf.xml

# Each job is given a unique empty directory as its current working directory.
# This option defines in what parent directory those directories will be
# created.
#job_working_directory = database/job_working_directory

# If using a cluster, Galaxy will write job scripts and stdout/stderr to this
# directory.
#cluster_files_directory = database/pbs

# Sequencer types config file, defines what types of sequencer configurations
# are available in Galaxy.
sequencer_type_config_file = sequencer_types_conf.xml

# Path to the directory containing the sequencer_types defined in the config.
sequencer_type_path = sequencer_types

# -- Mail and notification

# Galaxy sends mail for various things: Subscribing users to the mailing list
# if they request it, emailing password resets, notification from the Galaxy
# Sample Tracking system, and reporting dataset errors. To do this, it needs
# to send mail through an SMTP server, which you may define here.
smtp_server = mail.ex.ac.uk

# On the user registration form, users may choose to join the mailing list.
# This is the address of the list they'll be subscribed to.
#mailing_join_addr = galaxy-user-join@bx.psu.edu

# Datasets in an error state include a link to report the error. Those reports
# will be sent to this address. Error reports are disabled if no address is set.
#error_email_to = None

# -- Display sites

# Galaxy can display data at various external browsers. These options specify
# which browsers should be available. URLs and builds available at these
# browsers are defined in the specified files.

# UCSC browsers: tool-data/shared/ucsc/ucsc_build_sites.txt
#ucsc_display_sites = main,test,archaea,ucla

# GBrowse servers: tool-data/shared/gbrowse/gbrowse_build_sites.txt
#gbrowse_display_sites = wormbase,tair,modencode_worm,modencode_fly

# GeneTrack servers: tool-data/shared/genetrack/genetrack_sites.txt
#genetrack_display_sites = main,test

# -- UI Localization

# Append "/{brand}" to the "Galaxy" text in the masthead.
brand = Exeter SysBio

# The URL linked by the "Galaxy/brand" text.
#logo_url = /

# The URL linked by the "Galaxy Wiki" link in the "Help" menu.
#wiki_url = http://bitbucket.org/galaxy/galaxy-central/wiki

# The URL linked by the "Email comments..." link in the "Help" menu.
#bugs_email = mailto:galaxy-bugs@bx.psu.edu

# The URL linked by the "How to Cite..." link in the "Help" menu.
#citation_url = http://bitbucket.org/galaxy/galaxy-central/wiki/Citations

# Serve static content, which must be enabled if you're not serving it via a
# proxy server. These options should be self explanatory and so are not
# documented individually. You can use these paths (or ones in the proxy
# server) to point to your own styles.
static_enabled = True
static_cache_time = 360
static_dir = %(here)s/static/
static_images_dir = %(here)s/static/images
static_favicon_dir = %(here)s/static/favicon.ico
static_scripts_dir = %(here)s/static/scripts/
static_style_dir = %(here)s/static/june_2007_style/blue

# -- Advanced proxy features

# For help on configuring the Advanced proxy features, see:
# http://usegalaxy.org/production

# Apache can handle file downloads (Galaxy-to-user) via mod_xsendfile. Set
# this to True to inform Galaxy that mod_xsendfile is enabled upstream.
#apache_xsendfile = False

# The same download handling can be done by nginx using X-Accel-Redirect. This
# should be set to the path defined in the nginx config as an internal redirect
# with access to Galaxy's data files (see documentation linked above).
#nginx_x_accel_redirect_base = False

# nginx can make use of mod_zip to create zip files containing multiple library
# files. If using X-Accel-Redirect, this can be the same value as that option.
#nginx_x_archive_files_base = False

# If using compression in the upstream proxy server, use this option to disable
# gzipping of library .tar.gz and .zip archives, since the proxy server will do
# it faster on the fly.
#upstream_gzip = False

# nginx can also handle file uploads (user-to-Galaxy) via nginx_upload_module.
# Configuration for this is complex and explained in detail in the
# documentation linked above. The upload store is a temporary directory in
# which files uploaded by the upload module will be placed.
#nginx_upload_store = False

# This value overrides the action set on the file upload form, e.g. the web
# path where the nginx_upload_module has been configured to intercept upload
# requests.
#nginx_upload_path = False

# -- Logging and Debugging

# Settings for Next gen LIMS interface on top of existing Galaxy Sample/Request
# management code.
use_nglims=True
nglims_config_file=tool-data/nglims.yaml

# Verbosity of console log messages. Acceptable values can be found here:
# http://docs.python.org/library/logging.html#logging-levels
log_level = DEBUG

# Print database operations to the server log (warning, quite verbose!).
database_engine_option_echo = False

# Print database pool operations to the server log (warning, quite verbose!).
database_engine_option_echo_pool = False

# Turn on logging of application events and some user events to the database.
log_events = False

# Turn on logging of user actions to the database. Actions currently logged are
# grid views, tool searches, and use of "recently" used tools menu. The
# log_events and log_actions functionality will eventually be merged.
log_actions = False

# Debug enables access to various config options useful for development and
# debugging: use_lint, use_profile, use_printdebug and use_interactive. It
# also causes the files used by PBS/SGE (submission script, output, and error)
# to remain on disk after the job is complete. Debug mode is disabled if
# commented, but is uncommented by default in the sample config.
debug = True

# Check for WSGI compliance.
#use_lint = False

# Run the Python profiler on each request.
#use_profile = False

# Intercept print statements and show them on the returned page.
#use_printdebug = True

# Enable live debugging in your browser. This should NEVER be enabled on a
# public site. Enabled in the sample config for development.
use_interactive = False

# Write thread status periodically to 'heartbeat.log' (careful, uses disk
# space rapidly!). Useful to determine why your processes may be consuming a
# lot of CPU.
#use_heartbeat = False

# Enable the memory debugging interface (careful, negatively impacts server
# performance).
#use_memdump = False

# -- Data Libraries

# These library upload options are described in much more detail in the wiki:
# http://bitbucket.org/galaxy/galaxy-central/wiki/DataLibraries/UploadingFiles

# Add an option to the library upload form which allows administrators to
# upload a directory of files.
library_import_dir = /users/galaxy/test

# Add an option to the library upload form which allows authorized
# non-administrators to upload a directory of files. The configured directory
# must contain sub-directories named the same as the non-admin user's Galaxy
# login ( email ). The non-admin user is restricted to uploading files or
# sub-directories of files contained in their directory.
#user_library_import_dir = None

# Add an option to the admin library upload tool allowing admins to paste
# filesystem paths to files and directories in a box, and these paths will be
# added to a library. Set to True to enable. Please note the security
# implication that this will give Galaxy Admins access to anything your Galaxy
# user has access to.
allow_library_path_paste = True

# Users may choose to download multiple files from a library in an archive. By
# default, Galaxy allows users to select from a few different archive formats
# if testing shows that Galaxy is able to create files using these formats.
# Specific formats can be disabled with this option, separate more than one
# format with commas. Available formats are currently 'zip', 'gz', and 'bz2'.
#disable_library_comptypes =

# Some sequencer integration features in beta allow you to automatically
# transfer datasets. This is done using a lightweight transfer manager which
# runs outside of Galaxy (but is spawned by it automatically). Galaxy will
# communicate with this manager over the port specified here.
transfer_manager_port = 5672

# -- Users and Security

# Galaxy encodes various internal values when these values will be output in
# some format (for example, in a URL or cookie).
# You should set a key to be used by the algorithm that encodes and decodes
# these values. It can be any string. If left unchanged, anyone could
# construct a cookie that would grant them access to others' sessions.
id_secret = Myoldman1928327

# User authentication can be delegated to an upstream proxy server (usually
# Apache). The upstream proxy should set a REMOTE_USER header in the request.
# Enabling remote user disables regular logins. For more information, see:
# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/ApacheProxy
#use_remote_user = False

# If use_remote_user is enabled and your external authentication
# method just returns bare usernames, set a default mail domain to be appended
# to usernames, to become your Galaxy usernames (email addresses).
#remote_user_maildomain = None

# If use_remote_user is enabled, you can set this to a URL that will log your
# users out.
#remote_user_logout_href = None

# Administrative users - set this to a comma-separated list of valid Galaxy
# users (email addresses). These users will have access to the Admin section
# of the server, and will have access to create users, groups, roles,
# libraries, and more. For more information, see:
# http://bitbucket.org/galaxy/galaxy-central/wiki/Admin/AdminInterface
admin_users = k.h.paszkiewicz@ex.ac.uk,test@gmail.com

# Force everyone to log in (disable anonymous access).
require_login = True

# Allow unregistered users to create new accounts (otherwise, they will have to
# be created by an admin).
allow_user_creation = True

# Email administrators when a new user account is created.
# You also need to have smtp_server set for this to work.
# new_user_email_admin = False

# Allow administrators to delete accounts.
#allow_user_deletion = False

# By default, users' data will be public, but setting this to True will cause
# it to be private. Does not affect existing users and data, only ones created
# after this option is set. Users may still change their default back to
# public.
new_user_dataset_access_role_default_private = True

# -- Beta features

# Enable Galaxy to communicate directly with a sequencer
enable_sequencer_communication = False

# Enable Galaxy's built-in visualization module, Trackster.
enable_tracks = True

# Enable Galaxy Pages. Pages are custom webpages that include embedded Galaxy items,
# such as datasets, histories, workflows, and visualizations; pages are useful for
# documenting and sharing multiple analyses or workflows. Pages are created using a
# WYSIWYG editor that is very similar to a word processor.
enable_pages = True

# Enable authentication via OpenID. Allows users to log in to their Galaxy
# account by authenticating with an OpenID provider.
#enable_openid = False

# Enable the (experimental! beta!) Web API. Documentation forthcoming.
enable_api = True

# Enable Galaxy's "Upload via FTP" interface. You'll need to install and
# configure an FTP server (we've used ProFTPd since it can use Galaxy's
# database for authentication) and set the following two options.

# This should point to a directory containing subdirectories matching users'
# email addresses, where Galaxy will look for files.
#ftp_upload_dir = None

# This should be the hostname of your FTP server, which will be provided to
# users in the help text.
#ftp_upload_site = None

# -- Job Execution

# If running multiple Galaxy processes, one can be designated as the job
# runner.
# For more information, see:
# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/WebApplicationScaling
enable_job_running = True

# Should jobs be tracked through the database, rather than in memory.
# Necessary if you're running the load balanced setup.
track_jobs_in_database = True

# This enables splitting of jobs into tasks, if specified by the particular tool config.
# This is a new feature and not recommended for production servers yet.
#use_tasked_jobs = False
#local_task_queue_workers = 2

# Enable job recovery (if Galaxy is restarted while cluster jobs are running,
# it can "recover" them when it starts). This is not safe to use if you are
# running more than one Galaxy server using the same database.
enable_job_recovery = True

# Set metadata on job outputs in a separate process (or, if using a cluster,
# on the cluster). Thanks to Python's Global Interpreter Lock and the
# hefty expense that setting metadata incurs, your Galaxy process may become
# unresponsive when this operation occurs internally.
#set_metadata_externally = False

# Although it is fairly reliable, setting metadata can occasionally fail. In
# these instances, you can choose to retry setting it internally or leave it in
# a failed state (since retrying internally may cause the Galaxy process to be
# unresponsive). If this option is set to False, the user will be given the
# option to retry externally, or set metadata manually (when possible).
#retry_metadata_internally = True

# If (for example) you run on a cluster and your datasets (by default,
# database/files/) are mounted read-only, this option will override tool output
# paths to write outputs to the working directory instead, and the job manager
# will move the outputs to their proper place in the dataset directory on the
# Galaxy server after the job completes.
outputs_to_working_directory = True

# Number of concurrent jobs to run (local job runner).
local_job_queue_workers = 3

# Jobs can be killed after a certain amount of execution time. Format is in
# hh:mm:ss. Currently only implemented for PBS.
#job_walltime = None

# Jobs can be killed if any of their outputs grow over a certain size (in
# bytes). 0 for no limit.
#output_size_limit = 0

# Clustering Galaxy is not a straightforward process and requires some
# pre-configuration. See the wiki before attempting to set any of these
# options:
# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster

# Comma-separated list of job runners to start. local is always started. If
# left commented, no jobs will be run on the cluster, even if a cluster URL is
# explicitly defined in the [galaxy:tool_runners] section below. The runners
# currently available are 'pbs' and 'drmaa'.
start_job_runners = drmaa

# The URL for the default runner to use when a tool doesn't explicitly define a
# runner below.
default_cluster_job_runner = drmaa:// -q serial.q/

# The cluster runners have their own thread pools used to prepare and finish
# jobs (so that these sometimes lengthy operations do not block normal queue
# operation). The value here is the number of worker threads available to each
# started runner.
cluster_job_queue_workers = 3

# These options are only used when using file staging with PBS.
#pbs_application_server =
#pbs_stage_path =
#pbs_dataset_server =

# ---- Tool Job Runners -----------------------------------------------------

# Individual per-tool job runner overrides. If not listed here, a tool will
# run with the runner defined with default_cluster_job_runner.
[galaxy:tool_runners]

biomart = local:///
encode_db1 = local:///
hbvar = local:///
microbial_import1 = local:///
ucsc_table_direct1 = local:///
ucsc_table_direct_archaea1 = local:///
ucsc_table_direct_test1 = local:///
upload1 = local:///

# ---- Galaxy Message Queue -------------------------------------------------

# Galaxy uses AMQ protocol to receive messages from external sources like
# bar code scanners. Galaxy has been tested against RabbitMQ AMQP implementation.
# For Galaxy to receive messages from a message queue the RabbitMQ server has
# to be set up with a user account and other parameters listed below. The 'host'
# and 'port' fields should point to where the RabbitMQ server is running.

[galaxy_amqp]

host = zeus.ex.ac.uk
port = 5672
userid = galaxy
password = galaxy
virtual_host = galaxy_messaging_engine
queue = galaxy_queue
exchange = galaxy_exchange
routing_key = bar_code_scanner
rabbitmqctl_path = /users/galaxy/rabbitmq_server-2.2.0/
api_key = c43c7605ba60633a2ed60f46c12f354e
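For anyone who wants to reproduce the failing call outside of display.py, below is a minimal sketch of what I believe the script is doing. This is not the Galaxy scripts/api code itself, and it assumes the API accepts the key as a ?key= query parameter (which is what display.py in the API README appears to do). It first checks whether a bare TCP connection to port 4000 is accepted at all, then issues the /api/libraries request, using the host, port and key quoted above:

import socket
import urllib.error
import urllib.request

API_KEY = "c43c7605ba60633a2ed60f46c12f354e"   # key from the failing command above
HOST, PORT = "zeus.ex.ac.uk", 4000             # host/port from the failing command

# 1. Bare TCP check: "Connection refused" at this level means nothing is
#    accepting connections on this host/port (Galaxy not running, listening
#    on a different address, or blocked by a firewall).
try:
    socket.create_connection((HOST, PORT), timeout=5).close()
    print("TCP connection to %s:%d succeeded" % (HOST, PORT))
except OSError as exc:
    print("TCP connection failed:", exc)

# 2. The API request itself, roughly equivalent to ./display.py <key> <url>,
#    assuming the key is passed as a query parameter.
url = "http://%s:%d/api/libraries?key=%s" % (HOST, PORT, API_KEY)
try:
    with urllib.request.urlopen(url, timeout=10) as response:
        print(response.read().decode("utf-8"))
except urllib.error.URLError as exc:
    print("API request failed:", exc)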