galaxy-commits
Threads by month
- ----- 2025 -----
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions

commit/galaxy-central: greg: Another fix for the ToolDataTableManager for handling configured directories.
by Bitbucket 14 Jun '12
by Bitbucket 14 Jun '12
14 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ea2fbfe6d9db/
changeset: ea2fbfe6d9db
user: greg
date: 2012-06-14 22:26:13
summary: Another fix for the ToolDataTableManager for handling configured directories.
affected #: 1 file
diff -r 6498eb3e409adb8ce71241f8e3b94bb23891f30f -r ea2fbfe6d9db5b9eac1830a7e9de1aea0dcf5806 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1151,7 +1151,10 @@
error = False
message = ''
try:
- new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( filename, app.config.tool_data_table_config_path, persist=persist )
+ new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( config_filename=filename,
+ tool_data_path=app.config.tool_data_path,
+ tool_data_table_config_path=app.config.tool_data_table_config_path,
+ persist=persist )
except Exception, e:
message = str( e )
error = True
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: greg: Fixes for recent changes to fix ToolDataTableManager to handle defined subdirectory locations in the Galaxy config.
by Bitbucket 14 Jun '12
by Bitbucket 14 Jun '12
14 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6498eb3e409a/
changeset: 6498eb3e409a
user: greg
date: 2012-06-14 21:46:36
summary: Fixes for recent changes to fix ToolDataTableManager to handle defined subdirectory locations in the Galaxy config.
affected #: 6 files
diff -r 19ed55acf0da8e8ed24f805035defab3009eec4f -r 6498eb3e409adb8ce71241f8e3b94bb23891f30f lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -74,7 +74,7 @@
# Genomes
self.genomes = Genomes( self )
# Tool data tables
- self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_table_config_path )
+ self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_path, self.config.tool_data_table_config_path )
# Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
tool_configs = self.config.tool_configs
if self.config.migrated_tools_config not in tool_configs:
diff -r 19ed55acf0da8e8ed24f805035defab3009eec4f -r 6498eb3e409adb8ce71241f8e3b94bb23891f30f lib/galaxy/jobs/deferred/genome_transfer.py
--- a/lib/galaxy/jobs/deferred/genome_transfer.py
+++ b/lib/galaxy/jobs/deferred/genome_transfer.py
@@ -33,7 +33,7 @@
self.app = app
self.tool = app.toolbox.tools_by_id['__GENOME_INDEX__']
self.sa_session = app.model.context.current
- tdtman = ToolDataTableManager()
+ tdtman = ToolDataTableManager( app.config.tool_data_path )
xmltree = tdtman.load_from_config_file( app.config.tool_data_table_config_path, app.config.tool_data_path )
for node in xmltree:
table = node.get('name')
diff -r 19ed55acf0da8e8ed24f805035defab3009eec4f -r 6498eb3e409adb8ce71241f8e3b94bb23891f30f lib/galaxy/jobs/deferred/liftover_transfer.py
--- a/lib/galaxy/jobs/deferred/liftover_transfer.py
+++ b/lib/galaxy/jobs/deferred/liftover_transfer.py
@@ -14,7 +14,6 @@
from galaxy.tools.parameters import visit_input_values
from galaxy.tools.parameters.basic import DataToolParameter
-from galaxy.tools.data import ToolDataTableManager
from galaxy.datatypes.checkers import *
diff -r 19ed55acf0da8e8ed24f805035defab3009eec4f -r 6498eb3e409adb8ce71241f8e3b94bb23891f30f lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -13,13 +13,14 @@
class ToolDataTableManager( object ):
"""Manages a collection of tool data tables"""
- def __init__( self, config_filename=None ):
+ def __init__( self, tool_data_path, config_filename=None ):
+ self.tool_data_path = tool_data_path
self.data_tables = {}
# Store config elements for on-the-fly persistence.
self.data_table_elems = []
self.data_table_elem_names = []
if config_filename:
- self.load_from_config_file( config_filename )
+ self.load_from_config_file( config_filename, self.tool_data_path )
def __getitem__( self, key ):
return self.data_tables.__getitem__( key )
def __contains__( self, key ):
diff -r 19ed55acf0da8e8ed24f805035defab3009eec4f -r 6498eb3e409adb8ce71241f8e3b94bb23891f30f lib/galaxy/tools/genome_index/__init__.py
--- a/lib/galaxy/tools/genome_index/__init__.py
+++ b/lib/galaxy/tools/genome_index/__init__.py
@@ -66,7 +66,7 @@
if gitd:
destination = None
- tdtman = ToolDataTableManager()
+ tdtman = ToolDataTableManager( app.config.tool_data_path )
xmltree = tdtman.load_from_config_file( app.config.tool_data_table_config_path, app.config.tool_data_path )
for node in xmltree:
table = node.get('name')
diff -r 19ed55acf0da8e8ed24f805035defab3009eec4f -r 6498eb3e409adb8ce71241f8e3b94bb23891f30f lib/galaxy/webapps/community/app.py
--- a/lib/galaxy/webapps/community/app.py
+++ b/lib/galaxy/webapps/community/app.py
@@ -37,7 +37,7 @@
# Tag handler
self.tag_handler = CommunityTagHandler()
# Tool data tables - never pass a config file here because the tool shed should always have an empty dictionary!
- self.tool_data_tables = galaxy.tools.data.ToolDataTableManager()
+ self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_path )
# The tool shed has no toolbox, but this attribute is still required.
self.toolbox = None
# Load security policy
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Refactoring and bug fixes for visualization framework.
by Bitbucket 14 Jun '12
by Bitbucket 14 Jun '12
14 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/19ed55acf0da/
changeset: 19ed55acf0da
user: jgoecks
date: 2012-06-14 21:00:30
summary: Refactoring and bug fixes for visualization framework.
affected #: 3 files
diff -r b761471d759097f03c0fb904e9543f02ed5096c9 -r 19ed55acf0da8e8ed24f805035defab3009eec4f lib/galaxy/web/api/tools.py
--- a/lib/galaxy/web/api/tools.py
+++ b/lib/galaxy/web/api/tools.py
@@ -1,12 +1,11 @@
-from galaxy import config, tools, web, util
-from galaxy.web.base.controller import BaseController, BaseAPIController, UsesHistoryDatasetAssociationMixin, messages, get_highest_priority_msg
-from galaxy.util.bunch import Bunch
+from galaxy import web, util
+from galaxy.web.base.controller import BaseAPIController, UsesHistoryDatasetAssociationMixin, UsesVisualizationMixin, messages, get_highest_priority_msg
from galaxy.visualization.tracks.visual_analytics import get_dataset_job
from galaxy.visualization.genomes import GenomeRegion
from galaxy.util.json import to_json_string, from_json_string
from galaxy.visualization.tracks.data_providers import *
-class ToolsController( BaseAPIController, UsesHistoryDatasetAssociationMixin ):
+class ToolsController( BaseAPIController, UsesHistoryDatasetAssociationMixin, UsesVisualizationMixin ):
"""
RESTful controller for interactions with tools.
"""
@@ -160,9 +159,12 @@
if not tool:
return messages.NO_TOOL
tool_params = dict( [ ( p.name, p.value ) for p in original_job.parameters ] )
+
+ # TODO: rather than set new inputs using dict of json'ed value, unpack parameters and set using set_param_value below.
# TODO: need to handle updates to conditional parameters; conditional
# params are stored in dicts (and dicts within dicts).
- tool_params.update( dict( [ ( key, value ) for key, value in kwargs.items() if key in tool.inputs ] ) )
+ new_inputs = payload[ 'inputs' ]
+ tool_params.update( dict( [ ( key, to_json_string( value ) ) for key, value in new_inputs.items() if key in tool.inputs and new_inputs[ key ] is not None ] ) )
tool_params = tool.params_from_strings( tool_params, self.app )
#
@@ -355,5 +357,7 @@
for joda in subset_job.output_datasets:
if joda.name == output_name:
output_dataset = joda.dataset
-
- return output_dataset.get_api_value()
+
+ dataset_dict = output_dataset.get_api_value()
+ dataset_dict[ 'track_config' ] = self.get_new_track_config( trans, output_dataset );
+ return dataset_dict
diff -r b761471d759097f03c0fb904e9543f02ed5096c9 -r 19ed55acf0da8e8ed24f805035defab3009eec4f lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -542,6 +542,32 @@
return config
+ def get_new_track_config( self, trans, dataset ):
+ """
+ Returns track configuration dict for a dataset.
+ """
+ # Get data provider.
+ track_type, _ = dataset.datatype.get_track_type()
+ track_data_provider_class = get_data_provider( original_dataset=dataset )
+ track_data_provider = track_data_provider_class( original_dataset=dataset )
+
+ if isinstance( dataset, trans.app.model.HistoryDatasetAssociation ):
+ hda_ldda = "hda"
+ elif isinstance( dataset, trans.app.model.LibraryDatasetDatasetAssociation ):
+ hda_ldda = "ldda"
+
+ # Get track definition.
+ return {
+ "track_type": track_type,
+ "name": dataset.name,
+ "hda_ldda": hda_ldda,
+ "dataset_id": trans.security.encode_id( dataset.id ),
+ "prefs": {},
+ "filters": { 'filters' : track_data_provider.get_filters() },
+ "tool": get_tool_def( trans, dataset ),
+ "tool_state": {}
+ }
+
def get_hda_or_ldda( self, trans, hda_ldda, dataset_id ):
""" Returns either HDA or LDDA for hda/ldda and id combination. """
if hda_ldda == "hda":
diff -r b761471d759097f03c0fb904e9543f02ed5096c9 -r 19ed55acf0da8e8ed24f805035defab3009eec4f lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -186,22 +186,7 @@
dataset_id = ldda_id
dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
- # Get data provider.
- track_type, _ = dataset.datatype.get_track_type()
- track_data_provider_class = get_data_provider( original_dataset=dataset )
- track_data_provider = track_data_provider_class( original_dataset=dataset )
-
- # Get track definition.
- return {
- "track_type": track_type,
- "name": dataset.name,
- "hda_ldda": hda_ldda,
- "dataset_id": trans.security.encode_id( dataset.id ),
- "prefs": {},
- "filters": { 'filters' : track_data_provider.get_filters() },
- "tool": get_tool_def( trans, dataset ),
- "tool_state": {}
- }
+ return self.get_new_track_config( trans, dataset )
@web.json
def bookmarks_from_dataset( self, trans, hda_id=None, ldda_id=None ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: greg: Fix the DataTableManager to handle directories other than ~/tool-data.
by Bitbucket 14 Jun '12
by Bitbucket 14 Jun '12
14 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b761471d7590/
changeset: b761471d7590
user: greg
date: 2012-06-14 20:33:21
summary: Fix the DataTableManager to handle directories other than ~/tool-data.
affected #: 4 files
diff -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e -r b761471d759097f03c0fb904e9543f02ed5096c9 lib/galaxy/jobs/deferred/genome_transfer.py
--- a/lib/galaxy/jobs/deferred/genome_transfer.py
+++ b/lib/galaxy/jobs/deferred/genome_transfer.py
@@ -34,7 +34,7 @@
self.tool = app.toolbox.tools_by_id['__GENOME_INDEX__']
self.sa_session = app.model.context.current
tdtman = ToolDataTableManager()
- xmltree = tdtman.load_from_config_file(app.config.tool_data_table_config_path)
+ xmltree = tdtman.load_from_config_file( app.config.tool_data_table_config_path, app.config.tool_data_path )
for node in xmltree:
table = node.get('name')
location = node.findall('file')[0].get('path')
diff -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e -r b761471d759097f03c0fb904e9543f02ed5096c9 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -24,7 +24,7 @@
return self.data_tables.__getitem__( key )
def __contains__( self, key ):
return self.data_tables.__contains__( key )
- def load_from_config_file( self, config_filename ):
+ def load_from_config_file( self, config_filename, tool_data_path ):
tree = util.parse_xml( config_filename )
root = tree.getroot()
table_elems = []
@@ -36,12 +36,12 @@
if table_elem_name and table_elem_name not in self.data_table_elem_names:
self.data_table_elem_names.append( table_elem_name )
self.data_table_elems.append( table_elem )
- table = tool_data_table_types[ type ]( table_elem )
+ table = tool_data_table_types[ type ]( table_elem, tool_data_path )
if table.name not in self.data_tables:
self.data_tables[ table.name ] = table
log.debug( "Loaded tool data table '%s'", table.name )
return table_elems
- def add_new_entries_from_config_file( self, config_filename, tool_data_table_config_path, persist=False ):
+ def add_new_entries_from_config_file( self, config_filename, tool_data_path, tool_data_table_config_path, persist=False ):
"""
This method is called when a tool shed repository that includes a tool_data_table_conf.xml.sample file is being
installed into a local galaxy instance. We have 2 cases to handle, files whose root tag is <tables>, for example:
@@ -64,7 +64,7 @@
# Make a copy of the current list of data_table_elem_names so we can persist later if changes to the config file are necessary.
original_data_table_elem_names = [ name for name in self.data_table_elem_names ]
if root.tag == 'tables':
- table_elems = self.load_from_config_file( config_filename )
+ table_elems = self.load_from_config_file( config_filename, tool_data_path )
else:
table_elems = []
type = root.get( 'type', 'tabular' )
@@ -74,7 +74,7 @@
if table_elem_name and table_elem_name not in self.data_table_elem_names:
self.data_table_elem_names.append( table_elem_name )
self.data_table_elems.append( root )
- table = tool_data_table_types[ type ]( root )
+ table = tool_data_table_types[ type ]( root, tool_data_path )
if table.name not in self.data_tables:
self.data_tables[ table.name ] = table
log.debug( "Added new tool data table '%s'", table.name )
@@ -97,8 +97,9 @@
os.chmod( full_path, 0644 )
class ToolDataTable( object ):
- def __init__( self, config_element ):
+ def __init__( self, config_element, tool_data_path ):
self.name = config_element.get( 'name' )
+ self.tool_data_path = tool_data_path
self.missing_index_file = None
class TabularToolDataTable( ToolDataTable ):
@@ -115,8 +116,8 @@
type_key = 'tabular'
- def __init__( self, config_element ):
- super( TabularToolDataTable, self ).__init__( config_element )
+ def __init__( self, config_element, tool_data_path ):
+ super( TabularToolDataTable, self ).__init__( config_element, tool_data_path )
self.configure_and_load( config_element )
def configure_and_load( self, config_element ):
"""
@@ -128,11 +129,24 @@
self.parse_column_spec( config_element )
# Read every file
all_rows = []
+ found = False
for file_element in config_element.findall( 'file' ):
filename = file_element.get( 'path' )
if os.path.exists( filename ):
+ found = True
all_rows.extend( self.parse_file_fields( open( filename ) ) )
else:
+ # Since the path attribute can include a hard-coded path to a specific directory
+ # (e.g., <file path="tool-data/cg_crr_files.loc" />) which may not be the same value
+ # as self.tool_data_path, we'll parse the path to get the filename and see if it is
+ # in self.tool_data_path.
+ file_path, file_name = os.path.split( filename )
+ if file_path and file_path != self.tool_data_path:
+ corrected_filename = os.path.join( self.tool_data_path, file_name )
+ if os.path.exists( corrected_filename ):
+ found = True
+ all_rows.extend( self.parse_file_fields( open( corrected_filename ) ) )
+ if not found:
self.missing_index_file = filename
log.warn( "Cannot find index file '%s' for tool data table '%s'" % ( filename, self.name ) )
self.data = all_rows
diff -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e -r b761471d759097f03c0fb904e9543f02ed5096c9 lib/galaxy/tools/genome_index/__init__.py
--- a/lib/galaxy/tools/genome_index/__init__.py
+++ b/lib/galaxy/tools/genome_index/__init__.py
@@ -67,7 +67,7 @@
if gitd:
destination = None
tdtman = ToolDataTableManager()
- xmltree = tdtman.load_from_config_file(app.config.tool_data_table_config_path)
+ xmltree = tdtman.load_from_config_file( app.config.tool_data_table_config_path, app.config.tool_data_path )
for node in xmltree:
table = node.get('name')
location = node.findall('file')[0].get('path')
diff -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e -r b761471d759097f03c0fb904e9543f02ed5096c9 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -427,12 +427,12 @@
if datatypes_config:
metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
+ if sample_files:
+ trans.app.config.tool_data_path = work_dir
# Handle the tool_data_table_conf.xml.sample file if it is included in the repository.
if 'tool_data_table_conf.xml.sample' in sample_files:
tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
- if sample_files:
- trans.app.config.tool_data_path = work_dir
for filename in ctx:
# Find all tool configs.
ctx_file_name = strip_path( filename )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: greg: Add the ability to browse the installation directory of tool dependencies for installed tool shed repositories.
by Bitbucket 14 Jun '12
by Bitbucket 14 Jun '12
14 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/bef69fd3fcc1/
changeset: bef69fd3fcc1
user: greg
date: 2012-06-14 16:00:51
summary: Add the ability to browse the installation directory of tool dependencies for installed tool shed repositories.
affected #: 10 files
diff -r b1f35669f93c4163cf803d64beb37a1cb0d5381c -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2740,7 +2740,13 @@
self.version = version
self.type = type
self.uninstalled = uninstalled
-
+ def installation_directory( self, app ):
+ return os.path.join( app.config.tool_dependency_dir,
+ self.name,
+ self.version,
+ self.tool_shed_repository.owner,
+ self.tool_shed_repository.name,
+ self.installed_changeset_revision )
class ToolVersion( object ):
def __init__( self, id=None, create_time=None, tool_id=None, tool_shed_repository=None ):
self.id = id
diff -r b1f35669f93c4163cf803d64beb37a1cb0d5381c -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -41,7 +41,7 @@
sa_session.add( tool_dependency )
sa_session.flush()
return tool_dependency
-def get_install_dir( app, repository, installed_changeset_revision, package_name, package_version ):
+def get_tool_dependency_install_dir( app, repository, installed_changeset_revision, package_name, package_version ):
return os.path.abspath( os.path.join( app.config.tool_dependency_dir,
package_name,
package_version,
diff -r b1f35669f93c4163cf803d64beb37a1cb0d5381c -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -46,7 +46,7 @@
if installed_tool_dependencies:
for installed_dependency in installed_tool_dependencies:
if not installed_dependency.uninstalled:
- if installed_dependency.name == name and installed_dependency.version == version and installed_dependency.type == type:
+ if installed_dependency.name==name and installed_dependency.version==version and installed_dependency.type==type:
break
for base_path in self.base_paths:
if installed_dependency:
diff -r b1f35669f93c4163cf803d64beb37a1cb0d5381c -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -956,6 +956,9 @@
file_name = strip_path( shed_tool_conf_dict[ 'config_filename' ] )
if shed_tool_conf == file_name:
return index, shed_tool_conf_dict
+def get_tool_dependency( trans, id ):
+ """Get a tool_dependency from the database via id"""
+ return trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) )
def get_tool_panel_config_tool_path_install_dir( app, repository ):
# Return shed-related tool panel config, the tool_path configured in it, and the relative path to the directory where the
# repository is installed. This method assumes all repository tools are defined in a single shed-related tool panel config.
diff -r b1f35669f93c4163cf803d64beb37a1cb0d5381c -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -2,7 +2,7 @@
from galaxy.web.controllers.admin import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
-from galaxy.tool_shed.tool_dependencies.install_util import get_install_dir, not_installed
+from galaxy.tool_shed.tool_dependencies.install_util import get_tool_dependency_install_dir, not_installed
from galaxy.tool_shed.encoding_util import *
from galaxy import eggs, tools
@@ -148,6 +148,19 @@
return self.repository_list_grid( trans, **kwd )
@web.expose
@web.require_admin
+ def browse_tool_dependency( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ tool_dependency = get_tool_dependency( trans, kwd[ 'id' ] )
+ repository = get_repository( trans, kwd[ 'repository_id' ] )
+ return trans.fill_template( '/admin/tool_shed_repository/browse_tool_dependency.mako',
+ repository=repository,
+ tool_dependency=tool_dependency,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
def browse_tool_shed( self, trans, **kwd ):
tool_shed_url = kwd[ 'tool_shed_url' ]
galaxy_url = url_for( '/', qualified=True )
@@ -256,6 +269,7 @@
url = '%srepository/find_workflows?galaxy_url=%s&webapp=galaxy&no_reset=true' % ( tool_shed_url, galaxy_url )
return trans.response.send_redirect( url )
@web.json
+ @web.require_admin
def get_file_contents( self, trans, file_path ):
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
@@ -341,7 +355,7 @@
for dependency_key, requirements_dict in tool_dependencies.items():
name = requirements_dict[ 'name' ]
version = requirements_dict[ 'version' ]
- install_dir = get_install_dir( trans.app, repository, repository.changeset_revision, name, version )
+ install_dir = get_tool_dependency_install_dir( trans.app, repository, repository.changeset_revision, name, version )
if not_installed( install_dir ):
filtered_tool_dependencies[ dependency_key ] = requirements_dict
tool_dependencies = filtered_tool_dependencies
@@ -585,6 +599,7 @@
message=message,
status=status )
@web.json
+ @web.require_admin
def open_folder( self, trans, folder_path ):
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
diff -r b1f35669f93c4163cf803d64beb37a1cb0d5381c -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e templates/admin/tool_shed_repository/browse_repository.mako
--- a/templates/admin/tool_shed_repository/browse_repository.mako
+++ b/templates/admin/tool_shed_repository/browse_repository.mako
@@ -10,7 +10,7 @@
<%def name="javascripts()">
${parent.javascripts()}
${h.js( "ui.core", "jquery.dynatree" )}
- ${common_javascripts(repository)}
+ ${common_javascripts(repository.name, repository.repo_files_directory(trans.app))}
</%def><br/><br/>
diff -r b1f35669f93c4163cf803d64beb37a1cb0d5381c -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e templates/admin/tool_shed_repository/browse_tool_dependency.mako
--- /dev/null
+++ b/templates/admin/tool_shed_repository/browse_tool_dependency.mako
@@ -0,0 +1,62 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ ${h.css( "dynatree_skin/ui.dynatree" )}
+</%def>
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${h.js( "ui.core", "jquery.dynatree" )}
+ ${common_javascripts(tool_dependency.name, tool_dependency.installation_directory( trans.app ))}
+</%def>
+
+<br/><br/>
+<ul class="manage-table-actions">
+ <li><a class="action-button" id="tool_dependency-${tool_dependency.id}-popup" class="menubutton">Repository Actions</a></li>
+ <div popupmenu="tool_dependency-${tool_dependency.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
+ %if repository.tool_dependencies:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
+ %endif
+ </div>
+</ul>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+ <div class="toolFormTitle">Browse tool dependency ${tool_dependency.name} installation directory</div>
+ <div class="toolFormBody">
+ <div class="form-row" >
+ <label>Tool shed repository:</label>
+ ${repository.name}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row" >
+ <label>Tool shed repository changeset revision:</label>
+ ${repository.changeset_revision}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row" >
+ <label>Tool dependency installation directory:</label>
+ ${tool_dependency.installation_directory( trans.app )}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row" >
+ <label>Contents:</label>
+ <div id="tree" >
+ Loading...
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <div id="file_contents" class="toolParamHelp" style="clear: both;background-color:#FAFAFA;"></div>
+ </div>
+ </div>
+</div>
diff -r b1f35669f93c4163cf803d64beb37a1cb0d5381c -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -1,4 +1,4 @@
-<%def name="common_javascripts(repository)">
+<%def name="common_javascripts(title_text, directory_path)"><script type="text/javascript">
$(function(){
$("#tree").ajaxComplete(function(event, XMLHttpRequest, ajaxOptions) {
@@ -6,7 +6,7 @@
});
// --- Initialize sample trees
$("#tree").dynatree({
- title: "${repository.name}",
+ title: "${title_text}",
rootVisible: true,
minExpandLevel: 0, // 1: root node is not collapsible
persist: false,
@@ -22,7 +22,7 @@
// initAjax is hard to fake, so we pass the children as object array:
initAjax: {url: "${h.url_for( controller='admin_toolshed', action='open_folder' )}",
dataType: "json",
- data: { folder_path: "${repository.repo_files_directory(trans.app)}" },
+ data: { folder_path: "${directory_path}" },
},
onLazyRead: function(dtnode){
dtnode.appendAjax({
@@ -43,7 +43,7 @@
var cell = $("#file_contents");
var selected_value;
if (dtnode.data.key == 'root') {
- selected_value = "${repository.repo_files_directory(trans.app)}/";
+ selected_value = "${directory_path}/";
} else {
selected_value = dtnode.data.key;
};
@@ -66,221 +66,3 @@
});
</script></%def>
-
-<%def name="render_repository_contents( repository, can_reset_metadata=False )">
- <div class="toolForm">
- <div class="toolFormTitle">${repository.name}</div>
- <div class="toolFormBody">
- <%
- metadata = repository.metadata
- missing_tool_dependencies = repository.missing_tool_dependencies
- installed_tool_dependencies = repository.installed_tool_dependencies
- %>
- %if missing_tool_dependencies:
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Missing tool dependencies</i></td>
- </tr>
- </table>
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <table class="grid">
- <tr>
- <td><b>name</b></td>
- <td><b>type</b></td>
- <td><b>version</b></td>
- </tr>
- %for name, requirements_dict in missing_tool_dependencies.items():
- <tr>
- <td>${requirements_dict[ 'name' ]}</td>
- <td>${requirements_dict[ 'type' ]}</td>
- <td>${requirements_dict[ 'version' ]}</td>
- </tr>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
- %if installed_tool_dependencies:
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Installed tool dependencies</i></td>
- </tr>
- </table>
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <table class="grid">
- <tr>
- <td><b>name</b></td>
- <td><b>type</b></td>
- <td><b>version</b></td>
- </tr>
- %for installed_tool_dependency in installed_tool_dependencies:
- <tr>
- <td>${installed_tool_dependency.name}</td>
- <td>${installed_tool_dependency.type}</td>
- <td>${installed_tool_dependency.version}</td>
- </tr>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
- %if 'tools' in metadata:
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Tools</b><i> - click the name to view information about the tool</i></td>
- </tr>
- </table>
- </div>
- <div class="form-row">
- <% tool_dicts = metadata[ 'tools' ] %>
- <table class="grid">
- <tr>
- <td><b>name</b></td>
- <td><b>description</b></td>
- <td><b>version</b></td>
- <td><b>requirements</b></td>
- </tr>
- %for tool_dict in tool_dicts:
- <tr>
- <td>
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), tool_id=tool_dict[ 'id' ] )}">
- ${tool_dict[ 'name' ]}
- </a>
- </td>
- <td>${tool_dict[ 'description' ]}</td>
- <td>${tool_dict[ 'version' ]}</td>
- <td>
- <%
- if 'requirements' in tool_dict:
- requirements = tool_dict[ 'requirements' ]
- else:
- requirements = None
- %>
- %if requirements:
- <%
- requirements_str = ''
- for requirement_dict in tool_dict[ 'requirements' ]:
- requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] )
- requirements_str = requirements_str.rstrip( ', ' )
- %>
- ${requirements_str}
- %else:
- none
- %endif
- </td>
- </tr>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
- %if 'workflows' in metadata:
- ## metadata[ 'workflows' ] is a list of tuples where each contained tuple is
- ## [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Workflows</b><i> - click the name to import</i></td>
- </tr>
- </table>
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <% workflow_tups = metadata[ 'workflows' ] %>
- <table class="grid">
- <tr>
- <td><b>name</b></td>
- <td><b>steps</b></td>
- <td><b>format-version</b></td>
- <td><b>annotation</b></td>
- </tr>
- <% index = 0 %>
- %for workflow_tup in workflow_tups:
- <%
- import os.path
- relative_path = workflow_tup[ 0 ]
- full_path = os.path.abspath( relative_path )
- workflow_dict = workflow_tup[ 1 ]
- workflow_name = workflow_dict[ 'name' ]
- ## Initially steps were not stored in the metadata record.
- steps = workflow_dict.get( 'steps', [] )
- format_version = workflow_dict[ 'format-version' ]
- annotation = workflow_dict[ 'annotation' ]
- %>
- <tr>
- <td>
- <div class="menubutton" style="float: left;" id="workflow-${index}-popup">
- ${workflow_name}
- <div popupmenu="workflow-${index}-popup">
- <a class="action-button" href="${h.url_for( controller='workflow', action='import_workflow', installed_repository_file=full_path, repository_id=trans.security.encode_id( repository.id ) )}">Import to Galaxy</a>
- </div>
- </div>
- </td>
- <td>
- %if steps:
- ${len( steps )}
- %else:
- unknown
- %endif
- </td>
- <td>${format_version}</td>
- <td>${annotation}</td>
- </tr>
- <% index += 1 %>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
- %if 'datatypes' in metadata:
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Data types</b></td>
- </tr>
- </table>
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <% datatypes_dicts = metadata[ 'datatypes' ] %>
- <table class="grid">
- <tr>
- <td><b>extension</b></td>
- <td><b>type</b></td>
- <td><b>mimetype</b></td>
- <td><b>subclass</b></td>
- </tr>
- %for datatypes_dict in datatypes_dicts:
- <tr>
- <td>${datatypes_dict.get( 'extension', ' ' )}</td>
- <td>${datatypes_dict.get( 'dtype', ' ' )}</td>
- <td>${datatypes_dict.get( 'mimetype', ' ' )}</td>
- <td>${datatypes_dict.get( 'subclass', ' ' )}</td>
- </tr>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
- %if can_reset_metadata:
- <form name="set_metadata" action="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post">
- <div class="form-row">
- <div style="float: left; width: 250px; margin-right: 10px;">
- <input type="submit" name="set_metadata_button" value="Reset metadata"/>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- Inspect the repository and reset the above attributes.
- </div>
- </div>
- </form>
- %endif
- </div>
- </div>
-</%def>
diff -r b1f35669f93c4163cf803d64beb37a1cb0d5381c -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e templates/admin/tool_shed_repository/manage_repository.mako
--- a/templates/admin/tool_shed_repository/manage_repository.mako
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -64,5 +64,223 @@
</div></div><p/>
-${render_repository_contents( repository, can_reset_metadata=True )}
+<div class="toolForm">
+ <div class="toolFormTitle">${repository.name}</div>
+ <div class="toolFormBody">
+ <%
+ metadata = repository.metadata
+ missing_tool_dependencies = repository.missing_tool_dependencies
+ installed_tool_dependencies = repository.installed_tool_dependencies
+ %>
+ %if missing_tool_dependencies:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><b>Missing tool dependencies</i></td>
+ </tr>
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>type</b></td>
+ <td><b>version</b></td>
+ </tr>
+ %for name, requirements_dict in missing_tool_dependencies.items():
+ <tr>
+ <td>${requirements_dict[ 'name' ]}</td>
+ <td>${requirements_dict[ 'type' ]}</td>
+ <td>${requirements_dict[ 'version' ]}</td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ %if installed_tool_dependencies:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><b>Installed tool dependencies<i> - click the name to browse the dependency installation directory</i></td>
+ </tr>
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>type</b></td>
+ <td><b>version</b></td>
+ </tr>
+ %for installed_tool_dependency in installed_tool_dependencies:
+ <tr>
+ <td>
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
+ ${installed_tool_dependency.name}
+ </a>
+ </td>
+ <td>${installed_tool_dependency.type}</td>
+ <td>${installed_tool_dependency.version}</td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ %if 'tools' in metadata:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><b>Tools</b><i> - click the name to view information about the tool</i></td>
+ </tr>
+ </table>
+ </div>
+ <div class="form-row">
+ <% tool_dicts = metadata[ 'tools' ] %>
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>description</b></td>
+ <td><b>version</b></td>
+ <td><b>requirements</b></td>
+ </tr>
+ %for tool_dict in tool_dicts:
+ <tr>
+ <td>
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), tool_id=tool_dict[ 'id' ] )}">
+ ${tool_dict[ 'name' ]}
+ </a>
+ </td>
+ <td>${tool_dict[ 'description' ]}</td>
+ <td>${tool_dict[ 'version' ]}</td>
+ <td>
+ <%
+ if 'requirements' in tool_dict:
+ requirements = tool_dict[ 'requirements' ]
+ else:
+ requirements = None
+ %>
+ %if requirements:
+ <%
+ requirements_str = ''
+ for requirement_dict in tool_dict[ 'requirements' ]:
+ requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] )
+ requirements_str = requirements_str.rstrip( ', ' )
+ %>
+ ${requirements_str}
+ %else:
+ none
+ %endif
+ </td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ %if 'workflows' in metadata:
+ ## metadata[ 'workflows' ] is a list of tuples where each contained tuple is
+ ## [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><b>Workflows</b><i> - click the name to import</i></td>
+ </tr>
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <% workflow_tups = metadata[ 'workflows' ] %>
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>steps</b></td>
+ <td><b>format-version</b></td>
+ <td><b>annotation</b></td>
+ </tr>
+ <% index = 0 %>
+ %for workflow_tup in workflow_tups:
+ <%
+ import os.path
+ relative_path = workflow_tup[ 0 ]
+ full_path = os.path.abspath( relative_path )
+ workflow_dict = workflow_tup[ 1 ]
+ workflow_name = workflow_dict[ 'name' ]
+ ## Initially steps were not stored in the metadata record.
+ steps = workflow_dict.get( 'steps', [] )
+ format_version = workflow_dict[ 'format-version' ]
+ annotation = workflow_dict[ 'annotation' ]
+ %>
+ <tr>
+ <td>
+ <div class="menubutton" style="float: left;" id="workflow-${index}-popup">
+ ${workflow_name}
+ <div popupmenu="workflow-${index}-popup">
+ <a class="action-button" href="${h.url_for( controller='workflow', action='import_workflow', installed_repository_file=full_path, repository_id=trans.security.encode_id( repository.id ) )}">Import to Galaxy</a>
+ </div>
+ </div>
+ </td>
+ <td>
+ %if steps:
+ ${len( steps )}
+ %else:
+ unknown
+ %endif
+ </td>
+ <td>${format_version}</td>
+ <td>${annotation}</td>
+ </tr>
+ <% index += 1 %>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ %if 'datatypes' in metadata:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><b>Data types</b></td>
+ </tr>
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <% datatypes_dicts = metadata[ 'datatypes' ] %>
+ <table class="grid">
+ <tr>
+ <td><b>extension</b></td>
+ <td><b>type</b></td>
+ <td><b>mimetype</b></td>
+ <td><b>subclass</b></td>
+ </tr>
+ %for datatypes_dict in datatypes_dicts:
+ <tr>
+ <td>${datatypes_dict.get( 'extension', ' ' )}</td>
+ <td>${datatypes_dict.get( 'dtype', ' ' )}</td>
+ <td>${datatypes_dict.get( 'mimetype', ' ' )}</td>
+ <td>${datatypes_dict.get( 'subclass', ' ' )}</td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ %if can_reset_metadata:
+ <form name="set_metadata" action="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post">
+ <div class="form-row">
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <input type="submit" name="set_metadata_button" value="Reset metadata"/>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Inspect the repository and reset the above attributes.
+ </div>
+ </div>
+ </form>
+ %endif
+ </div>
+</div><p/>
diff -r b1f35669f93c4163cf803d64beb37a1cb0d5381c -r bef69fd3fcc191b9a9a4d140876a6ee932e9fc3e templates/admin/tool_shed_repository/manage_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/manage_tool_dependencies.mako
+++ b/templates/admin/tool_shed_repository/manage_tool_dependencies.mako
@@ -24,7 +24,7 @@
%endif
<div class="toolForm">
- <div class="toolFormTitle">${repository.name} repository's installed tool dependencies</div>
+ <div class="toolFormTitle">${repository.name} repository's tool dependencies</div><div class="toolFormBody"><div class="form-row"><table class="grid">
@@ -45,7 +45,18 @@
<tr><td bgcolor="#D8D8D8"><b>Name</b></td><td bgcolor="#D8D8D8">${name}</td></tr><tr><th>Version</th><td>${version}</td></tr><tr><th>Type</th><td>${type}</td></tr>
- <tr><th>Install directory</th><td>${install_dir}</td></tr>
+ <tr>
+ <th>Install directory</th>
+ <td>
+ %if uninstalled:
+ This dependency is not currently installed
+ %else:
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
+ ${install_dir}
+ </a>
+ %endif
+ </td>
+ </tr><tr><th>Installed changeset revision</th><td>${installed_changeset_revision}</td></tr><tr><th>Uninstalled</th><td>${uninstalled}</td></tr>
%endfor
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

13 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b1f35669f93c/
changeset: b1f35669f93c
user: greg
date: 2012-06-13 22:40:22
summary: Add baseline support for managing dependencies for tools included in installed tool shed repositories, enhance installed repository browsing to enable browsing files, and a bit of code cleanup.
affected #: 12 files
diff -r 599e16b18e34267a9563f47b1acaa234165482ea -r b1f35669f93c4163cf803d64beb37a1cb0d5381c lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2665,7 +2665,12 @@
self.deleted = deleted
self.uninstalled = uninstalled
self.dist_to_shed = dist_to_shed
- def repo_path( self, app ):
+ def repo_files_directory( self, app ):
+ repo_path = self.repo_path( app )
+ if repo_path:
+ return os.path.join( app.config.root, repo_path, self.name )
+ return None
+ def repo_path( self, app ):
tool_shed_url = self.tool_shed
if tool_shed_url.find( ':' ) > 0:
# Eliminate the port, if any, since it will result in an invalid directory name.
@@ -2673,7 +2678,7 @@
tool_shed = tool_shed_url.rstrip( '/' )
for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ):
tool_path = shed_tool_conf_dict[ 'tool_path' ]
- relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision )
+ relative_path = os.path.join( app.config.root, tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision )
if os.path.exists( relative_path ):
return relative_path
return None
@@ -2686,6 +2691,46 @@
@property
def includes_workflows( self ):
return self.metadata and 'workflows' in self.metadata
+ @property
+ def installed_tool_dependencies( self ):
+ """Return the repository's tool dependencies that are currently installed."""
+ installed_dependencies = []
+ for tool_dependency in self.tool_dependencies:
+ if not tool_dependency.uninstalled:
+ installed_dependencies.append( tool_dependency )
+ return installed_dependencies
+ @property
+ def missing_tool_dependencies( self ):
+ """Return the repository's tool dependencies that are not currently installed."""
+ def add_missing_dependency( missing_dependencies_dict, name, version, type, installed_changeset_revision=None ):
+ missing_dependencies_dict[ name ] = dict( version=version,
+ type=type,
+ installed_changeset_revision=installed_changeset_revision )
+ return missing_dependencies_dict
+ missing_dependencies = {}
+ # Get the dependency information from the metadata for comparison against the installed tool dependencies.
+ tool_dependencies = self.metadata.get( 'tool_dependencies', None )
+ if tool_dependencies:
+ for dependency_key, requirements_dict in tool_dependencies.items():
+ name = requirements_dict[ 'name' ]
+ version = requirements_dict[ 'version' ]
+ type = requirements_dict[ 'type' ]
+ if self.tool_dependencies:
+ found = False
+ for installed_dependency in self.tool_dependencies:
+ if installed_dependency.name==name and installed_dependency.version==version and installed_dependency.type==type:
+ found = True
+ if installed_dependency.uninstalled:
+ missing_dependencies = add_missing_dependency( missing_dependencies,
+ installed_dependency.name,
+ installed_dependency.version,
+ installed_dependency.type,
+ installed_dependency.installed_changeset_revision )
+ break
+ if not found:
+ missing_dependencies = add_missing_dependency( missing_dependencies, name, version, type )
+ return missing_dependencies
+ return None
class ToolDependency( object ):
def __init__( self, tool_shed_repository_id=None, installed_changeset_revision=None, name=None, version=None, type=None, uninstalled=False ):
diff -r 599e16b18e34267a9563f47b1acaa234165482ea -r b1f35669f93c4163cf803d64beb37a1cb0d5381c lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -795,19 +795,15 @@
return tool_version.get_version_ids( self.app )
return []
@property
- def installed_tool_dependencies( self ):
- # If this tool is included in an installed tool shed repository and tool dependencies were installed along with the
- # tool shed repository, then this method will return the repository's ToolDependency records.
- if self.app.config.use_tool_dependencies:
- if self.tool_shed:
- tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_changeset_revision( self.app,
- self.tool_shed,
- self.repository_name,
- self.repository_owner,
- self.installed_changeset_revision )
- if tool_shed_repository:
- return tool_shed_repository.tool_dependencies
- return None
+ def tool_shed_repository( self ):
+ # If this tool is included in an installed tool shed repository, return it.
+ if self.tool_shed:
+ return get_tool_shed_repository_by_shed_name_owner_changeset_revision( self.app,
+ self.tool_shed,
+ self.repository_name,
+ self.repository_owner,
+ self.installed_changeset_revision )
+ return None
def __get_job_run_config( self, run_configs, key, job_params=None ):
# Look through runners/handlers to find one with matching parameters.
available_configs = []
@@ -2333,6 +2329,10 @@
environment to include this tools requirements.
"""
commands = []
+ if self.tool_shed_repository:
+ installed_tool_dependencies = self.tool_shed_repository.tool_dependencies
+ else:
+ installed_tool_dependencies = None
for requirement in self.requirements:
# TODO: currently only supporting requirements of type package,
# need to implement some mechanism for mapping other types
@@ -2342,7 +2342,7 @@
script_file, base_path, version = self.app.toolbox.dependency_manager.find_dep( name=requirement.name,
version=requirement.version,
type=requirement.type,
- installed_tool_dependencies=self.installed_tool_dependencies )
+ installed_tool_dependencies=installed_tool_dependencies )
if script_file is None and base_path is None:
log.warn( "Failed to resolve dependency on '%s', ignoring", requirement.name )
elif script_file is None:
diff -r 599e16b18e34267a9563f47b1acaa234165482ea -r b1f35669f93c4163cf803d64beb37a1cb0d5381c lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -29,6 +29,7 @@
'"' : '"',
'&' : '&',
'\'' : ''' }
+MAX_CONTENT_SIZE = 32768
VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" )
NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'tool_dependencies.xml' ]
@@ -881,6 +882,37 @@
fh.close()
return tmp_filename
return None
+def get_repository_file_contents( file_path ):
+ if is_gzip( file_path ):
+ to_html = to_html_str( '\ngzip compressed file\n' )
+ elif is_bz2( file_path ):
+ to_html = to_html_str( '\nbz2 compressed file\n' )
+ elif check_zip( file_path ):
+ to_html = to_html_str( '\nzip compressed file\n' )
+ elif check_binary( file_path ):
+ to_html = to_html_str( '\nBinary file\n' )
+ else:
+ to_html = ''
+ for i, line in enumerate( open( file_path ) ):
+ to_html = '%s%s' % ( to_html, to_html_str( line ) )
+ if len( to_html ) > MAX_CONTENT_SIZE:
+ large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE )
+ to_html = '%s%s' % ( to_html, to_html_str( large_str ) )
+ break
+ return to_html
+def get_repository_files( trans, folder_path ):
+ contents = []
+ for item in os.listdir( folder_path ):
+ # Skip .hg directories
+ if str( item ).startswith( '.hg' ):
+ continue
+ if os.path.isdir( os.path.join( folder_path, item ) ):
+ # Append a '/' character so that our jquery dynatree will function properly.
+ item = '%s/' % item
+ contents.append( item )
+ if contents:
+ contents.sort()
+ return contents
def get_repository_owner( cleaned_repository_url ):
items = cleaned_repository_url.split( 'repos' )
repo_path = items[ 1 ]
@@ -1313,6 +1345,27 @@
if not os.path.exists( work_dir ):
os.makedirs( work_dir )
return work_dir
+def open_repository_files_folder( trans, folder_path ):
+ try:
+ files_list = get_repository_files( trans, folder_path )
+ except OSError, e:
+ if str( e ).find( 'No such file or directory' ) >= 0:
+ # We have a repository with no contents.
+ return []
+ folder_contents = []
+ for filename in files_list:
+ is_folder = False
+ if filename and filename[-1] == os.sep:
+ is_folder = True
+ if filename:
+ full_path = os.path.join( folder_path, filename )
+ node = { "title": filename,
+ "isFolder": is_folder,
+ "isLazy": is_folder,
+ "tooltip": full_path,
+ "key": full_path }
+ folder_contents.append( node )
+ return folder_contents
def panel_entry_per_tool( tool_section_dict ):
# Return True if tool_section_dict looks like this.
# {<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <TooSection name>}]}
diff -r 599e16b18e34267a9563f47b1acaa234165482ea -r b1f35669f93c4163cf803d64beb37a1cb0d5381c lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -174,40 +174,6 @@
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
- def check_installed_tool_dependencies( self, trans, repository_id, relative_install_dir ):
- """See if any tool dependencies need to be installed."""
- tool_dependencies_missing = False
- repository = get_repository( trans, repository_id )
- if repository.includes_tool_dependencies:
- # Get the tool_dependencies.xml file from the repository.
- work_dir = make_tmp_directory()
- tool_dependencies_config = get_config_from_repository( trans.app,
- 'tool_dependencies.xml',
- repository,
- repository.changeset_revision,
- work_dir,
- install_dir=relative_install_dir )
- # Parse the tool_dependencies.xml config.
- tree = ElementTree.parse( tool_dependencies_config )
- root = tree.getroot()
- ElementInclude.include( root )
- fabric_version_checked = False
- for elem in root:
- if elem.tag == 'package':
- package_name = elem.get( 'name', None )
- package_version = elem.get( 'version', None )
- if package_name and package_version:
- install_dir = get_install_dir( trans.app, repository, repository.installed_changeset_revision, package_name, package_version )
- if not_installed( install_dir ):
- tool_dependencies_missing = True
- break
- try:
- shutil.rmtree( work_dir )
- except:
- pass
- return tool_dependencies_missing
- @web.expose
- @web.require_admin
def deactivate_or_uninstall_repository( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -289,6 +255,12 @@
galaxy_url = url_for( '/', qualified=True )
url = '%srepository/find_workflows?galaxy_url=%s&webapp=galaxy&no_reset=true' % ( tool_shed_url, galaxy_url )
return trans.response.send_redirect( url )
+ @web.json
+ def get_file_contents( self, trans, file_path ):
+ # Avoid caching
+ trans.response.headers['Pragma'] = 'no-cache'
+ trans.response.headers['Expires'] = '0'
+ return get_repository_file_contents( file_path )
@web.expose
@web.require_admin
def install_tool_dependencies( self, trans, **kwd ):
@@ -594,16 +566,32 @@
trans.sa_session.add( repository )
trans.sa_session.flush()
message = "Repository metadata has been reset."
- tool_dependencies_missing = self.check_installed_tool_dependencies( trans, repository_id, relative_install_dir )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=description,
repo_files_dir=repo_files_dir,
- tool_dependencies_missing=tool_dependencies_missing,
message=message,
status=status )
@web.expose
@web.require_admin
+ def manage_tool_dependencies( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ repository_id = kwd[ 'id' ]
+ repository = get_repository( trans, repository_id )
+ return trans.fill_template( '/admin/tool_shed_repository/manage_tool_dependencies.mako',
+ repository=repository,
+ message=message,
+ status=status )
+ @web.json
+ def open_folder( self, trans, folder_path ):
+ # Avoid caching
+ trans.response.headers['Pragma'] = 'no-cache'
+ trans.response.headers['Expires'] = '0'
+ return open_repository_files_folder( trans, folder_path )
+ @web.expose
+ @web.require_admin
def reinstall_repository( self, trans, **kwd ):
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
@@ -709,6 +697,8 @@
trans.sa_session.flush()
if install_tool_dependencies:
dependency_str = ' along with tool dependencies'
+ if error_message:
+ dependency_str += ', but with some errors installing the dependencies'
else:
dependency_str = ' without tool dependencies'
message += 'The <b>%s</b> repository has been reinstalled%s. ' % ( repository.name, dependency_str )
@@ -822,8 +812,7 @@
message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision )
# See if any tool dependencies can be installed.
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- tool_dependencies_missing = self.check_installed_tool_dependencies( trans, trans.security.encode_id( repository.id ), relative_install_dir )
- if tool_dependencies_missing:
+ if repository.missing_tool_dependencies:
message += "Select <b>Install tool dependencies</b> from the repository's pop-up menu to install tool dependencies."
else:
message = "The directory containing the installed repository named '%s' cannot be found. " % name
diff -r 599e16b18e34267a9563f47b1acaa234165482ea -r b1f35669f93c4163cf803d64beb37a1cb0d5381c lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -2,7 +2,6 @@
from time import strftime
from datetime import date, datetime
from galaxy import util
-from galaxy.datatypes.checkers import *
from galaxy.web.base.controller import *
from galaxy.web.form_builder import CheckboxField
from galaxy.webapps.community import model
@@ -10,7 +9,8 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
-from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, make_tmp_directory, NOT_TOOL_CONFIGS, strip_path
+from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, get_repository_file_contents, make_tmp_directory, NOT_TOOL_CONFIGS
+from galaxy.util.shed_util import open_repository_files_folder, strip_path
from galaxy.tool_shed.encoding_util import *
from common import *
@@ -20,7 +20,6 @@
log = logging.getLogger( __name__ )
-MAX_CONTENT_SIZE = 32768
VALID_REPOSITORYNAME_RE = re.compile( "^[a-z0-9\_]+$" )
README_FILES = [ 'readme', 'read_me', 'install' ]
@@ -496,12 +495,10 @@
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
repository = get_repository( trans, id )
repo = hg.repository( get_configured_ui(), repository.repo_path )
- current_working_dir = os.getcwd()
# Update repository files for browsing.
update_repository( repo )
is_malicious = changeset_is_malicious( trans, id, repository.tip )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
- repo=repo,
repository=repository,
commit_message=commit_message,
is_malicious=is_malicious,
@@ -1016,37 +1013,7 @@
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
- if is_gzip( file_path ):
- to_html = to_html_str( '\ngzip compressed file\n' )
- elif is_bz2( file_path ):
- to_html = to_html_str( '\nbz2 compressed file\n' )
- elif check_zip( file_path ):
- to_html = to_html_str( '\nzip compressed file\n' )
- elif check_binary( file_path ):
- to_html = to_html_str( '\nBinary file\n' )
- else:
- to_html = ''
- for i, line in enumerate( open( file_path ) ):
- to_html = '%s%s' % ( to_html, to_html_str( line ) )
- if len( to_html ) > MAX_CONTENT_SIZE:
- large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE )
- to_html = '%s%s' % ( to_html, to_html_str( large_str ) )
- break
- return to_html
- def __get_files( self, trans, folder_path ):
- contents = []
- for item in os.listdir( folder_path ):
- # Skip .hg directories
- if str( item ).startswith( '.hg' ):
- continue
- if os.path.isdir( os.path.join( folder_path, item ) ):
- # Append a '/' character so that our jquery dynatree will
- # function properly.
- item = '%s/' % item
- contents.append( item )
- if contents:
- contents.sort()
- return contents
+ return get_repository_file_contents( file_path )
@web.expose
def get_readme( self, trans, **kwd ):
"""If the received changeset_revision includes a file named readme (case ignored), return it's contents."""
@@ -1558,34 +1525,13 @@
state.inputs = {}
return state
@web.json
- def open_folder( self, trans, repository_id, key ):
+ def open_folder( self, trans, folder_path ):
# The tool shed includes a repository source file browser, which currently depends upon
# copies of the hg repository file store in the repo_path for browsing.
# Avoid caching
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
- repository = trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( repository_id ) )
- folder_path = key
- try:
- files_list = self.__get_files( trans, folder_path )
- except OSError, e:
- if str( e ).find( 'No such file or directory' ) >= 0:
- # We have a repository with no contents.
- return []
- folder_contents = []
- for filename in files_list:
- is_folder = False
- if filename and filename[-1] == os.sep:
- is_folder = True
- if filename:
- full_path = os.path.join( folder_path, filename )
- node = { "title": filename,
- "isFolder": is_folder,
- "isLazy": is_folder,
- "tooltip": full_path,
- "key": full_path }
- folder_contents.append( node )
- return folder_contents
+ return open_repository_files_folder( trans, folder_path )
@web.expose
def preview_tools_in_changeset( self, trans, repository_id, **kwd ):
params = util.Params( kwd )
@@ -1769,7 +1715,6 @@
if params.get( 'select_files_to_delete_button', False ):
if selected_files_to_delete:
selected_files_to_delete = selected_files_to_delete.split( ',' )
- current_working_dir = os.getcwd()
# Get the current repository tip.
tip = repository.tip
for selected_file in selected_files_to_delete:
diff -r 599e16b18e34267a9563f47b1acaa234165482ea -r b1f35669f93c4163cf803d64beb37a1cb0d5381c templates/admin/tool_shed_repository/browse_repository.mako
--- a/templates/admin/tool_shed_repository/browse_repository.mako
+++ b/templates/admin/tool_shed_repository/browse_repository.mako
@@ -2,6 +2,17 @@
<%namespace file="/message.mako" import="render_msg" /><%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ ${h.css( "dynatree_skin/ui.dynatree" )}
+</%def>
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${h.js( "ui.core", "jquery.dynatree" )}
+ ${common_javascripts(repository)}
+</%def>
+
<br/><br/><ul class="manage-table-actions"><li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
@@ -9,6 +20,9 @@
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
+ %if repository.tool_dependencies:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
+ %endif
</div></ul>
@@ -16,4 +30,17 @@
${render_msg( message, status )}
%endif
-${render_metadata( repository, can_reset_metadata=False )}
+<div class="toolForm">
+ <div class="toolFormTitle">Browse ${repository.name} revision ${repository.changeset_revision} files</div>
+ <div class="toolFormBody">
+ <div class="form-row" >
+ <label>Contents:</label>
+ <div id="tree" >
+ Loading...
+ </div>
+ </div>
+ <div class="form-row">
+ <div id="file_contents" class="toolParamHelp" style="clear: both;background-color:#FAFAFA;"></div>
+ </div>
+ </div>
+</div>
diff -r 599e16b18e34267a9563f47b1acaa234165482ea -r b1f35669f93c4163cf803d64beb37a1cb0d5381c templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -1,154 +1,273 @@
-<%def name="render_metadata( repository, can_reset_metadata=False )">
+<%def name="common_javascripts(repository)">
+ <script type="text/javascript">
+ $(function(){
+ $("#tree").ajaxComplete(function(event, XMLHttpRequest, ajaxOptions) {
+ _log("debug", "ajaxComplete: %o", this); // dom element listening
+ });
+ // --- Initialize sample trees
+ $("#tree").dynatree({
+ title: "${repository.name}",
+ rootVisible: true,
+ minExpandLevel: 0, // 1: root node is not collapsible
+ persist: false,
+ checkbox: true,
+ selectMode: 3,
+ onPostInit: function(isReloading, isError) {
+ //alert("reloading: "+isReloading+", error:"+isError);
+ logMsg("onPostInit(%o, %o) - %o", isReloading, isError, this);
+ // Re-fire onActivate, so the text is updated
+ this.reactivate();
+ },
+ fx: { height: "toggle", duration: 200 },
+ // initAjax is hard to fake, so we pass the children as object array:
+ initAjax: {url: "${h.url_for( controller='admin_toolshed', action='open_folder' )}",
+ dataType: "json",
+ data: { folder_path: "${repository.repo_files_directory(trans.app)}" },
+ },
+ onLazyRead: function(dtnode){
+ dtnode.appendAjax({
+ url: "${h.url_for( controller='admin_toolshed', action='open_folder' )}",
+ dataType: "json",
+ data: { folder_path: dtnode.data.key },
+ });
+ },
+ onSelect: function(select, dtnode) {
+ // Display list of selected nodes
+ var selNodes = dtnode.tree.getSelectedNodes();
+ // convert to title/key array
+ var selKeys = $.map(selNodes, function(node) {
+ return node.data.key;
+ });
+ },
+ onActivate: function(dtnode) {
+ var cell = $("#file_contents");
+ var selected_value;
+ if (dtnode.data.key == 'root') {
+ selected_value = "${repository.repo_files_directory(trans.app)}/";
+ } else {
+ selected_value = dtnode.data.key;
+ };
+ if (selected_value.charAt(selected_value.length-1) != '/') {
+ // Make ajax call
+ $.ajax( {
+ type: "POST",
+ url: "${h.url_for( controller='admin_toolshed', action='get_file_contents' )}",
+ dataType: "json",
+ data: { file_path: selected_value },
+ success : function ( data ) {
+ cell.html( '<label>'+data+'</label>' )
+ }
+ });
+ } else {
+ cell.html( '' );
+ };
+ },
+ });
+ });
+ </script>
+</%def>
+
+<%def name="render_repository_contents( repository, can_reset_metadata=False )"><div class="toolForm">
- <div class="toolFormTitle">Repository contents</div>
+ <div class="toolFormTitle">${repository.name}</div><div class="toolFormBody">
- <% metadata = repository.metadata %>
- %if metadata:
- %if 'tools' in metadata:
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Tools</b><i> - click the name to view tool related information</i></td>
+ <%
+ metadata = repository.metadata
+ missing_tool_dependencies = repository.missing_tool_dependencies
+ installed_tool_dependencies = repository.installed_tool_dependencies
+ %>
+ %if missing_tool_dependencies:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><b>Missing tool dependencies</b></td>
+ </tr>
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>type</b></td>
+ <td><b>version</b></td>
+ </tr>
+ %for name, requirements_dict in missing_tool_dependencies.items():
+ <tr>
+ <td>${requirements_dict[ 'name' ]}</td>
+ <td>${requirements_dict[ 'type' ]}</td>
+ <td>${requirements_dict[ 'version' ]}</td></tr>
- </table>
- </div>
- <div class="form-row">
- <% tool_dicts = metadata[ 'tools' ] %>
- <table class="grid">
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ %if installed_tool_dependencies:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><b>Installed tool dependencies</b></td>
+ </tr>
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>type</b></td>
+ <td><b>version</b></td>
+ </tr>
+ %for installed_tool_dependency in installed_tool_dependencies:
<tr>
- <td><b>name</b></td>
- <td><b>description</b></td>
- <td><b>version</b></td>
- <td><b>requirements</b></td>
+ <td>${installed_tool_dependency.name}</td>
+ <td>${installed_tool_dependency.type}</td>
+ <td>${installed_tool_dependency.version}</td></tr>
- %for tool_dict in tool_dicts:
- <tr>
- <td>
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), tool_id=tool_dict[ 'id' ] )}">
- ${tool_dict[ 'name' ]}
- </a>
- </td>
- <td>${tool_dict[ 'description' ]}</td>
- <td>${tool_dict[ 'version' ]}</td>
- <td>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ %if 'tools' in metadata:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><b>Tools</b><i> - click the name to view information about the tool</i></td>
+ </tr>
+ </table>
+ </div>
+ <div class="form-row">
+ <% tool_dicts = metadata[ 'tools' ] %>
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>description</b></td>
+ <td><b>version</b></td>
+ <td><b>requirements</b></td>
+ </tr>
+ %for tool_dict in tool_dicts:
+ <tr>
+ <td>
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), tool_id=tool_dict[ 'id' ] )}">
+ ${tool_dict[ 'name' ]}
+ </a>
+ </td>
+ <td>${tool_dict[ 'description' ]}</td>
+ <td>${tool_dict[ 'version' ]}</td>
+ <td>
+ <%
+ if 'requirements' in tool_dict:
+ requirements = tool_dict[ 'requirements' ]
+ else:
+ requirements = None
+ %>
+ %if requirements:
<%
- if 'requirements' in tool_dict:
- requirements = tool_dict[ 'requirements' ]
- else:
- requirements = None
+ requirements_str = ''
+ for requirement_dict in tool_dict[ 'requirements' ]:
+ requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] )
+ requirements_str = requirements_str.rstrip( ', ' )
%>
- %if requirements:
- <%
- requirements_str = ''
- for requirement_dict in tool_dict[ 'requirements' ]:
- requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] )
- requirements_str = requirements_str.rstrip( ', ' )
- %>
- ${requirements_str}
- %else:
- none
- %endif
- </td>
- </tr>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
- %if 'workflows' in metadata:
- ## metadata[ 'workflows' ] is a list of tuples where each contained tuple is
- ## [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Workflows</b><i> - click the name to import</i></td>
+ ${requirements_str}
+ %else:
+ none
+ %endif
+ </td></tr>
- </table>
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <% workflow_tups = metadata[ 'workflows' ] %>
- <table class="grid">
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ %if 'workflows' in metadata:
+ ## metadata[ 'workflows' ] is a list of tuples where each contained tuple is
+ ## [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><b>Workflows</b><i> - click the name to import</i></td>
+ </tr>
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <% workflow_tups = metadata[ 'workflows' ] %>
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>steps</b></td>
+ <td><b>format-version</b></td>
+ <td><b>annotation</b></td>
+ </tr>
+ <% index = 0 %>
+ %for workflow_tup in workflow_tups:
+ <%
+ import os.path
+ relative_path = workflow_tup[ 0 ]
+ full_path = os.path.abspath( relative_path )
+ workflow_dict = workflow_tup[ 1 ]
+ workflow_name = workflow_dict[ 'name' ]
+ ## Initially steps were not stored in the metadata record.
+ steps = workflow_dict.get( 'steps', [] )
+ format_version = workflow_dict[ 'format-version' ]
+ annotation = workflow_dict[ 'annotation' ]
+ %><tr>
- <td><b>name</b></td>
- <td><b>steps</b></td>
- <td><b>format-version</b></td>
- <td><b>annotation</b></td>
+ <td>
+ <div class="menubutton" style="float: left;" id="workflow-${index}-popup">
+ ${workflow_name}
+ <div popupmenu="workflow-${index}-popup">
+ <a class="action-button" href="${h.url_for( controller='workflow', action='import_workflow', installed_repository_file=full_path, repository_id=trans.security.encode_id( repository.id ) )}">Import to Galaxy</a>
+ </div>
+ </div>
+ </td>
+ <td>
+ %if steps:
+ ${len( steps )}
+ %else:
+ unknown
+ %endif
+ </td>
+ <td>${format_version}</td>
+ <td>${annotation}</td></tr>
- <% index = 0 %>
- %for workflow_tup in workflow_tups:
- <%
- import os.path
- relative_path = workflow_tup[ 0 ]
- full_path = os.path.abspath( relative_path )
- workflow_dict = workflow_tup[ 1 ]
- workflow_name = workflow_dict[ 'name' ]
- ## Initially steps were not stored in the metadata record.
- steps = workflow_dict.get( 'steps', [] )
- format_version = workflow_dict[ 'format-version' ]
- annotation = workflow_dict[ 'annotation' ]
- %>
- <tr>
- <td>
- <div class="menubutton" style="float: left;" id="workflow-${index}-popup">
- ${workflow_name}
- <div popupmenu="workflow-${index}-popup">
- <a class="action-button" href="${h.url_for( controller='workflow', action='import_workflow', installed_repository_file=full_path, repository_id=trans.security.encode_id( repository.id ) )}">Import to Galaxy</a>
- </div>
- </div>
- </td>
- <td>
- %if steps:
- ${len( steps )}
- %else:
- unknown
- %endif
- </td>
- <td>${format_version}</td>
- <td>${annotation}</td>
- </tr>
- <% index += 1 %>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
- %if 'datatypes' in metadata:
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Data types</b></td>
+ <% index += 1 %>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ %if 'datatypes' in metadata:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><b>Data types</b></td>
+ </tr>
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <% datatypes_dicts = metadata[ 'datatypes' ] %>
+ <table class="grid">
+ <tr>
+ <td><b>extension</b></td>
+ <td><b>type</b></td>
+ <td><b>mimetype</b></td>
+ <td><b>subclass</b></td>
+ </tr>
+ %for datatypes_dict in datatypes_dicts:
+ <tr>
+ <td>${datatypes_dict.get( 'extension', ' ' )}</td>
+ <td>${datatypes_dict.get( 'dtype', ' ' )}</td>
+ <td>${datatypes_dict.get( 'mimetype', ' ' )}</td>
+ <td>${datatypes_dict.get( 'subclass', ' ' )}</td></tr>
- </table>
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <% datatypes_dicts = metadata[ 'datatypes' ] %>
- <table class="grid">
- <tr>
- <td><b>extension</b></td>
- <td><b>type</b></td>
- <td><b>mimetype</b></td>
- <td><b>subclass</b></td>
- </tr>
- %for datatypes_dict in datatypes_dicts:
- <%
- extension = datatypes_dict.get( 'extension', ' ' )
- dtype = datatypes_dict.get( 'dtype', ' ' )
- mimetype = datatypes_dict.get( 'mimetype', ' ' )
- subclass = datatypes_dict.get( 'subclass', ' ' )
- %>
- <tr>
- <td>${extension}</td>
- <td>${dtype}</td>
- <td>${mimetype}</td>
- <td>${subclass}</td>
- </tr>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
%endif
%if can_reset_metadata:
<form name="set_metadata" action="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post">
diff -r 599e16b18e34267a9563f47b1acaa234165482ea -r b1f35669f93c4163cf803d64beb37a1cb0d5381c templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
--- a/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
+++ b/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
@@ -6,9 +6,12 @@
<ul class="manage-table-actions"><li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li><div popupmenu="repository-${repository.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
+ %if repository.tool_dependencies:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
+ %endif
</div></ul>
diff -r 599e16b18e34267a9563f47b1acaa234165482ea -r b1f35669f93c4163cf803d64beb37a1cb0d5381c templates/admin/tool_shed_repository/manage_repository.mako
--- a/templates/admin/tool_shed_repository/manage_repository.mako
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -6,13 +6,13 @@
<ul class="manage-table-actions"><li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li><div popupmenu="repository-${repository.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
%if repository.includes_tools:
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a>
%endif
- %if tool_dependencies_missing:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install tool dependencies</a>
+ %if repository.tool_dependencies:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
%endif
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a></div>
@@ -64,5 +64,5 @@
</div></div><p/>
-${render_metadata( repository, can_reset_metadata=True )}
+${render_repository_contents( repository, can_reset_metadata=True )}
<p/>
diff -r 599e16b18e34267a9563f47b1acaa234165482ea -r b1f35669f93c4163cf803d64beb37a1cb0d5381c templates/admin/tool_shed_repository/manage_tool_dependencies.mako
--- /dev/null
+++ b/templates/admin/tool_shed_repository/manage_tool_dependencies.mako
@@ -0,0 +1,56 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<% import os %>
+
+<br/><br/>
+<ul class="manage-table-actions">
+ <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
+ <div popupmenu="repository-${repository.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
+ %if repository.includes_tools:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a>
+ %endif
+ %if repository.missing_tool_dependencies:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
+ %endif
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
+ </div>
+</ul>
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+ <div class="toolFormTitle">${repository.name} repository's installed tool dependencies</div>
+ <div class="toolFormBody">
+ <div class="form-row">
+ <table class="grid">
+ %for tool_dependency in repository.tool_dependencies:
+ <%
+ name = tool_dependency.name
+ version = tool_dependency.version
+ type = tool_dependency.type
+ installed_changeset_revision = tool_dependency.installed_changeset_revision
+ uninstalled = tool_dependency.uninstalled
+ install_dir = os.path.abspath( os.path.join( trans.app.config.tool_dependency_dir,
+ name,
+ version,
+ repository.owner,
+ repository.name,
+ installed_changeset_revision ) )
+ %>
+ <tr><td bgcolor="#D8D8D8"><b>Name</b></td><td bgcolor="#D8D8D8">${name}</td></tr>
+ <tr><th>Version</th><td>${version}</td></tr>
+ <tr><th>Type</th><td>${type}</td></tr>
+ <tr><th>Install directory</th><td>${install_dir}</td></tr>
+ <tr><th>Installed changeset revision</th><td>${installed_changeset_revision}</td></tr>
+ <tr><th>Uninstalled</th><td>${uninstalled}</td></tr>
+ %endfor
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ </div>
+</div>
diff -r 599e16b18e34267a9563f47b1acaa234165482ea -r b1f35669f93c4163cf803d64beb37a1cb0d5381c templates/admin/tool_shed_repository/view_tool_metadata.mako
--- a/templates/admin/tool_shed_repository/view_tool_metadata.mako
+++ b/templates/admin/tool_shed_repository/view_tool_metadata.mako
@@ -5,9 +5,12 @@
<ul class="manage-table-actions"><li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li><div popupmenu="repository-${repository.id}-popup">
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
+ %if repository.tool_dependencies:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
+ %endif
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a></div></ul>
@@ -95,15 +98,27 @@
requirements = None
%>
%if requirements:
- <%
- requirements_str = ''
- for requirement_dict in metadata[ 'requirements' ]:
- requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] )
- requirements_str = requirements_str.rstrip( ', ' )
- %><div class="form-row"><label>Requirements:</label>
- ${requirements_str}
+ <table class="grid">
+ <tr>
+ <td><b>name</b></td>
+ <td><b>version</b></td>
+ <td><b>type</b></td>
+ </tr>
+ %for requirement_dict in requirements:
+ <%
+ requirement_name = requirement_dict[ 'name' ] or 'not provided'
+ requirement_version = requirement_dict[ 'version' ] or 'not provided'
+ requirement_type = requirement_dict[ 'type' ] or 'not provided'
+ %>
+ <tr>
+ <td>${requirement_name}</td>
+ <td>${requirement_version}</td>
+ <td>${requirement_type}</td>
+ </tr>
+ %endfor
+ </table><div style="clear: both"></div></div>
%endif
diff -r 599e16b18e34267a9563f47b1acaa234165482ea -r b1f35669f93c4163cf803d64beb37a1cb0d5381c templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -22,13 +22,13 @@
// initAjax is hard to fake, so we pass the children as object array:
initAjax: {url: "${h.url_for( controller='repository', action='open_folder' )}",
dataType: "json",
- data: { repository_id: "${trans.security.encode_id( repository.id )}", key: "${repository.repo_path}" },
+ data: { folder_path: "${repository.repo_path}" },
},
onLazyRead: function(dtnode){
dtnode.appendAjax({
url: "${h.url_for( controller='repository', action='open_folder' )}",
dataType: "json",
- data: { repository_id: "${trans.security.encode_id( repository.id )}", key: dtnode.data.key },
+ data: { folder_path: dtnode.data.key },
});
},
onSelect: function(select, dtnode) {
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Make migration script compatible with MySQL.
by Bitbucket 13 Jun '12
by Bitbucket 13 Jun '12
13 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/599e16b18e34/
changeset: 599e16b18e34
user: jgoecks
date: 2012-06-13 21:41:52
summary: Make migration script compatible with MySQL.
affected #: 1 file
diff -r ebd30e5e9120181833621fc9710c1066ad3f9aa7 -r 599e16b18e34267a9563f47b1acaa234165482ea lib/galaxy/model/migrate/versions/0095_hda_subsets.py
--- a/lib/galaxy/model/migrate/versions/0095_hda_subsets.py
+++ b/lib/galaxy/model/migrate/versions/0095_hda_subsets.py
@@ -17,8 +17,8 @@
HistoryDatasetAssociationSubset_table = Table( "history_dataset_association_subset", metadata,
Column( "id", Integer, primary_key=True ),
- Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
- Column( "history_dataset_association_subset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+ Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ) ),
+ Column( "history_dataset_association_subset_id", Integer, ForeignKey( "history_dataset_association.id" ) ),
Column( "location", Unicode(255), index=True)
)
@@ -32,6 +32,16 @@
except Exception, e:
print str(e)
log.debug( "Creating history_dataset_association_subset table failed: %s" % str( e ) )
+
+ # Manually create indexes because they are too long for MySQL databases.
+ i1 = Index( "ix_hda_id", HistoryDatasetAssociationSubset_table.c.history_dataset_association_id )
+ i2 = Index( "ix_hda_subset_id", HistoryDatasetAssociationSubset_table.c.history_dataset_association_subset_id )
+ try:
+ i1.create()
+ i2.create()
+ except Exception, e:
+ print str(e)
+ log.debug( "Adding indices to table 'history_dataset_association_subset' table failed: %s" % str( e ) )
def downgrade():
metadata.reflect()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

13 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ebd30e5e9120/
changeset: ebd30e5e9120
user: jgoecks
date: 2012-06-13 18:57:41
summary: Improve dataset fetching in visualizations.
affected #: 3 files
diff -r 5a3c60aca201a3cefc1baef1f945043afc52a3be -r ebd30e5e9120181833621fc9710c1066ad3f9aa7 lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -965,7 +965,7 @@
f = open( self.original_dataset.file_name )
return f, BigBedFile(file=f)
-class BigWigDataProvider (BBIDataProvider ):
+class BigWigDataProvider ( BBIDataProvider ):
def _get_dataset( self ):
if self.converted_dataset is not None:
f = open( self.converted_dataset.file_name )
diff -r 5a3c60aca201a3cefc1baef1f945043afc52a3be -r ebd30e5e9120181833621fc9710c1066ad3f9aa7 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -468,10 +468,7 @@
def pack_track( track_dict ):
dataset_id = track_dict['dataset_id']
hda_ldda = track_dict.get('hda_ldda', 'hda')
- if hda_ldda == "hda":
- dataset = self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
- else:
- dataset = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( dataset_id )
+ dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
try:
prefs = track_dict['prefs']
@@ -545,6 +542,13 @@
return config
+ def get_hda_or_ldda( self, trans, hda_ldda, dataset_id ):
+ """ Returns either HDA or LDDA for hda/ldda and id combination. """
+ if hda_ldda == "hda":
+ return self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
+ else:
+ return trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
+
# -- Helper functions --
def _create_visualization( self, trans, title, type, dbkey, slug=None, annotation=None ):
diff -r 5a3c60aca201a3cefc1baef1f945043afc52a3be -r ebd30e5e9120181833621fc9710c1066ad3f9aa7 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -177,17 +177,22 @@
@web.json
def add_track_async(self, trans, hda_id=None, ldda_id=None):
+ # Get dataset.
if hda_id:
hda_ldda = "hda"
- dataset = self.get_dataset( trans, hda_id, check_ownership=False, check_accessible=True )
+ dataset_id = hda_id
elif ldda_id:
hda_ldda = "ldda"
- dataset = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( ldda_id ) )
+ dataset_id = ldda_id
+ dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
+
+ # Get data provider.
track_type, _ = dataset.datatype.get_track_type()
track_data_provider_class = get_data_provider( original_dataset=dataset )
track_data_provider = track_data_provider_class( original_dataset=dataset )
- track = {
+ # Get track definition.
+ return {
"track_type": track_type,
"name": dataset.name,
"hda_ldda": hda_ldda,
@@ -197,16 +202,17 @@
"tool": get_tool_def( trans, dataset ),
"tool_state": {}
}
- return track
@web.json
def bookmarks_from_dataset( self, trans, hda_id=None, ldda_id=None ):
if hda_id:
hda_ldda = "hda"
- dataset = self.get_dataset( trans, hda_id, check_ownership=False, check_accessible=True )
+ dataset_id = hda_id
elif ldda_id:
hda_ldda = "ldda"
- dataset = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( ldda_id ) )
+ dataset_id = ldda_id
+ dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
+
rows = []
if isinstance( dataset.datatype, Bed ):
data = RawBedDataProvider( original_dataset=dataset ).get_iterator()
@@ -482,7 +488,7 @@
@web.require_login( "use Galaxy visualizations", use_panels=True )
def paramamonster( self, trans, hda_ldda, dataset_id ):
# Get dataset.
- dataset = self._get_dataset( trans, hda_ldda, dataset_id )
+ dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
return trans.fill_template_mako( "visualization/paramamonster.mako", dataset=dataset,
tool=self.app.toolbox.tools_by_id[ 'cufflinks' ].to_dict( trans, for_display=True ) )
@@ -491,7 +497,7 @@
@web.require_login( "use Galaxy visualizations", use_panels=True )
def circster( self, trans, hda_ldda, dataset_id ):
# Get dataset.
- dataset = self._get_dataset( trans, hda_ldda, dataset_id )
+ dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
# Get genome info.
dbkey = dataset.dbkey
@@ -535,11 +541,4 @@
# Store msg.
data_sources_dict[ source_type ] = { "name" : data_source, "message": msg }
- return data_sources_dict
-
- def _get_dataset( self, trans, hda_ldda, dataset_id ):
- """ Returns either HDA or LDDA for hda/ldda and id combination. """
- if hda_ldda == "hda":
- return self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
- else:
- return trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
\ No newline at end of file
+ return data_sources_dict
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Updates to trackster to work with f93ecd917348 and 4fbd05095ca7.
by Bitbucket 13 Jun '12
by Bitbucket 13 Jun '12
13 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5a3c60aca201/
changeset: 5a3c60aca201
user: jgoecks
date: 2012-06-13 18:29:20
summary: Updates to trackster to work with f93ecd917348 and 4fbd05095ca7.
affected #: 3 files
diff -r f93ecd917348aeef6332a6d441300ab505daad73 -r 5a3c60aca201a3cefc1baef1f945043afc52a3be lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -327,7 +327,7 @@
# Get datasources and check for messages.
data_sources = self._get_datasources( trans, dataset )
messages_list = [ data_source_dict[ 'message' ] for data_source_dict in data_sources.values() ]
- msg = _get_highest_priority_msg( messages_list )
+ msg = get_highest_priority_msg( messages_list )
if msg:
return msg
@@ -375,7 +375,7 @@
# Get datasources and check for messages.
data_sources = self._get_datasources( trans, dataset )
messages_list = [ data_source_dict[ 'message' ] for data_source_dict in data_sources.values() ]
- return_message = _get_highest_priority_msg( messages_list )
+ return_message = get_highest_priority_msg( messages_list )
if return_message:
return return_message
diff -r f93ecd917348aeef6332a6d441300ab505daad73 -r 5a3c60aca201a3cefc1baef1f945043afc52a3be templates/tracks/browser.mako
--- a/templates/tracks/browser.mako
+++ b/templates/tracks/browser.mako
@@ -42,22 +42,21 @@
<script type='text/javascript' src="${h.url_for('/static/scripts/excanvas.js')}"></script><![endif]-->
-${h.js( "galaxy.base", "galaxy.panels", "json2", "jquery", "jstorage", "jquery.event.drag", "jquery.event.hover","jquery.mousewheel", "jquery.autocomplete", "viz/visualization", "viz/trackster", "viz/trackster_ui", "jquery.ui.sortable.slider", "farbtastic" )}
+${h.js( "galaxy.base", "galaxy.panels", "json2", "jquery", "jstorage", "jquery.event.drag", "jquery.event.hover","jquery.mousewheel", "jquery.autocomplete", "mvc/data", "viz/visualization", "viz/trackster", "viz/trackster_ui", "jquery.ui.sortable.slider", "farbtastic" )}
<script type="text/javascript">
//
// Place URLs here so that url_for can be used to generate them.
//
galaxy_paths.set({
- visualization_url: "${h.url_for( action='save' )}"
+ visualization_url: "${h.url_for( action='save' )}",
+ run_tool_url: "${h.url_for( controller='/api/tools' )}"
});
var
add_track_async_url = "${h.url_for( action='add_track_async' )}",
add_datasets_url = "${h.url_for( action='list_current_history_datasets' )}",
default_data_url = "${h.url_for( action='data' )}",
- raw_data_url = "${h.url_for( action='raw_data' )}",
- run_tool_url = "${h.url_for( action='run_tool' )}",
- rerun_tool_url = "${h.url_for( action='rerun_tool' )}",
+ raw_data_url = "${h.url_for( action='raw_data' )}",
reference_url = "${h.url_for( action='reference' )}",
chrom_url = "${h.url_for( action='chroms' )}",
dataset_state_url = "${h.url_for( action='dataset_state' )}",
diff -r f93ecd917348aeef6332a6d441300ab505daad73 -r 5a3c60aca201a3cefc1baef1f945043afc52a3be templates/visualization/paramamonster.mako
--- a/templates/visualization/paramamonster.mako
+++ b/templates/visualization/paramamonster.mako
@@ -35,18 +35,42 @@
${parent.javascripts()}
${h.templates( "tool_link", "panel_section", "tool_search" )}
- ${h.js( "libs/d3", "viz/visualization", "viz/paramamonster", "mvc/tools" )}
+ ${h.js( "libs/d3", "mvc/data", "mvc/tools", "viz/visualization", "viz/paramamonster" )}
<script type="text/javascript">
+ var tool;
$(function() {
// -- Viz set up. --
- var tool = new Tool(JSON.parse('${ h.to_json_string( tool ) }')),
- tool_param_tree = new ToolParameterTree({ tool: tool }),
- tool_param_tree_view = new ToolParameterTreeView({ model: tool_param_tree });
+ tool = new Tool(JSON.parse('${ h.to_json_string( tool ) }'));
+ // HACK: need to replace \ with \\ due to simplejson bug.
+ var dataset = new Dataset(JSON.parse('${ h.to_json_string( dataset.get_api_value() ).replace('\\', '\\\\' ) }')),
+ paramamonster_viz = new ParamaMonsterVisualization({
+ tool: tool,
+ dataset: dataset
+ });
+ viz_view = new ParamaMonsterVisualizationView({ model: paramamonster_viz });
- tool_param_tree_view.render();
- $('#param-tree').append(tool_param_tree_view.$el);
+ viz_view.render();
+ $('.unified-panel-body').append(viz_view.$el);
+
+ // Tool testing.
+ var regions = [
+ new GenomeRegion({
+ chrom: 'chr19',
+ start: '10000',
+ end: '26000'
+ }),
+ new GenomeRegion({
+ chrom: 'chr19',
+ start: '30000',
+ end: '36000'
+ })
+ ];
+
+ $.when(tool.rerun(dataset, regions)).then(function(outputs) {
+ console.log(outputs);
+ });
});
</script></%def>
@@ -59,7 +83,5 @@
<div style="clear: both"></div></div><div class="unified-panel-body">
- <div id="param-tree"></div>
- <div id="tile-view"></div></div></%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Viz framework: (a) JavaScript support for running tools and enhancements to parameter viz.
by Bitbucket 13 Jun '12
by Bitbucket 13 Jun '12
13 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f93ecd917348/
changeset: f93ecd917348
user: jgoecks
date: 2012-06-13 18:10:54
summary: Viz framework: (a) JavaScript support for running tools and enhancements to parameter viz.
affected #: 5 files
diff -r 7387d416d655e086ad3b792058069ea82ce23a3c -r f93ecd917348aeef6332a6d441300ab505daad73 static/scripts/mvc/data.js
--- /dev/null
+++ b/static/scripts/mvc/data.js
@@ -0,0 +1,16 @@
+/**
+ * A dataset. In Galaxy, datasets are associated with a history, so
+ * this object is also known as a HistoryDatasetAssociation.
+ */
+var Dataset = Backbone.Model.extend({
+ defaults: {
+ id: "",
+ type: "",
+ name: "",
+ hda_ldda: ""
+ }
+});
+
+var DatasetCollection = Backbone.Collection.extend({
+ model: Dataset
+});
\ No newline at end of file
diff -r 7387d416d655e086ad3b792058069ea82ce23a3c -r f93ecd917348aeef6332a6d441300ab505daad73 static/scripts/mvc/tools.js
--- a/static/scripts/mvc/tools.js
+++ b/static/scripts/mvc/tools.js
@@ -51,11 +51,78 @@
}
],
- urlRoot: galaxy_paths.attributes.root_path + 'api/tools',
+ urlRoot: galaxy_paths.get('tool_url'),
apply_search_results: function(results) {
( _.indexOf(results, this.attributes.id) !== -1 ? this.show() : this.hide() );
return this.is_visible();
+ },
+
+ /**
+ * Set a tool input's value.
+ */
+ set_input_value: function(name, value) {
+ this.get('inputs').find(function(input) {
+ return input.get('name') === name;
+ }).set('value', value);
+ },
+
+ /**
+ * Run tool; returns a Deferred that resolves to the tool's output(s).
+ */
+ run: function() {
+ return this._run()
+ },
+
+ /**
+ * Rerun tool using regions and a target dataset.
+ */
+ rerun: function(target_dataset, regions) {
+ return this._run({
+ action: 'rerun',
+ target_dataset_id: target_dataset.id,
+ regions: JSON.stringify(regions)
+ });
+ },
+
+ /**
+ * Run tool; returns a Deferred that resolves to the tool's output(s).
+ * NOTE: this method is a helper method and should not be called directly.
+ */
+ _run: function(additional_params) {
+ // Create payload.
+ var payload = _.extend({
+ tool_id: this.id
+ }, additional_params),
+ input_dict = {};
+ this.get('inputs').each(function(input) {
+ input_dict[input.get('name')] = input.get('value');
+ });
+ payload.inputs = input_dict;
+
+ // Because job may require indexing datasets, use server-side
+ // deferred to ensure that job is run. Also use deferred that
+ // resolves to outputs from tool.
+ var run_deferred = $.Deferred(),
+ ss_deferred = new ServerStateDeferred({
+ ajax_settings: {
+ url: this.urlRoot,
+ data: JSON.stringify(payload),
+ dataType: "json",
+ contentType: 'application/json',
+ type: "POST"
+ },
+ interval: 2000,
+ success_fn: function(response) {
+ return response !== "pending";
+ }
+ });
+
+ // Run job and resolve run_deferred to tool outputs.
+ $.when(ss_deferred.go()).then(function(result) {
+ run_deferred.resolve(new DatasetCollection().reset(result));
+ });
+ return run_deferred;
}
});
@@ -65,7 +132,9 @@
var ToolInput = Backbone.RelationalModel.extend({
defaults: {
name: null,
- type: null
+ label: null,
+ type: null,
+ value: null,
},
initialize: function() {
diff -r 7387d416d655e086ad3b792058069ea82ce23a3c -r f93ecd917348aeef6332a6d441300ab505daad73 static/scripts/viz/paramamonster.js
--- a/static/scripts/viz/paramamonster.js
+++ b/static/scripts/viz/paramamonster.js
@@ -38,15 +38,16 @@
var
param_samples = params_samples[index],
param = param_samples.get('param'),
- param_name = param.get('name'),
+ param_label = param.get('label'),
settings = param_samples.get('samples');
// Create leaves when last parameter setting is reached.
if (params_samples.length - 1 === index) {
return _.map(settings, function(setting) {
return {
- name: param_name + '=' + setting,
- param: param
+ name: param_label + '=' + setting,
+ param: param,
+ value: setting
}
});
}
@@ -54,8 +55,9 @@
// Recurse to handle other parameters.
return _.map(settings, function(setting) {
return {
- name: param_name + '=' + setting,
+ name: param_label + '=' + setting,
param: param,
+ value: setting,
children: create_tree_data(filtered_params_samples, index + 1)
}
});
@@ -94,104 +96,17 @@
});
/**
- * A track in a genome browser.
+ * ParamaMonster visualization model.
*/
-var Track = Backbone.Model.extend({
- defaults: {
- dataset: null
- }
-});
-
-var FeatureTrack = Track.extend({
- defaults: {
- track: null
- },
+var ParamaMonsterVisualization = Visualization.extend({
+ defaults: _.extend({}, Visualization.prototype.defaults, {
+ tool: null,
+ parameter_tree: null,
+ regions: null
+ }),
- /**
- * Draw FeatureTrack tile.
- * @param result result from server
- * @param cxt canvas context to draw on
- * @param mode mode to draw in
- * @param resolution view resolution
- * @param region region to draw
- * @param w_scale pixels per base
- * @param ref_seq reference sequence data
- */
- draw_tile: function(result, ctx, mode, resolution, region, w_scale, ref_seq) {
- var track = this,
- canvas = ctx.canvas,
- tile_low = region.get('start'),
- tile_high = region.get('end'),
- min_height = 25,
- left_offset = this.left_offset;
-
- // Drawing the summary tree (feature coverage histogram)
- if (mode === "summary_tree" || mode === "Histogram") {
- // Get summary tree data if necessary and set max if there is one.
- if (result.dataset_type !== "summary_tree") {
- var st_data = this.get_summary_tree_data(result.data, tile_low, tile_high, 200);
- if (result.max) {
- st_data.max = result.max;
- }
- result = st_data;
- }
- // Paint summary tree into canvas
- var painter = new painters.SummaryTreePainter(result, tile_low, tile_high, this.prefs);
- painter.draw(ctx, canvas.width, canvas.height, w_scale);
- return new SummaryTreeTile(track, tile_index, resolution, canvas, result.data, result.max);
- }
-
- // Handle row-by-row tracks
-
- // Preprocessing: filter features and determine whether all unfiltered features have been slotted.
- var
- filtered = [],
- slots = this.slotters[w_scale].slots;
- all_slotted = true;
- if ( result.data ) {
- var filters = this.filters_manager.filters;
- for (var i = 0, len = result.data.length; i < len; i++) {
- var feature = result.data[i];
- var hide_feature = false;
- var filter;
- for (var f = 0, flen = filters.length; f < flen; f++) {
- filter = filters[f];
- filter.update_attrs(feature);
- if (!filter.keep(feature)) {
- hide_feature = true;
- break;
- }
- }
- if (!hide_feature) {
- // Feature visible.
- filtered.push(feature);
- // Set flag if not slotted.
- if ( !(feature[0] in slots) ) {
- all_slotted = false;
- }
- }
- }
- }
-
- // Create painter.
- var filter_alpha_scaler = (this.filters_manager.alpha_filter ? new FilterScaler(this.filters_manager.alpha_filter) : null);
- var filter_height_scaler = (this.filters_manager.height_filter ? new FilterScaler(this.filters_manager.height_filter) : null);
- // HACK: ref_seq will only be defined for ReadTracks, and only the ReadPainter accepts that argument
- var painter = new (this.painter)(filtered, tile_low, tile_high, this.prefs, mode, filter_alpha_scaler, filter_height_scaler, ref_seq);
- var feature_mapper = null;
-
- // console.log(( tile_low - this.view.low ) * w_scale, tile_index, w_scale);
- ctx.fillStyle = this.prefs.block_color;
- ctx.font = ctx.canvas.manager.default_font;
- ctx.textAlign = "right";
-
- if (result.data) {
- // Draw features.
- feature_mapper = painter.draw(ctx, canvas.width, canvas.height, w_scale, slots);
- feature_mapper.translation = -left_offset;
- }
-
- return new FeatureTrackTile(track, tile_index, resolution, canvas, result.data, w_scale, mode, result.message, all_slotted, feature_mapper);
+ initialize: function(options) {
+ this.set('parameter_tree', new ToolParameterTree({ tool: this.get('tool') }));
}
});
@@ -207,7 +122,6 @@
className: 'tool-parameter-tree',
initialize: function(options) {
- this.model = options.model;
},
render: function() {
@@ -244,7 +158,32 @@
.attr("class", "node")
.attr("transform", function(d) { return "translate(" + d.y + "," + d.x + ")"; });
- // Set up behavior when node is clicked.
+ node.append("circle")
+ .attr("r", 4.5);
+
+ node.append("text")
+ .attr("dx", function(d) { return d.children ? -8 : 8; })
+ .attr("dy", 3)
+ .attr("text-anchor", function(d) { return d.children ? "end" : "start"; })
+ .text(function(d) { return d.name; });
+ }
+});
+
+var ParamaMonsterVisualizationView = Backbone.View.extend({
+ className: 'paramamonster',
+
+ initialize: function(options) {
+
+ },
+
+ render: function() {
+ // Set up tool parameter tree.
+ var tool_param_tree_view = new ToolParameterTreeView({ model: this.model.get('parameter_tree') });
+ tool_param_tree_view.render();
+ this.$el.append(tool_param_tree_view.$el);
+
+ // When node clicked in tree, run tool and show tiles.
+ var node = d3.select(tool_param_tree_view.$el[0]).selectAll("g.node")
node.on("click", function(d, i) {
console.log(d, i);
@@ -256,14 +195,6 @@
// Display tiles for region(s) of interest.
});
-
- node.append("circle")
- .attr("r", 4.5);
-
- node.append("text")
- .attr("dx", function(d) { return d.children ? -8 : 8; })
- .attr("dy", 3)
- .attr("text-anchor", function(d) { return d.children ? "end" : "start"; })
- .text(function(d) { return d.name; });
- }
+
+ },
});
\ No newline at end of file
diff -r 7387d416d655e086ad3b792058069ea82ce23a3c -r f93ecd917348aeef6332a6d441300ab505daad73 static/scripts/viz/visualization.js
--- a/static/scripts/viz/visualization.js
+++ b/static/scripts/viz/visualization.js
@@ -11,11 +11,10 @@
/**
* Implementation of a server-state based deferred. Server is repeatedly polled, and when
* condition is met, deferred is resolved.
- */
+ */
var ServerStateDeferred = Backbone.Model.extend({
defaults: {
- url: null,
- url_params: {},
+ ajax_settings: {},
interval: 1000,
success_fn: function(result) { return true; }
},
@@ -26,10 +25,11 @@
go: function() {
var deferred = $.Deferred(),
self = this,
+ ajax_settings = self.get('ajax_settings'),
success_fn = self.get('success_fn'),
interval = self.get('interval'),
_go = function() {
- $.getJSON(self.get('url'), self.get('url_params'), function(result) {
+ $.ajax(ajax_settings).success(function(result) {
if (success_fn(result)) {
// Result is good, so resolve.
deferred.resolve(result);
@@ -372,6 +372,14 @@
return this.get('chrom') + ":" + this.get('start') + "-" + this.get('end');
},
+ toJSON: function() {
+ return {
+ chrom: this.get('chrom'),
+ start: this.get('start'),
+ end: this.get('end')
+ }
+ },
+
/**
* Compute the type of overlap between this region and another region. The overlap is computed relative to the given/second region;
* hence, OVERLAP_START indicates that the first region overlaps the start (but not the end) of the second region.
@@ -495,19 +503,6 @@
});
/**
- * A dataset. In Galaxy, datasets are associated with a history, so
- * this object is also known as a HistoryDatasetAssociation.
- */
-var Dataset = Backbone.Model.extend({
- defaults: {
- id: "",
- type: "",
- name: "",
- hda_ldda: ""
- }
-});
-
-/**
* A histogram dataset.
*/
var HistogramDataset = Backbone.Model.extend({
diff -r 7387d416d655e086ad3b792058069ea82ce23a3c -r f93ecd917348aeef6332a6d441300ab505daad73 templates/base_panels.mako
--- a/templates/base_panels.mako
+++ b/templates/base_panels.mako
@@ -52,7 +52,8 @@
// Set up needed paths.
var galaxy_paths = new GalaxyPaths({
root_path: '${h.url_for( "/" )}',
- image_path: '${h.url_for( "/static/images" )}'
+ image_path: '${h.url_for( "/static/images" )}',
+ tool_url: '${h.url_for( controller="/api/tools" )}'
});
</script></%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7387d416d655/
changeset: 7387d416d655
user: jgoecks
date: 2012-06-13 17:35:25
summary: Remove debugging statement.
affected #: 1 file
diff -r 7705869d2c7798dbd31b30d6a229cbc3b9ab3e87 -r 7387d416d655e086ad3b792058069ea82ce23a3c lib/galaxy/web/api/tools.py
--- a/lib/galaxy/web/api/tools.py
+++ b/lib/galaxy/web/api/tools.py
@@ -135,7 +135,6 @@
# Run tool on region if region is specificied.
run_on_regions = False
regions = from_json_string( payload.get( 'regions', None ) )
- print regions, payload
if regions:
if isinstance( regions, dict ):
# Regions is a single region.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

13 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7705869d2c77/
changeset: 7705869d2c77
user: jgoecks
date: 2012-06-13 17:34:09
summary: Method name updates related to 4fbd05095ca7
affected #: 1 file
diff -r 77ceb114bf57172579310a0b4347da0f5f46dec6 -r 7705869d2c7798dbd31b30d6a229cbc3b9ab3e87 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -273,7 +273,7 @@
# Dataset check.
dataset = self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
- msg = self._check_dataset_state( trans, dataset )
+ msg = self.check_dataset_state( trans, dataset )
if msg:
return msg
@@ -301,7 +301,7 @@
# Dataset check.
dataset = self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
- msg = self._check_dataset_state( trans, dataset )
+ msg = self.check_dataset_state( trans, dataset )
if not msg:
msg = messages.DATA
@@ -320,7 +320,7 @@
dataset = self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
else:
dataset = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
- msg = self._check_dataset_state( trans, dataset )
+ msg = self.check_dataset_state( trans, dataset )
if msg:
return msg
@@ -368,7 +368,7 @@
dataset = self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
else:
dataset = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
- msg = self._check_dataset_state( trans, dataset )
+ msg = self.check_dataset_state( trans, dataset )
if msg:
return msg
@@ -530,7 +530,7 @@
msg = None
else:
# Convert.
- msg = self._convert_dataset( trans, dataset, data_source )
+ msg = self.convert_dataset( trans, dataset, data_source )
# Store msg.
data_sources_dict[ source_type ] = { "name" : data_source, "message": msg }
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

13 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/77ceb114bf57/
changeset: 77ceb114bf57
user: dannon
date: 2012-06-13 17:31:00
summary: Initial commit of Galaxy Documentation.
affected #: 75 files
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/Makefile
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,153 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Galaxy.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Galaxy.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/Galaxy"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Galaxy"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/conf.py
--- /dev/null
+++ b/doc/source/conf.py
@@ -0,0 +1,254 @@
+# -*- coding: utf-8 -*-
+#
+# Galaxy documentation build configuration file, created by
+# sphinx-quickstart on Tue Mar 6 10:44:44 2012.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+
+####### REQUIRED GALAXY INCLUDES
+
+sys.path.append(os.path.join(os.getcwd(), '../../lib'))
+import galaxy
+from galaxy import eggs
+
+#######
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+#configure default autodoc's action
+autodoc_default_flags = ['members', 'undoc-members']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Galaxy'
+copyright = u'2012, Galaxy Team'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+# version = '1'
+# The full version, including alpha/beta/rc tags.
+# release = '1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Galaxydoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'Galaxy.tex', u'Galaxy Documentation',
+ u'Galaxy Team', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'galaxy', u'Galaxy Documentation',
+ [u'Galaxy Team'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'Galaxy', u'Galaxy Documentation',
+ u'Galaxy Team', 'Galaxy', 'Data intensive biology for everyone.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/index.rst
--- /dev/null
+++ b/doc/source/index.rst
@@ -0,0 +1,29 @@
+Galaxy Documentation
+********************
+
+Galaxy is an open, web-based platform for accessible, reproducible, and
+transparent computational biomedical research.
+ - Accessible: Users without programming experience can easily specify
+ parameters and run tools and workflows.
+ - Reproducible: Galaxy captures information so that any user can repeat and
+ understand a complete computational analysis.
+ - Transparent: Users share and publish analyses via the web and create
+ Pages, interactive, web-based documents that describe a complete
+ analysis.
+
+Contents
+========
+.. toctree::
+ :maxdepth: 4
+
+ API Documentation <lib/galaxy.web.api>
+
+ Application Documentation <lib/galaxy>
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/fpconst.rst
--- /dev/null
+++ b/doc/source/lib/fpconst.rst
@@ -0,0 +1,7 @@
+fpconst Module
+==============
+
+.. automodule:: fpconst
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.actions.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.actions.rst
@@ -0,0 +1,11 @@
+actions Package
+===============
+
+:mod:`admin` Module
+-------------------
+
+.. automodule:: galaxy.actions.admin
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.datatypes.converters.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.datatypes.converters.rst
@@ -0,0 +1,251 @@
+converters Package
+==================
+
+:mod:`bed_to_genetrack_converter` Module
+----------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.bed_to_genetrack_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`bed_to_gff_converter` Module
+----------------------------------
+
+.. automodule:: galaxy.datatypes.converters.bed_to_gff_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`bedgraph_to_array_tree_converter` Module
+----------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.bedgraph_to_array_tree_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`bgzip` Module
+-------------------
+
+.. automodule:: galaxy.datatypes.converters.bgzip
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`fasta_to_len` Module
+--------------------------
+
+.. automodule:: galaxy.datatypes.converters.fasta_to_len
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`fasta_to_tabular_converter` Module
+----------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.fasta_to_tabular_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`fastq_to_fqtoc` Module
+----------------------------
+
+.. automodule:: galaxy.datatypes.converters.fastq_to_fqtoc
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`fastqsolexa_to_fasta_converter` Module
+--------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.fastqsolexa_to_fasta_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`fastqsolexa_to_qual_converter` Module
+-------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.fastqsolexa_to_qual_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`gff_to_bed_converter` Module
+----------------------------------
+
+.. automodule:: galaxy.datatypes.converters.gff_to_bed_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`gff_to_interval_index_converter` Module
+---------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.gff_to_interval_index_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`interval_to_bed_converter` Module
+---------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.interval_to_bed_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`interval_to_bedstrict_converter` Module
+---------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.interval_to_bedstrict_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`interval_to_coverage` Module
+----------------------------------
+
+.. automodule:: galaxy.datatypes.converters.interval_to_coverage
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`interval_to_interval_index_converter` Module
+--------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.interval_to_interval_index_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`interval_to_summary_tree_converter` Module
+------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.interval_to_summary_tree_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`interval_to_tabix_converter` Module
+-----------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.interval_to_tabix_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`lped_to_fped_converter` Module
+------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.lped_to_fped_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`lped_to_pbed_converter` Module
+------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.lped_to_pbed_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`maf_to_fasta_converter` Module
+------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.maf_to_fasta_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`maf_to_interval_converter` Module
+---------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.maf_to_interval_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`pbed_ldreduced_converter` Module
+--------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.pbed_ldreduced_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`pbed_to_lped_converter` Module
+------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.pbed_to_lped_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`picard_interval_list_to_bed6_converter` Module
+----------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.picard_interval_list_to_bed6_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`sam_or_bam_to_summary_tree_converter` Module
+--------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.sam_or_bam_to_summary_tree_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`sam_to_bam` Module
+------------------------
+
+.. automodule:: galaxy.datatypes.converters.sam_to_bam
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`vcf_to_interval_index_converter` Module
+---------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.vcf_to_interval_index_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`vcf_to_summary_tree_converter` Module
+-------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.vcf_to_summary_tree_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`vcf_to_vcf_bgzip` Module
+------------------------------
+
+.. automodule:: galaxy.datatypes.converters.vcf_to_vcf_bgzip
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`wiggle_to_array_tree_converter` Module
+--------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.wiggle_to_array_tree_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`wiggle_to_simple_converter` Module
+----------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.wiggle_to_simple_converter
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.datatypes.display_applications.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.datatypes.display_applications.rst
@@ -0,0 +1,27 @@
+display_applications Package
+============================
+
+:mod:`application` Module
+-------------------------
+
+.. automodule:: galaxy.datatypes.display_applications.application
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`parameters` Module
+------------------------
+
+.. automodule:: galaxy.datatypes.display_applications.parameters
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`util` Module
+------------------
+
+.. automodule:: galaxy.datatypes.display_applications.util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.datatypes.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.datatypes.rst
@@ -0,0 +1,156 @@
+datatypes Package
+=================
+
+:mod:`assembly` Module
+----------------------
+
+.. automodule:: galaxy.datatypes.assembly
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`binary` Module
+--------------------
+
+.. automodule:: galaxy.datatypes.binary
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`checkers` Module
+----------------------
+
+.. automodule:: galaxy.datatypes.checkers
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`chrominfo` Module
+-----------------------
+
+.. automodule:: galaxy.datatypes.chrominfo
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`coverage` Module
+----------------------
+
+.. automodule:: galaxy.datatypes.coverage
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`data` Module
+------------------
+
+.. automodule:: galaxy.datatypes.data
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`genetics` Module
+----------------------
+
+.. automodule:: galaxy.datatypes.genetics
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`images` Module
+--------------------
+
+.. automodule:: galaxy.datatypes.images
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`interval` Module
+----------------------
+
+.. automodule:: galaxy.datatypes.interval
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`metadata` Module
+----------------------
+
+.. automodule:: galaxy.datatypes.metadata
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`ngsindex` Module
+----------------------
+
+.. automodule:: galaxy.datatypes.ngsindex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`qualityscore` Module
+--------------------------
+
+.. automodule:: galaxy.datatypes.qualityscore
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`registry` Module
+----------------------
+
+.. automodule:: galaxy.datatypes.registry
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`sequence` Module
+----------------------
+
+.. automodule:: galaxy.datatypes.sequence
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`sniff` Module
+-------------------
+
+.. automodule:: galaxy.datatypes.sniff
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`tabular` Module
+---------------------
+
+.. automodule:: galaxy.datatypes.tabular
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`tracks` Module
+--------------------
+
+.. automodule:: galaxy.datatypes.tracks
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`xml` Module
+-----------------
+
+.. automodule:: galaxy.datatypes.xml
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.datatypes.converters
+ galaxy.datatypes.display_applications
+ galaxy.datatypes.util
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.datatypes.util.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.datatypes.util.rst
@@ -0,0 +1,27 @@
+util Package
+============
+
+:mod:`util` Package
+-------------------
+
+.. automodule:: galaxy.datatypes.util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`gff_util` Module
+----------------------
+
+.. automodule:: galaxy.datatypes.util.gff_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`image_util` Module
+------------------------
+
+.. automodule:: galaxy.datatypes.util.image_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.eggs.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.eggs.rst
@@ -0,0 +1,27 @@
+eggs Package
+============
+
+:mod:`eggs` Package
+-------------------
+
+.. automodule:: galaxy.eggs
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`dist` Module
+------------------
+
+.. automodule:: galaxy.eggs.dist
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`scramble` Module
+----------------------
+
+.. automodule:: galaxy.eggs.scramble
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.exceptions.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.exceptions.rst
@@ -0,0 +1,11 @@
+exceptions Package
+==================
+
+:mod:`exceptions` Package
+-------------------------
+
+.. automodule:: galaxy.exceptions
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.external_services.result_handlers.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.external_services.result_handlers.rst
@@ -0,0 +1,11 @@
+result_handlers Package
+=======================
+
+:mod:`basic` Module
+-------------------
+
+.. automodule:: galaxy.external_services.result_handlers.basic
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.external_services.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.external_services.rst
@@ -0,0 +1,34 @@
+external_services Package
+=========================
+
+:mod:`actions` Module
+---------------------
+
+.. automodule:: galaxy.external_services.actions
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`parameters` Module
+------------------------
+
+.. automodule:: galaxy.external_services.parameters
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`service` Module
+---------------------
+
+.. automodule:: galaxy.external_services.service
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.external_services.result_handlers
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.forms.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.forms.rst
@@ -0,0 +1,11 @@
+forms Package
+=============
+
+:mod:`forms` Module
+-------------------
+
+.. automodule:: galaxy.forms.forms
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.jobs.actions.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.actions.rst
@@ -0,0 +1,19 @@
+actions Package
+===============
+
+:mod:`actions` Package
+----------------------
+
+.. automodule:: galaxy.jobs.actions
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`post` Module
+------------------
+
+.. automodule:: galaxy.jobs.actions.post
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.jobs.deferred.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.deferred.rst
@@ -0,0 +1,51 @@
+deferred Package
+================
+
+:mod:`deferred` Package
+-----------------------
+
+.. automodule:: galaxy.jobs.deferred
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`data_transfer` Module
+---------------------------
+
+.. automodule:: galaxy.jobs.deferred.data_transfer
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`genome_transfer` Module
+-----------------------------
+
+.. automodule:: galaxy.jobs.deferred.genome_transfer
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`liftover_transfer` Module
+-------------------------------
+
+.. automodule:: galaxy.jobs.deferred.liftover_transfer
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`manual_data_transfer` Module
+----------------------------------
+
+.. automodule:: galaxy.jobs.deferred.manual_data_transfer
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`pacific_biosciences_smrt_portal` Module
+---------------------------------------------
+
+.. automodule:: galaxy.jobs.deferred.pacific_biosciences_smrt_portal
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.jobs.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.rst
@@ -0,0 +1,45 @@
+jobs Package
+============
+
+:mod:`jobs` Package
+-------------------
+
+.. automodule:: galaxy.jobs
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`handler` Module
+---------------------
+
+.. automodule:: galaxy.jobs.handler
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`manager` Module
+---------------------
+
+.. automodule:: galaxy.jobs.manager
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`transfer_manager` Module
+------------------------------
+
+.. automodule:: galaxy.jobs.transfer_manager
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.jobs.actions
+ galaxy.jobs.deferred
+ galaxy.jobs.runners
+ galaxy.jobs.splitters
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.jobs.runners.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.runners.rst
@@ -0,0 +1,59 @@
+runners Package
+===============
+
+:mod:`runners` Package
+----------------------
+
+.. automodule:: galaxy.jobs.runners
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`drmaa` Module
+-------------------
+
+.. automodule:: galaxy.jobs.runners.drmaa
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`local` Module
+-------------------
+
+.. automodule:: galaxy.jobs.runners.local
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`lwr` Module
+-----------------
+
+.. automodule:: galaxy.jobs.runners.lwr
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`pbs` Module
+-----------------
+
+.. automodule:: galaxy.jobs.runners.pbs
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`sge` Module
+-----------------
+
+.. automodule:: galaxy.jobs.runners.sge
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`tasks` Module
+-------------------
+
+.. automodule:: galaxy.jobs.runners.tasks
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.jobs.splitters.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.splitters.rst
@@ -0,0 +1,19 @@
+splitters Package
+=================
+
+:mod:`basic` Module
+-------------------
+
+.. automodule:: galaxy.jobs.splitters.basic
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`multi` Module
+-------------------
+
+.. automodule:: galaxy.jobs.splitters.multi
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.model.migrate.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.model.migrate.rst
@@ -0,0 +1,11 @@
+migrate Package
+===============
+
+:mod:`check` Module
+-------------------
+
+.. automodule:: galaxy.model.migrate.check
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.model.orm.ext.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.model.orm.ext.rst
@@ -0,0 +1,19 @@
+ext Package
+===========
+
+:mod:`ext` Package
+------------------
+
+.. automodule:: galaxy.model.orm.ext
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`assignmapper` Module
+--------------------------
+
+.. automodule:: galaxy.model.orm.ext.assignmapper
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.model.orm.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.model.orm.rst
@@ -0,0 +1,26 @@
+orm Package
+===========
+
+:mod:`orm` Package
+------------------
+
+.. automodule:: galaxy.model.orm
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`logging_connection_proxy` Module
+--------------------------------------
+
+.. automodule:: galaxy.model.orm.logging_connection_proxy
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.model.orm.ext
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.model.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.model.rst
@@ -0,0 +1,51 @@
+model Package
+=============
+
+:mod:`model` Package
+--------------------
+
+.. automodule:: galaxy.model
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`custom_types` Module
+--------------------------
+
+.. automodule:: galaxy.model.custom_types
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`item_attrs` Module
+------------------------
+
+.. automodule:: galaxy.model.item_attrs
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`mapping` Module
+---------------------
+
+.. automodule:: galaxy.model.mapping
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`mapping_tests` Module
+---------------------------
+
+.. automodule:: galaxy.model.mapping_tests
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.model.migrate
+ galaxy.model.orm
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.objectstore.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.objectstore.rst
@@ -0,0 +1,19 @@
+objectstore Package
+===================
+
+:mod:`objectstore` Package
+--------------------------
+
+.. automodule:: galaxy.objectstore
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`s3_multipart_upload` Module
+---------------------------------
+
+.. automodule:: galaxy.objectstore.s3_multipart_upload
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.openid.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.openid.rst
@@ -0,0 +1,19 @@
+openid Package
+==============
+
+:mod:`openid` Package
+---------------------
+
+.. automodule:: galaxy.openid
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`providers` Module
+-----------------------
+
+.. automodule:: galaxy.openid.providers
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.quota.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.quota.rst
@@ -0,0 +1,11 @@
+quota Package
+=============
+
+:mod:`quota` Package
+--------------------
+
+.. automodule:: galaxy.quota
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.rst
@@ -0,0 +1,54 @@
+galaxy Package
+==============
+
+:mod:`galaxy` Package
+---------------------
+
+.. automodule:: galaxy
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`app` Module
+-----------------
+
+.. automodule:: galaxy.app
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`config` Module
+--------------------
+
+.. automodule:: galaxy.config
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.actions
+ galaxy.datatypes
+ galaxy.eggs
+ galaxy.exceptions
+ galaxy.external_services
+ galaxy.forms
+ galaxy.jobs
+ galaxy.model
+ galaxy.objectstore
+ galaxy.openid
+ galaxy.quota
+ galaxy.sample_tracking
+ galaxy.security
+ galaxy.tags
+ galaxy.tool_shed
+ galaxy.tools
+ galaxy.util
+ galaxy.visualization
+ galaxy.web
+ galaxy.webapps
+ galaxy.workflow
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.sample_tracking.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.sample_tracking.rst
@@ -0,0 +1,35 @@
+sample_tracking Package
+=======================
+
+:mod:`data_transfer` Module
+---------------------------
+
+.. automodule:: galaxy.sample_tracking.data_transfer
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`external_service_types` Module
+------------------------------------
+
+.. automodule:: galaxy.sample_tracking.external_service_types
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`request_types` Module
+---------------------------
+
+.. automodule:: galaxy.sample_tracking.request_types
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`sample` Module
+--------------------
+
+.. automodule:: galaxy.sample_tracking.sample
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.security.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.security.rst
@@ -0,0 +1,19 @@
+security Package
+================
+
+:mod:`security` Package
+-----------------------
+
+.. automodule:: galaxy.security
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`validate_user_input` Module
+---------------------------------
+
+.. automodule:: galaxy.security.validate_user_input
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tags.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tags.rst
@@ -0,0 +1,19 @@
+tags Package
+============
+
+:mod:`tags` Package
+-------------------
+
+.. automodule:: galaxy.tags
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`tag_handler` Module
+-------------------------
+
+.. automodule:: galaxy.tags.tag_handler
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tool_shed.migrate.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tool_shed.migrate.rst
@@ -0,0 +1,19 @@
+migrate Package
+===============
+
+:mod:`check` Module
+-------------------
+
+.. automodule:: galaxy.tool_shed.migrate.check
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`common` Module
+--------------------
+
+.. automodule:: galaxy.tool_shed.migrate.common
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tool_shed.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tool_shed.rst
@@ -0,0 +1,51 @@
+tool_shed Package
+=================
+
+:mod:`tool_shed` Package
+------------------------
+
+.. automodule:: galaxy.tool_shed
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`encoding_util` Module
+---------------------------
+
+.. automodule:: galaxy.tool_shed.encoding_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`install_manager` Module
+-----------------------------
+
+.. automodule:: galaxy.tool_shed.install_manager
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`tool_shed_registry` Module
+--------------------------------
+
+.. automodule:: galaxy.tool_shed.tool_shed_registry
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`update_manager` Module
+----------------------------
+
+.. automodule:: galaxy.tool_shed.update_manager
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.tool_shed.migrate
+ galaxy.tool_shed.tool_dependencies
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tool_shed.tool_dependencies.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tool_shed.tool_dependencies.rst
@@ -0,0 +1,27 @@
+tool_dependencies Package
+=========================
+
+:mod:`common_util` Module
+-------------------------
+
+.. automodule:: galaxy.tool_shed.tool_dependencies.common_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`fabric_util` Module
+-------------------------
+
+.. automodule:: galaxy.tool_shed.tool_dependencies.fabric_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`install_util` Module
+--------------------------
+
+.. automodule:: galaxy.tool_shed.tool_dependencies.install_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tools.actions.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.actions.rst
@@ -0,0 +1,51 @@
+actions Package
+===============
+
+:mod:`actions` Package
+----------------------
+
+.. automodule:: galaxy.tools.actions
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`history_imp_exp` Module
+-----------------------------
+
+.. automodule:: galaxy.tools.actions.history_imp_exp
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`index_genome` Module
+--------------------------
+
+.. automodule:: galaxy.tools.actions.index_genome
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`metadata` Module
+----------------------
+
+.. automodule:: galaxy.tools.actions.metadata
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`upload` Module
+--------------------
+
+.. automodule:: galaxy.tools.actions.upload
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`upload_common` Module
+---------------------------
+
+.. automodule:: galaxy.tools.actions.upload_common
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tools.data.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.data.rst
@@ -0,0 +1,11 @@
+data Package
+============
+
+:mod:`data` Package
+-------------------
+
+.. automodule:: galaxy.tools.data
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tools.deps.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.deps.rst
@@ -0,0 +1,19 @@
+deps Package
+============
+
+:mod:`deps` Package
+-------------------
+
+.. automodule:: galaxy.tools.deps
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`tests` Module
+-------------------
+
+.. automodule:: galaxy.tools.deps.tests
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tools.genome_index.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.genome_index.rst
@@ -0,0 +1,19 @@
+genome_index Package
+====================
+
+:mod:`genome_index` Package
+---------------------------
+
+.. automodule:: galaxy.tools.genome_index
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`index_genome` Module
+--------------------------
+
+.. automodule:: galaxy.tools.genome_index.index_genome
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tools.imp_exp.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.imp_exp.rst
@@ -0,0 +1,27 @@
+imp_exp Package
+===============
+
+:mod:`imp_exp` Package
+----------------------
+
+.. automodule:: galaxy.tools.imp_exp
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`export_history` Module
+----------------------------
+
+.. automodule:: galaxy.tools.imp_exp.export_history
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`unpack_tar_gz_archive` Module
+-----------------------------------
+
+.. automodule:: galaxy.tools.imp_exp.unpack_tar_gz_archive
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tools.parameters.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.parameters.rst
@@ -0,0 +1,67 @@
+parameters Package
+==================
+
+:mod:`parameters` Package
+-------------------------
+
+.. automodule:: galaxy.tools.parameters
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`basic` Module
+-------------------
+
+.. automodule:: galaxy.tools.parameters.basic
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`dynamic_options` Module
+-----------------------------
+
+.. automodule:: galaxy.tools.parameters.dynamic_options
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`grouping` Module
+----------------------
+
+.. automodule:: galaxy.tools.parameters.grouping
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`input_translation` Module
+-------------------------------
+
+.. automodule:: galaxy.tools.parameters.input_translation
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`output` Module
+--------------------
+
+.. automodule:: galaxy.tools.parameters.output
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`sanitize` Module
+----------------------
+
+.. automodule:: galaxy.tools.parameters.sanitize
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`validation` Module
+------------------------
+
+.. automodule:: galaxy.tools.parameters.validation
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tools.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.rst
@@ -0,0 +1,41 @@
+tools Package
+=============
+
+:mod:`tools` Package
+--------------------
+
+.. automodule:: galaxy.tools
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`exception_handling` Module
+--------------------------------
+
+.. automodule:: galaxy.tools.exception_handling
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`test` Module
+------------------
+
+.. automodule:: galaxy.tools.test
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.tools.actions
+ galaxy.tools.data
+ galaxy.tools.deps
+ galaxy.tools.genome_index
+ galaxy.tools.imp_exp
+ galaxy.tools.parameters
+ galaxy.tools.search
+ galaxy.tools.util
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tools.search.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.search.rst
@@ -0,0 +1,11 @@
+search Package
+==============
+
+:mod:`search` Package
+---------------------
+
+.. automodule:: galaxy.tools.search
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tools.util.galaxyops.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.util.galaxyops.rst
@@ -0,0 +1,11 @@
+galaxyops Package
+=================
+
+:mod:`galaxyops` Package
+------------------------
+
+.. automodule:: galaxy.tools.util.galaxyops
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.tools.util.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.util.rst
@@ -0,0 +1,34 @@
+util Package
+============
+
+:mod:`util` Package
+-------------------
+
+.. automodule:: galaxy.tools.util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`hyphy_util` Module
+------------------------
+
+.. automodule:: galaxy.tools.util.hyphy_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`maf_utilities` Module
+---------------------------
+
+.. automodule:: galaxy.tools.util.maf_utilities
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.tools.util.galaxyops
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.util.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.util.rst
@@ -0,0 +1,139 @@
+util Package
+============
+
+:mod:`util` Package
+-------------------
+
+.. automodule:: galaxy.util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`aliaspickler` Module
+--------------------------
+
+.. automodule:: galaxy.util.aliaspickler
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`bunch` Module
+-------------------
+
+.. automodule:: galaxy.util.bunch
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`expressions` Module
+-------------------------
+
+.. automodule:: galaxy.util.expressions
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`hash_util` Module
+-----------------------
+
+.. automodule:: galaxy.util.hash_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`heartbeat` Module
+-----------------------
+
+.. automodule:: galaxy.util.heartbeat
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`inflection` Module
+------------------------
+
+.. automodule:: galaxy.util.inflection
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`json` Module
+------------------
+
+.. automodule:: galaxy.util.json
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`lrucache` Module
+----------------------
+
+.. automodule:: galaxy.util.lrucache
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`memdump` Module
+---------------------
+
+.. automodule:: galaxy.util.memdump
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`none_like` Module
+-----------------------
+
+.. automodule:: galaxy.util.none_like
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`odict` Module
+-------------------
+
+.. automodule:: galaxy.util.odict
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`sanitize_html` Module
+---------------------------
+
+.. automodule:: galaxy.util.sanitize_html
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`shed_util` Module
+-----------------------
+
+.. automodule:: galaxy.util.shed_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`streamball` Module
+------------------------
+
+.. automodule:: galaxy.util.streamball
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`template` Module
+----------------------
+
+.. automodule:: galaxy.util.template
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`topsort` Module
+---------------------
+
+.. automodule:: galaxy.util.topsort
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.visualization.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.visualization.rst
@@ -0,0 +1,26 @@
+visualization Package
+=====================
+
+:mod:`visualization` Package
+----------------------------
+
+.. automodule:: galaxy.visualization
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`genomes` Module
+---------------------
+
+.. automodule:: galaxy.visualization.genomes
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.visualization.tracks
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.visualization.tracks.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.visualization.tracks.rst
@@ -0,0 +1,35 @@
+tracks Package
+==============
+
+:mod:`tracks` Package
+---------------------
+
+.. automodule:: galaxy.visualization.tracks
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`data_providers` Module
+----------------------------
+
+.. automodule:: galaxy.visualization.tracks.data_providers
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`summary` Module
+---------------------
+
+.. automodule:: galaxy.visualization.tracks.summary
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`visual_analytics` Module
+------------------------------
+
+.. automodule:: galaxy.visualization.tracks.visual_analytics
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.web.api.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.web.api.rst
@@ -0,0 +1,352 @@
+Galaxy API Documentation
+************************
+
+Background
+==========
+In addition to being accessible through a web interface, Galaxy can now also be
+accessed programmatically, through shell scripts and other programs. The web
+interface is appropriate for things like exploratory analysis, visualization,
+construction of workflows, and rerunning workflows on new datasets.
+
+The web interface is less suitable for things like
+ - Connecting a Galaxy instance directly to your sequencer and running
+ workflows whenever data is ready
+ - Running a workflow against multiple datasets (which can be done with the
+ web interface, but is tedious)
+ - Running analyses that involve complex control, such as looping and
+ branching.
+
+The Galaxy API addresses these and other situations by exposing Galaxy
+internals through an additional interface, known as an Application Programming
+Interface, or API.
+
+Quickstart
+==========
+
+Set the following option in universe_wsgi.ini and start the server::
+
+ enable_api = True
+
+Log in as your user, navigate to the API Keys page in the User menu, and
+generate a new API key. Make a note of the API key, and then pull up a
+terminal. Now we'll use the display.py script in your galaxy/scripts/api
+directory for a short example::
+
+ % ./display.py my_key http://localhost:4096/api/histories
+ Collection Members
+ ------------------
+ #1: /api/histories/8c49be448cfe29bc
+ name: Unnamed history
+ id: 8c49be448cfe29bc
+ #2: /api/histories/33b43b4e7093c91f
+ name: output test
+ id: 33b43b4e7093c91f
+
+The result is a Collection of the histories of the user specified by the API
+key (you). To look at the details of a particular history, say #1 above, do
+the following::
+
+ % ./display.py my_key http://localhost:4096/api/histories/8c49be448cfe29bc
+ Member Information
+ ------------------
+ state_details: {'ok': 1, 'failed_metadata': 0, 'upload': 0, 'discarded': 0, 'running': 0, 'setting_metadata': 0, 'error': 0, 'new': 0, 'queued': 0, 'empty': 0}
+ state: ok
+ contents_url: /api/histories/8c49be448cfe29bc/contents
+ id: 8c49be448cfe29bc
+ name: Unnamed history
+
+This gives detailed information about the specific member in question, in this
+case the History. To view history contents, do the following::
+
+
+ % ./display.py my_key http://localhost:4096/api/histories/8c49be448cfe29bc/contents
+ Collection Members
+ ------------------
+ #1: /api/histories/8c49be448cfe29bc/contents/6f91353f3eb0fa4a
+ name: Pasted Entry
+ type: file
+ id: 6f91353f3eb0fa4a
+
+What we have here is another Collection of items containing all of the datasets
+in this particular history. Finally, to view details of a particular dataset
+in this collection, execute the following::
+
+ % ./display.py my_key http://localhost:4096/api/histories/8c49be448cfe29bc/contents/6f91353f3eb0fa4a
+ Member Information
+ ------------------
+ misc_blurb: 1 line
+ name: Pasted Entry
+ data_type: txt
+ deleted: False
+ file_name: /Users/yoplait/work/galaxy-stock/database/files/000/dataset_82.dat
+ state: ok
+ download_url: /datasets/6f91353f3eb0fa4a/display?to_ext=txt
+ visible: True
+ genome_build: ?
+ model_class: HistoryDatasetAssociation
+ file_size: 17
+ metadata_data_lines: 1
+ id: 6f91353f3eb0fa4a
+ misc_info: uploaded txt file
+ metadata_dbkey: ?
+
+And now you've successfully used the API to request and select a history,
+browse the contents of that history, and then look at detailed information
+about a particular dataset.
+
+For a more comprehensive Data Library example, set the following options in your
+universe_wsgi.ini as well, and restart Galaxy again::
+
+ admin_users = you@example.org
+ library_import_dir = /path/to/some/directory
+
+In the directory you specified for 'library_import_dir', create some
+subdirectories, and put (or symlink) files to import into Galaxy into those
+subdirectories.
+
+In Galaxy, create an account that matches the address you put in 'admin_users',
+then browse to that user's preferences and generate a new API Key. Copy the
+key to your clipboard and then use these scripts::
+
+ % ./display.py my_key http://localhost:4096/api/libraries
+ Collection Members
+ ------------------
+
+ 0 elements in collection
+
+ % ./library_create_library.py my_key http://localhost:4096/api/libraries api_test 'API Test Library'
+ Response
+ --------
+ /api/libraries/f3f73e481f432006
+ name: api_test
+ id: f3f73e481f432006
+
+ % ./display.py my_key http://localhost:4096/api/libraries
+ Collection Members
+ ------------------
+ /api/libraries/f3f73e481f432006
+ name: api_test
+ id: f3f73e481f432006
+
+ % ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006
+ Member Information
+ ------------------
+ synopsis: None
+ contents_url: /api/libraries/f3f73e481f432006/contents
+ description: API Test Library
+ name: api_test
+
+ % ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents
+ Collection Members
+ ------------------
+ /api/libraries/f3f73e481f432006/contents/28202595c0d2591f61ddda595d2c3670
+ name: /
+ type: folder
+ id: 28202595c0d2591f61ddda595d2c3670
+
+ % ./library_create_folder.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents 28202595c0d2591f61ddda595d2c3670 api_test_folder1 'API Test Folder 1'
+ Response
+ --------
+ /api/libraries/f3f73e481f432006/contents/28202595c0d2591fa4f9089d2303fd89
+ name: api_test_folder1
+ id: 28202595c0d2591fa4f9089d2303fd89
+
+ % ./library_upload_from_import_dir.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents 28202595c0d2591fa4f9089d2303fd89 bed bed hg19
+ Response
+ --------
+ /api/libraries/f3f73e481f432006/contents/e9ef7fdb2db87d7b
+ name: 2.bed
+ id: e9ef7fdb2db87d7b
+ /api/libraries/f3f73e481f432006/contents/3b7f6a31f80a5018
+ name: 3.bed
+ id: 3b7f6a31f80a5018
+
+ % ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents
+ Collection Members
+ ------------------
+ /api/libraries/f3f73e481f432006/contents/28202595c0d2591f61ddda595d2c3670
+ name: /
+ type: folder
+ id: 28202595c0d2591f61ddda595d2c3670
+ /api/libraries/f3f73e481f432006/contents/28202595c0d2591fa4f9089d2303fd89
+ name: /api_test_folder1
+ type: folder
+ id: 28202595c0d2591fa4f9089d2303fd89
+ /api/libraries/f3f73e481f432006/contents/e9ef7fdb2db87d7b
+ name: /api_test_folder1/2.bed
+ type: file
+ id: e9ef7fdb2db87d7b
+ /api/libraries/f3f73e481f432006/contents/3b7f6a31f80a5018
+ name: /api_test_folder1/3.bed
+ type: file
+ id: 3b7f6a31f80a5018
+
+ % ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents/e9ef7fdb2db87d7b
+ Member Information
+ ------------------
+ misc_blurb: 68 regions
+ metadata_endCol: 3
+ data_type: bed
+ metadata_columns: 6
+ metadata_nameCol: 4
+ uploaded_by: nate@...
+ metadata_strandCol: 6
+ name: 2.bed
+ genome_build: hg19
+ metadata_comment_lines: None
+ metadata_startCol: 2
+ metadata_chromCol: 1
+ file_size: 4272
+ metadata_data_lines: 68
+ message:
+ metadata_dbkey: hg19
+ misc_info: uploaded bed file
+ date_uploaded: 2010-06-22T17:01:51.266119
+ metadata_column_types: str, int, int, str, int, str
+
+Other parameters are valid when uploading; they are the same parameters as are
+used in the web form, such as 'link_data_only'.
+
+The request and response format should be considered alpha and are subject to change.
+
+
+API Controllers
+===============
+
+:mod:`datasets` Module
+----------------------
+
+.. automodule:: galaxy.web.api.datasets
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`forms` Module
+-------------------
+
+.. automodule:: galaxy.web.api.forms
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`genomes` Module
+---------------------
+
+.. automodule:: galaxy.web.api.genomes
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`histories` Module
+-----------------------
+
+.. automodule:: galaxy.web.api.histories
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`history_contents` Module
+------------------------------
+
+.. automodule:: galaxy.web.api.history_contents
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`libraries` Module
+-----------------------
+
+.. automodule:: galaxy.web.api.libraries
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`library_contents` Module
+------------------------------
+
+.. automodule:: galaxy.web.api.library_contents
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`permissions` Module
+-------------------------
+
+.. automodule:: galaxy.web.api.permissions
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`quotas` Module
+--------------------
+
+.. automodule:: galaxy.web.api.quotas
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`request_types` Module
+---------------------------
+
+.. automodule:: galaxy.web.api.request_types
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`requests` Module
+----------------------
+
+.. automodule:: galaxy.web.api.requests
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`roles` Module
+-------------------
+
+.. automodule:: galaxy.web.api.roles
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`samples` Module
+---------------------
+
+.. automodule:: galaxy.web.api.samples
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`tools` Module
+-------------------
+
+.. automodule:: galaxy.web.api.tools
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`users` Module
+-------------------
+
+.. automodule:: galaxy.web.api.users
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`visualizations` Module
+----------------------------
+
+.. automodule:: galaxy.web.api.visualizations
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`workflows` Module
+-----------------------
+
+.. automodule:: galaxy.web.api.workflows
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.web.base.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.web.base.rst
@@ -0,0 +1,11 @@
+base Package
+============
+
+:mod:`controller` Module
+------------------------
+
+.. automodule:: galaxy.web.base.controller
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.web.controllers.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.web.controllers.rst
@@ -0,0 +1,235 @@
+controllers Package
+===================
+
+:mod:`controllers` Package
+--------------------------
+
+.. automodule:: galaxy.web.controllers
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`admin` Module
+-------------------
+
+.. automodule:: galaxy.web.controllers.admin
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`admin_toolshed` Module
+----------------------------
+
+.. automodule:: galaxy.web.controllers.admin_toolshed
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`async` Module
+-------------------
+
+.. automodule:: galaxy.web.controllers.async
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`cloud` Module
+-------------------
+
+.. automodule:: galaxy.web.controllers.cloud
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`data_admin` Module
+------------------------
+
+.. automodule:: galaxy.web.controllers.data_admin
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`dataset` Module
+---------------------
+
+.. automodule:: galaxy.web.controllers.dataset
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`error` Module
+-------------------
+
+.. automodule:: galaxy.web.controllers.error
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`external_service` Module
+------------------------------
+
+.. automodule:: galaxy.web.controllers.external_service
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`external_services` Module
+-------------------------------
+
+.. automodule:: galaxy.web.controllers.external_services
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`forms` Module
+-------------------
+
+.. automodule:: galaxy.web.controllers.forms
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`history` Module
+---------------------
+
+.. automodule:: galaxy.web.controllers.history
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`library` Module
+---------------------
+
+.. automodule:: galaxy.web.controllers.library
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`library_admin` Module
+---------------------------
+
+.. automodule:: galaxy.web.controllers.library_admin
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`library_common` Module
+----------------------------
+
+.. automodule:: galaxy.web.controllers.library_common
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`mobile` Module
+--------------------
+
+.. automodule:: galaxy.web.controllers.mobile
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`page` Module
+------------------
+
+.. automodule:: galaxy.web.controllers.page
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`request_type` Module
+--------------------------
+
+.. automodule:: galaxy.web.controllers.request_type
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`requests` Module
+----------------------
+
+.. automodule:: galaxy.web.controllers.requests
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`requests_admin` Module
+----------------------------
+
+.. automodule:: galaxy.web.controllers.requests_admin
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`requests_common` Module
+-----------------------------
+
+.. automodule:: galaxy.web.controllers.requests_common
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`root` Module
+------------------
+
+.. automodule:: galaxy.web.controllers.root
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`tag` Module
+-----------------
+
+.. automodule:: galaxy.web.controllers.tag
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`tool_runner` Module
+-------------------------
+
+.. automodule:: galaxy.web.controllers.tool_runner
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`tracks` Module
+--------------------
+
+.. automodule:: galaxy.web.controllers.tracks
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`ucsc_proxy` Module
+------------------------
+
+.. automodule:: galaxy.web.controllers.ucsc_proxy
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`user` Module
+------------------
+
+.. automodule:: galaxy.web.controllers.user
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`visualization` Module
+---------------------------
+
+.. automodule:: galaxy.web.controllers.visualization
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`workflow` Module
+----------------------
+
+.. automodule:: galaxy.web.controllers.workflow
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.web.framework.helpers.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.web.framework.helpers.rst
@@ -0,0 +1,19 @@
+helpers Package
+===============
+
+:mod:`helpers` Package
+----------------------
+
+.. automodule:: galaxy.web.framework.helpers
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`grids` Module
+-------------------
+
+.. automodule:: galaxy.web.framework.helpers.grids
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.web.framework.middleware.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.web.framework.middleware.rst
@@ -0,0 +1,51 @@
+middleware Package
+==================
+
+:mod:`middleware` Package
+-------------------------
+
+.. automodule:: galaxy.web.framework.middleware
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`profile` Module
+---------------------
+
+.. automodule:: galaxy.web.framework.middleware.profile
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`remoteuser` Module
+------------------------
+
+.. automodule:: galaxy.web.framework.middleware.remoteuser
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`static` Module
+--------------------
+
+.. automodule:: galaxy.web.framework.middleware.static
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`translogger` Module
+-------------------------
+
+.. automodule:: galaxy.web.framework.middleware.translogger
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`xforwardedhost` Module
+----------------------------
+
+.. automodule:: galaxy.web.framework.middleware.xforwardedhost
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.web.framework.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.web.framework.rst
@@ -0,0 +1,35 @@
+framework Package
+=================
+
+:mod:`framework` Package
+------------------------
+
+.. automodule:: galaxy.web.framework
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`base` Module
+------------------
+
+.. automodule:: galaxy.web.framework.base
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`openid_manager` Module
+----------------------------
+
+.. automodule:: galaxy.web.framework.openid_manager
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.web.framework.helpers
+ galaxy.web.framework.middleware
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.web.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.web.rst
@@ -0,0 +1,46 @@
+web Package
+===========
+
+:mod:`web` Package
+------------------
+
+.. automodule:: galaxy.web
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`buildapp` Module
+----------------------
+
+.. automodule:: galaxy.web.buildapp
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`form_builder` Module
+--------------------------
+
+.. automodule:: galaxy.web.form_builder
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`params` Module
+--------------------
+
+.. automodule:: galaxy.web.params
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.web.api
+ galaxy.web.base
+ galaxy.web.controllers
+ galaxy.web.framework
+ galaxy.web.security
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.web.security.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.web.security.rst
@@ -0,0 +1,11 @@
+security Package
+================
+
+:mod:`security` Package
+-----------------------
+
+.. automodule:: galaxy.web.security
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.community.controllers.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.community.controllers.rst
@@ -0,0 +1,59 @@
+controllers Package
+===================
+
+:mod:`controllers` Package
+--------------------------
+
+.. automodule:: galaxy.webapps.community.controllers
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`admin` Module
+-------------------
+
+.. automodule:: galaxy.webapps.community.controllers.admin
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`common` Module
+--------------------
+
+.. automodule:: galaxy.webapps.community.controllers.common
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`hg` Module
+----------------
+
+.. automodule:: galaxy.webapps.community.controllers.hg
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`repository` Module
+------------------------
+
+.. automodule:: galaxy.webapps.community.controllers.repository
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`upload` Module
+--------------------
+
+.. automodule:: galaxy.webapps.community.controllers.upload
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`workflow` Module
+----------------------
+
+.. automodule:: galaxy.webapps.community.controllers.workflow
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.community.framework.middleware.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.community.framework.middleware.rst
@@ -0,0 +1,19 @@
+middleware Package
+==================
+
+:mod:`middleware` Package
+-------------------------
+
+.. automodule:: galaxy.webapps.community.framework.middleware
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`hg` Module
+----------------
+
+.. automodule:: galaxy.webapps.community.framework.middleware.hg
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.community.framework.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.community.framework.rst
@@ -0,0 +1,18 @@
+framework Package
+=================
+
+:mod:`framework` Package
+------------------------
+
+.. automodule:: galaxy.webapps.community.framework
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.webapps.community.framework.middleware
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.community.model.migrate.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.community.model.migrate.rst
@@ -0,0 +1,11 @@
+migrate Package
+===============
+
+:mod:`check` Module
+-------------------
+
+.. automodule:: galaxy.webapps.community.model.migrate.check
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.community.model.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.community.model.rst
@@ -0,0 +1,26 @@
+model Package
+=============
+
+:mod:`model` Package
+--------------------
+
+.. automodule:: galaxy.webapps.community.model
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`mapping` Module
+---------------------
+
+.. automodule:: galaxy.webapps.community.model.mapping
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.webapps.community.model.migrate
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.community.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.community.rst
@@ -0,0 +1,45 @@
+community Package
+=================
+
+:mod:`community` Package
+------------------------
+
+.. automodule:: galaxy.webapps.community
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`app` Module
+-----------------
+
+.. automodule:: galaxy.webapps.community.app
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`buildapp` Module
+----------------------
+
+.. automodule:: galaxy.webapps.community.buildapp
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`config` Module
+--------------------
+
+.. automodule:: galaxy.webapps.community.config
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.webapps.community.controllers
+ galaxy.webapps.community.framework
+ galaxy.webapps.community.model
+ galaxy.webapps.community.security
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.community.security.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.community.security.rst
@@ -0,0 +1,11 @@
+security Package
+================
+
+:mod:`security` Package
+-----------------------
+
+.. automodule:: galaxy.webapps.community.security
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.demo_sequencer.controllers.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.demo_sequencer.controllers.rst
@@ -0,0 +1,19 @@
+controllers Package
+===================
+
+:mod:`controllers` Package
+--------------------------
+
+.. automodule:: galaxy.webapps.demo_sequencer.controllers
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`common` Module
+--------------------
+
+.. automodule:: galaxy.webapps.demo_sequencer.controllers.common
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.demo_sequencer.framework.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.demo_sequencer.framework.rst
@@ -0,0 +1,11 @@
+framework Package
+=================
+
+:mod:`framework` Package
+------------------------
+
+.. automodule:: galaxy.webapps.demo_sequencer.framework
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.demo_sequencer.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.demo_sequencer.rst
@@ -0,0 +1,51 @@
+demo_sequencer Package
+======================
+
+:mod:`demo_sequencer` Package
+-----------------------------
+
+.. automodule:: galaxy.webapps.demo_sequencer
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`app` Module
+-----------------
+
+.. automodule:: galaxy.webapps.demo_sequencer.app
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`buildapp` Module
+----------------------
+
+.. automodule:: galaxy.webapps.demo_sequencer.buildapp
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`config` Module
+--------------------
+
+.. automodule:: galaxy.webapps.demo_sequencer.config
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`registry` Module
+----------------------
+
+.. automodule:: galaxy.webapps.demo_sequencer.registry
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.webapps.demo_sequencer.controllers
+ galaxy.webapps.demo_sequencer.framework
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.reports.controllers.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.reports.controllers.rst
@@ -0,0 +1,59 @@
+controllers Package
+===================
+
+:mod:`controllers` Package
+--------------------------
+
+.. automodule:: galaxy.webapps.reports.controllers
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`jobs` Module
+------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.jobs
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`root` Module
+------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.root
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`sample_tracking` Module
+-----------------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.sample_tracking
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`system` Module
+--------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.system
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`users` Module
+-------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.users
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`workflows` Module
+-----------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.workflows
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.reports.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.reports.rst
@@ -0,0 +1,42 @@
+reports Package
+===============
+
+:mod:`reports` Package
+----------------------
+
+.. automodule:: galaxy.webapps.reports
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`app` Module
+-----------------
+
+.. automodule:: galaxy.webapps.reports.app
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`buildapp` Module
+----------------------
+
+.. automodule:: galaxy.webapps.reports.buildapp
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`config` Module
+--------------------
+
+.. automodule:: galaxy.webapps.reports.config
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.webapps.reports.controllers
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.webapps.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.rst
@@ -0,0 +1,20 @@
+webapps Package
+===============
+
+:mod:`webapps` Package
+----------------------
+
+.. automodule:: galaxy.webapps
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy.webapps.community
+ galaxy.webapps.demo_sequencer
+ galaxy.webapps.reports
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy.workflow.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.workflow.rst
@@ -0,0 +1,11 @@
+workflow Package
+================
+
+:mod:`modules` Module
+---------------------
+
+.. automodule:: galaxy.workflow.modules
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy_utils.rst
--- /dev/null
+++ b/doc/source/lib/galaxy_utils.rst
@@ -0,0 +1,10 @@
+galaxy_utils Package
+====================
+
+Subpackages
+-----------
+
+.. toctree::
+
+ galaxy_utils.sequence
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/galaxy_utils.sequence.rst
--- /dev/null
+++ b/doc/source/lib/galaxy_utils.sequence.rst
@@ -0,0 +1,43 @@
+sequence Package
+================
+
+:mod:`fasta` Module
+-------------------
+
+.. automodule:: galaxy_utils.sequence.fasta
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`fastq` Module
+-------------------
+
+.. automodule:: galaxy_utils.sequence.fastq
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`sequence` Module
+----------------------
+
+.. automodule:: galaxy_utils.sequence.sequence
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`transform` Module
+-----------------------
+
+.. automodule:: galaxy_utils.sequence.transform
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`vcf` Module
+-----------------
+
+.. automodule:: galaxy_utils.sequence.vcf
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/log_tempfile.rst
--- /dev/null
+++ b/doc/source/lib/log_tempfile.rst
@@ -0,0 +1,7 @@
+log_tempfile Module
+===================
+
+.. automodule:: log_tempfile
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/mimeparse.rst
--- /dev/null
+++ b/doc/source/lib/mimeparse.rst
@@ -0,0 +1,7 @@
+mimeparse Module
+================
+
+.. automodule:: mimeparse
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/modules.rst
--- /dev/null
+++ b/doc/source/lib/modules.rst
@@ -0,0 +1,13 @@
+lib
+===
+
+.. toctree::
+ :maxdepth: 4
+
+ fpconst
+ galaxy
+ galaxy_utils
+ log_tempfile
+ mimeparse
+ pkg_resources
+ psyco_full
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/pkg_resources.rst
--- /dev/null
+++ b/doc/source/lib/pkg_resources.rst
@@ -0,0 +1,7 @@
+pkg_resources Module
+====================
+
+.. automodule:: pkg_resources
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff -r 4fbd05095ca70adf740ed635451c0ec876635f50 -r 77ceb114bf57172579310a0b4347da0f5f46dec6 doc/source/lib/psyco_full.rst
--- /dev/null
+++ b/doc/source/lib/psyco_full.rst
@@ -0,0 +1,7 @@
+psyco_full Module
+=================
+
+.. automodule:: psyco_full
+ :members:
+ :undoc-members:
+ :show-inheritance:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled,
and it is addressed to you as the recipient.
1
0

commit/galaxy-central: jgoecks: Infrastructure for running tools: (a) move rerunning tools from tracks controller into tools (API) controller; (b) rerunning now supports multiple regions.
by Bitbucket 12 Jun '12
by Bitbucket 12 Jun '12
12 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4fbd05095ca7/
changeset: 4fbd05095ca7
user: jgoecks
date: 2012-06-12 23:37:58
summary: Infrastructure for running tools: (a) move rerunning tools from tracks controller into tools (API) controller; (b) rerunning now supports multiple regions.
affected #: 11 files
diff -r b2eabe39a70f676b8cb3b90a656501804547fd87 -r 4fbd05095ca70adf740ed635451c0ec876635f50 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2617,7 +2617,7 @@
elif isinstance( input, SelectToolParameter ):
param_dict.update( { 'type' : 'select',
'html' : urllib.quote( input.get_html( trans ) ),
- 'options': input.static_options
+ 'options': input.static_options
} )
elif isinstance( input, Conditional ):
# TODO.
@@ -2626,7 +2626,8 @@
param_dict.update( { 'type' : 'number', 'init_value' : input.value,
'html' : urllib.quote( input.get_html( trans ) ),
'min': input.min,
- 'max': input.max
+ 'max': input.max,
+ 'value': input.value
} )
else:
param_dict.update( { 'type' : '??', 'init_value' : input.value, \
diff -r b2eabe39a70f676b8cb3b90a656501804547fd87 -r 4fbd05095ca70adf740ed635451c0ec876635f50 lib/galaxy/visualization/genomes.py
--- a/lib/galaxy/visualization/genomes.py
+++ b/lib/galaxy/visualization/genomes.py
@@ -23,6 +23,26 @@
return dbkey.split( ':' )
else:
return None, dbkey
+
+class GenomeRegion( object ):
+ """
+ A genomic region on an individual chromosome.
+ """
+
+ def __init__( self, chrom=None, start=None, end=None ):
+ self.chrom = chrom
+ self.start = int( start )
+ self.end = int( end )
+
+ def __str__( self ):
+ return self.chrom + ":" + str( self.start ) + "-" + str( self.end )
+
+ @staticmethod
+ def from_dict( obj_dict ):
+ return GenomeRegion( chrom=obj_dict[ 'chrom' ],
+ start=obj_dict[ 'start' ],
+ end=obj_dict[ 'end' ] )
+
class Genome( object ):
"""
diff -r b2eabe39a70f676b8cb3b90a656501804547fd87 -r 4fbd05095ca70adf740ed635451c0ec876635f50 lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -63,7 +63,7 @@
self.original_dataset = original_dataset
self.dependencies = dependencies
- def write_data_to_file( self, chrom, start, end, filename ):
+ def write_data_to_file( self, regions, filename ):
"""
Write data in region defined by chrom, start, and end to a file.
"""
@@ -257,11 +257,18 @@
return tabix.fetch(reference=chrom, start=start, end=end)
- def write_data_to_file( self, chrom, start, end, filename ):
- iterator = self.get_iterator( chrom, start, end )
+ def write_data_to_file( self, regions, filename ):
out = open( filename, "w" )
- for line in iterator:
- out.write( "%s\n" % line )
+
+ for region in regions:
+ # Write data in region.
+ chrom = region.chrom
+ start = region.start
+ end = region.end
+ iterator = self.get_iterator( chrom, start, end )
+ for line in iterator:
+ out.write( "%s\n" % line )
+
out.close()
#
@@ -332,7 +339,7 @@
return { 'data': rval, 'message': message }
- def write_data_to_file( self, chrom, start, end, filename ):
+ def write_data_to_file( self, regions, filename ):
raise Exception( "Unimplemented Function" )
class IntervalTabixDataProvider( TabixDataProvider, IntervalDataProvider ):
@@ -420,11 +427,18 @@
return { 'data': rval, 'message': message }
- def write_data_to_file( self, chrom, start, end, filename ):
- iterator = self.get_iterator( chrom, start, end )
+ def write_data_to_file( self, regions, filename ):
out = open( filename, "w" )
- for line in iterator:
- out.write( "%s\n" % line )
+
+ for region in regions:
+ # Write data in region.
+ chrom = region.chrom
+ start = region.start
+ end = region.end
+ iterator = self.get_iterator( chrom, start, end )
+ for line in iterator:
+ out.write( "%s\n" % line )
+
out.close()
class BedTabixDataProvider( TabixDataProvider, BedDataProvider ):
@@ -545,11 +559,17 @@
return { 'data': rval, 'message': message }
- def write_data_to_file( self, chrom, start, end, filename ):
- iterator = self.get_iterator( chrom, start, end )
+ def write_data_to_file( self, regions, filename ):
out = open( filename, "w" )
- for line in iterator:
- out.write( "%s\n" % line )
+
+ for region in regions:
+ # Write data in region.
+ chrom = region.chrom
+ start = region.start
+ end = region.end
+ iterator = self.get_iterator( chrom, start, end )
+ for line in iterator:
+ out.write( "%s\n" % line )
out.close()
class VcfTabixDataProvider( TabixDataProvider, VcfDataProvider ):
@@ -669,35 +689,42 @@
return filters
- def write_data_to_file( self, chrom, start, end, filename ):
+ def write_data_to_file( self, regions, filename ):
"""
- Write reads in [chrom:start-end] to file.
+ Write reads in regions to file.
"""
# Open current BAM file using index.
- start, end = int(start), int(end)
bamfile = csamtools.Samfile( filename=self.original_dataset.file_name, mode='rb', \
index_filename=self.converted_dataset.file_name )
- try:
- data = bamfile.fetch(start=start, end=end, reference=chrom)
- except ValueError, e:
- # Some BAM files do not prefix chromosome names with chr, try without
- if chrom.startswith( 'chr' ):
- try:
- data = bamfile.fetch( start=start, end=end, reference=chrom[3:] )
- except ValueError:
- return None
- else:
- return None
-
- # Write new BAM file.
+
# TODO: write headers as well?
new_bamfile = csamtools.Samfile( template=bamfile, filename=filename, mode='wb' )
- for i, read in enumerate( data ):
- new_bamfile.write( read )
- new_bamfile.close()
+
+ for region in regions:
+ # Write data from region.
+ chrom = region.chrom
+ start = region.start
+ end = region.end
+
+ try:
+ data = bamfile.fetch(start=start, end=end, reference=chrom)
+ except ValueError, e:
+ # Some BAM files do not prefix chromosome names with chr, try without
+ if chrom.startswith( 'chr' ):
+ try:
+ data = bamfile.fetch( start=start, end=end, reference=chrom[3:] )
+ except ValueError:
+ return None
+ else:
+ return None
+
+ # Write reads in region.
+ for i, read in enumerate( data ):
+ new_bamfile.write( read )
# Cleanup.
+ new_bamfile.close()
bamfile.close()
def get_iterator( self, chrom, start, end ):
@@ -952,17 +979,24 @@
"""
col_name_data_attr_mapping = { 4 : { 'index': 4 , 'name' : 'Score' } }
- def write_data_to_file( self, chrom, start, end, filename ):
+ def write_data_to_file( self, regions, filename ):
source = open( self.original_dataset.file_name )
index = Indexes( self.converted_dataset.file_name )
out = open( filename, 'w' )
- for start, end, offset in index.find(chrom, start, end):
- source.seek( offset )
+
+ for region in regions:
+ # Write data from region.
+ chrom = region.chrom
+ start = region.start
+ end = region.end
+ for start, end, offset in index.find(chrom, start, end):
+ source.seek( offset )
- reader = GFFReaderWrapper( source, fix_strand=True )
- feature = reader.next()
- for interval in feature.intervals:
- out.write( '\t'.join( interval.fields ) + '\n' )
+ reader = GFFReaderWrapper( source, fix_strand=True )
+ feature = reader.next()
+ for interval in feature.intervals:
+ out.write( '\t'.join( interval.fields ) + '\n' )
+
out.close()
def get_iterator( self, chrom, start, end ):
@@ -1183,13 +1217,6 @@
rval.append( payload )
return { 'data': rval, 'message': message }
-
- def write_data_to_file( self, chrom, start, end, filename ):
- iterator = self.get_iterator( chrom, start, end )
- out = open( filename, "w" )
- for line in iterator:
- out.write( "%s\n" % line )
- out.close()
class ENCODEPeakTabixDataProvider( TabixDataProvider, ENCODEPeakDataProvider ):
"""
diff -r b2eabe39a70f676b8cb3b90a656501804547fd87 -r 4fbd05095ca70adf740ed635451c0ec876635f50 lib/galaxy/web/api/datasets.py
--- a/lib/galaxy/web/api/datasets.py
+++ b/lib/galaxy/web/api/datasets.py
@@ -10,7 +10,7 @@
log = logging.getLogger( __name__ )
-class DatasetsController( BaseAPIController, UsesHistoryMixinDatasetAssociationMixin ):
+class DatasetsController( BaseAPIController, UsesHistoryDatasetAssociationMixin ):
@web.expose_api
def index( self, trans, hda_id, **kwd ):
diff -r b2eabe39a70f676b8cb3b90a656501804547fd87 -r 4fbd05095ca70adf740ed635451c0ec876635f50 lib/galaxy/web/api/history_contents.py
--- a/lib/galaxy/web/api/history_contents.py
+++ b/lib/galaxy/web/api/history_contents.py
@@ -12,7 +12,7 @@
log = logging.getLogger( __name__ )
-class HistoryContentsController( BaseAPIController, UsesHistoryMixinDatasetAssociationMixin, UsesHistoryMixin, UsesLibraryMixin, UsesLibraryMixinItems ):
+class HistoryContentsController( BaseAPIController, UsesHistoryDatasetAssociationMixin, UsesHistoryMixin, UsesLibraryMixin, UsesLibraryMixinItems ):
@web.expose_api
def index( self, trans, history_id, **kwd ):
diff -r b2eabe39a70f676b8cb3b90a656501804547fd87 -r 4fbd05095ca70adf740ed635451c0ec876635f50 lib/galaxy/web/api/tools.py
--- a/lib/galaxy/web/api/tools.py
+++ b/lib/galaxy/web/api/tools.py
@@ -1,12 +1,12 @@
from galaxy import config, tools, web, util
-from galaxy.web.base.controller import BaseController, BaseAPIController
+from galaxy.web.base.controller import BaseController, BaseAPIController, UsesHistoryDatasetAssociationMixin, messages, get_highest_priority_msg
from galaxy.util.bunch import Bunch
+from galaxy.visualization.tracks.visual_analytics import get_dataset_job
+from galaxy.visualization.genomes import GenomeRegion
+from galaxy.util.json import to_json_string, from_json_string
+from galaxy.visualization.tracks.data_providers import *
-messages = Bunch(
- NO_TOOL = "no tool"
-)
-
-class ToolsController( BaseAPIController ):
+class ToolsController( BaseAPIController, UsesHistoryDatasetAssociationMixin ):
"""
RESTful controller for interactions with tools.
"""
@@ -29,7 +29,7 @@
# Create return value.
return self.app.toolbox.to_dict( trans, in_panel=in_panel, trackster=trackster )
- @web.json
+ @web.expose_api
def show( self, trans, id, **kwd ):
"""
GET /api/tools/{tool_id}
@@ -41,16 +41,18 @@
def create( self, trans, payload, **kwd ):
"""
POST /api/tools
- Executes tool using specified inputs, creating new history-dataset
- associations, which are returned.
+ Executes tool using specified inputs and returns tool's outputs.
"""
- # TODO: set target history?
+ # HACK: for now, if action is rerun, rerun tool.
+ action = payload.get( 'action', None )
+ if action == 'rerun':
+ return self._rerun_tool( trans, payload, **kwd )
# -- Execute tool. --
# Get tool.
- tool_id = payload[ 'id' ]
+ tool_id = payload[ 'tool_id' ]
tool = trans.app.toolbox.get_tool( tool_id )
if not tool:
return { "message": { "type": "error", "text" : messages.NO_TOOL } }
@@ -72,4 +74,287 @@
for output in output_datasets:
outputs.append( output.get_api_value() )
return rval
-
\ No newline at end of file
+
+ #
+ # -- Helper methods --
+ #
+
+ def _run_tool( self, trans, tool_id, target_dataset_id, **kwargs ):
+ """
+ Run a tool. This method serves as a general purpose way to run tools asynchronously.
+ """
+
+ #
+ # Set target history (the history that tool will use for outputs) using
+ # target dataset. If user owns dataset, put new data in original
+ # dataset's history; if user does not own dataset (and hence is accessing
+ # dataset via sharing), put new data in user's current history.
+ #
+ target_dataset = self.get_dataset( trans, target_dataset_id, check_ownership=False, check_accessible=True )
+ if target_dataset.history.user == trans.user:
+ target_history = target_dataset.history
+ else:
+ target_history = trans.get_history( create=True )
+
+ # HACK: tools require unencoded parameters but kwargs are typically
+ # encoded, so try decoding all parameter values.
+ for key, value in kwargs.items():
+ try:
+ value = trans.security.decode_id( value )
+ kwargs[ key ] = value
+ except:
+ pass
+
+ #
+ # Execute tool.
+ #
+ tool = trans.app.toolbox.get_tool( tool_id )
+ if not tool:
+ return messages.NO_TOOL
+
+ # HACK: add run button so that tool.handle_input will run tool.
+ kwargs['runtool_btn'] = 'Execute'
+ params = util.Params( kwargs, sanitize = False )
+ template, vars = tool.handle_input( trans, params.__dict__, history=target_history )
+
+ # TODO: check for errors and ensure that output dataset is available.
+ output_datasets = vars[ 'out_data' ].values()
+ return self.add_track_async( trans, output_datasets[0].id )
+
+
+ def _rerun_tool( self, trans, payload, **kwargs ):
+ """
+ Rerun a tool to produce a new output dataset that corresponds to a
+ dataset that a user is currently viewing.
+ """
+
+ #
+ # TODO: refactor to use same code as run_tool.
+ #
+
+ # Run tool on region if region is specificied.
+ run_on_regions = False
+ regions = from_json_string( payload.get( 'regions', None ) )
+ print regions, payload
+ if regions:
+ if isinstance( regions, dict ):
+ # Regions is a single region.
+ regions = [ GenomeRegion.from_dict( regions ) ]
+ elif isinstance( regions, list ):
+ # There is a list of regions.
+ regions = [ GenomeRegion.from_dict( r ) for r in regions ]
+ run_on_regions = True
+
+ # Dataset check.
+ original_dataset = self.get_dataset( trans, payload[ 'target_dataset_id' ], check_ownership=False, check_accessible=True )
+ msg = self.check_dataset_state( trans, original_dataset )
+ if msg:
+ return to_json_string( msg )
+
+ #
+ # Set tool parameters--except non-hidden dataset parameters--using combination of
+ # job's previous parameters and incoming parameters. Incoming parameters
+ # have priority.
+ #
+ original_job = get_dataset_job( original_dataset )
+ tool = trans.app.toolbox.get_tool( original_job.tool_id )
+ if not tool:
+ return messages.NO_TOOL
+ tool_params = dict( [ ( p.name, p.value ) for p in original_job.parameters ] )
+ # TODO: need to handle updates to conditional parameters; conditional
+ # params are stored in dicts (and dicts within dicts).
+ tool_params.update( dict( [ ( key, value ) for key, value in kwargs.items() if key in tool.inputs ] ) )
+ tool_params = tool.params_from_strings( tool_params, self.app )
+
+ #
+ # If running tool on region, convert input datasets (create indices) so
+ # that can regions of data can be quickly extracted.
+ #
+ messages_list = []
+ if run_on_regions:
+ for jida in original_job.input_datasets:
+ input_dataset = jida.dataset
+ if get_data_provider( original_dataset=input_dataset ):
+ # Can index dataset.
+ track_type, data_sources = input_dataset.datatype.get_track_type()
+ # Convert to datasource that provides 'data' because we need to
+ # extract the original data.
+ data_source = data_sources[ 'data' ]
+ msg = self.convert_dataset( trans, input_dataset, data_source )
+ if msg is not None:
+ messages_list.append( msg )
+
+ # Return any messages generated during conversions.
+ return_message = get_highest_priority_msg( messages_list )
+ if return_message:
+ return to_json_string( return_message )
+
+ #
+ # Set target history (the history that tool will use for inputs/outputs).
+ # If user owns dataset, put new data in original dataset's history; if
+ # user does not own dataset (and hence is accessing dataset via sharing),
+ # put new data in user's current history.
+ #
+ if original_dataset.history.user == trans.user:
+ target_history = original_dataset.history
+ else:
+ target_history = trans.get_history( create=True )
+ hda_permissions = trans.app.security_agent.history_get_default_permissions( target_history )
+
+ def set_param_value( param_dict, param_name, param_value ):
+ """
+ Set new parameter value in a tool's parameter dictionary.
+ """
+
+ # Recursive function to set param value.
+ def set_value( param_dict, group_name, group_index, param_name, param_value ):
+ if group_name in param_dict:
+ param_dict[ group_name ][ group_index ][ param_name ] = param_value
+ return True
+ elif param_name in param_dict:
+ param_dict[ param_name ] = param_value
+ return True
+ else:
+ # Recursive search.
+ return_val = False
+ for name, value in param_dict.items():
+ if isinstance( value, dict ):
+ return_val = set_value( value, group_name, group_index, param_name, param_value)
+ if return_val:
+ return return_val
+ return False
+
+ # Parse parameter name if necessary.
+ if param_name.find( "|" ) == -1:
+ # Non-grouping parameter.
+ group_name = group_index = None
+ else:
+ # Grouping parameter.
+ group, param_name = param_name.split( "|" )
+ index = group.rfind( "_" )
+ group_name = group[ :index ]
+ group_index = int( group[ index + 1: ] )
+
+ return set_value( param_dict, group_name, group_index, param_name, param_value )
+
+ # Set parameters based tool's trackster config.
+ params_set = {}
+ for action in tool.trackster_conf.actions:
+ success = False
+ for joda in original_job.output_datasets:
+ if joda.name == action.output_name:
+ set_param_value( tool_params, action.name, joda.dataset )
+ params_set[ action.name ] = True
+ success = True
+ break
+
+ if not success:
+ return messages.ERROR
+
+ #
+ # Set input datasets for tool. If running on regions, extract and use subset
+ # when possible.
+ #
+ regions_str = ",".join( [ str( r ) for r in regions ] )
+ for jida in original_job.input_datasets:
+ # If param set previously by config actions, do nothing.
+ if jida.name in params_set:
+ continue
+
+ input_dataset = jida.dataset
+ if input_dataset is None: #optional dataset and dataset wasn't selected
+ tool_params[ jida.name ] = None
+ elif run_on_regions and hasattr( input_dataset.datatype, 'get_track_type' ):
+ # Dataset is indexed and hence a subset can be extracted and used
+ # as input.
+
+ # Look for subset.
+ subset_dataset_association = trans.sa_session.query( trans.app.model.HistoryDatasetAssociationSubset ) \
+ .filter_by( hda=input_dataset, location=regions_str ) \
+ .first()
+ if subset_dataset_association:
+ # Data subset exists.
+ subset_dataset = subset_dataset_association.subset
+ else:
+ # Need to create subset.
+ track_type, data_sources = input_dataset.datatype.get_track_type()
+ data_source = data_sources[ 'data' ]
+ converted_dataset = input_dataset.get_converted_dataset( trans, data_source )
+ deps = input_dataset.get_converted_dataset_deps( trans, data_source )
+
+ # Create new HDA for input dataset's subset.
+ new_dataset = trans.app.model.HistoryDatasetAssociation( extension=input_dataset.ext, \
+ dbkey=input_dataset.dbkey, \
+ create_dataset=True, \
+ sa_session=trans.sa_session,
+ name="Subset [%s] of data %i" % \
+ ( regions_str, input_dataset.hid ),
+ visible=False )
+ target_history.add_dataset( new_dataset )
+ trans.sa_session.add( new_dataset )
+ trans.app.security_agent.set_all_dataset_permissions( new_dataset.dataset, hda_permissions )
+
+ # Write subset of data to new dataset
+ data_provider_class = get_data_provider( original_dataset=input_dataset )
+ data_provider = data_provider_class( original_dataset=input_dataset,
+ converted_dataset=converted_dataset,
+ dependencies=deps )
+ trans.app.object_store.create( new_dataset.dataset )
+ data_provider.write_data_to_file( regions, new_dataset.file_name )
+
+ # TODO: (a) size not working; (b) need to set peek.
+ new_dataset.set_size()
+ new_dataset.info = "Data subset for trackster"
+ new_dataset.set_dataset_state( trans.app.model.Dataset.states.OK )
+
+ # Set metadata.
+ # TODO: set meta internally if dataset is small enough?
+ if trans.app.config.set_metadata_externally:
+ trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool,
+ trans, incoming = { 'input1':new_dataset },
+ overwrite=False, job_params={ "source" : "trackster" } )
+ else:
+ message = 'Attributes updated'
+ new_dataset.set_meta()
+ new_dataset.datatype.after_setting_metadata( new_dataset )
+
+ # Add HDA subset association.
+ subset_association = trans.app.model.HistoryDatasetAssociationSubset( hda=input_dataset, subset=new_dataset, location=regions_str )
+ trans.sa_session.add( subset_association )
+
+ subset_dataset = new_dataset
+
+ trans.sa_session.flush()
+
+ # Add dataset to tool's parameters.
+ if not set_param_value( tool_params, jida.name, subset_dataset ):
+ return to_json_string( { "error" : True, "message" : "error setting parameter %s" % jida.name } )
+
+ #
+ # Execute tool and handle outputs.
+ #
+ try:
+ subset_job, subset_job_outputs = tool.execute( trans, incoming=tool_params,
+ history=target_history,
+ job_params={ "source" : "trackster" } )
+ except Exception, e:
+ # Lots of things can go wrong when trying to execute tool.
+ return to_json_string( { "error" : True, "message" : e.__class__.__name__ + ": " + str(e) } )
+ if run_on_regions:
+ for output in subset_job_outputs.values():
+ output.visible = False
+ trans.sa_session.flush()
+
+ #
+ # Return new track that corresponds to the original dataset.
+ #
+ output_name = None
+ for joda in original_job.output_datasets:
+ if joda.dataset == original_dataset:
+ output_name = joda.name
+ break
+ for joda in subset_job.output_datasets:
+ if joda.name == output_name:
+ output_dataset = joda.dataset
+
+ return output_dataset.get_api_value()
diff -r b2eabe39a70f676b8cb3b90a656501804547fd87 -r 4fbd05095ca70adf740ed635451c0ec876635f50 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -181,6 +181,37 @@
# -- Mixins for working with Galaxy objects. --
#
+# Message strings returned to browser
+messages = Bunch(
+ PENDING = "pending",
+ NO_DATA = "no data",
+ NO_CHROMOSOME = "no chromosome",
+ NO_CONVERTER = "no converter",
+ NO_TOOL = "no tool",
+ DATA = "data",
+ ERROR = "error",
+ OK = "ok"
+)
+
+def get_highest_priority_msg( message_list ):
+ """
+ Returns highest priority message from a list of messages.
+ """
+ return_message = None
+
+ # For now, priority is: job error (dict), no converter, pending.
+ for message in message_list:
+ if message is not None:
+ if isinstance(message, dict):
+ return_message = message
+ break
+ elif message == messages.NO_CONVERTER:
+ return_message = message
+ elif return_message == None and message == messages.PENDING:
+ return_message = message
+ return return_message
+
+
class SharableItemSecurityMixin:
""" Mixin for handling security for sharable items. """
def security_check( self, trans, item, check_ownership=False, check_accessible=False ):
@@ -201,8 +232,9 @@
raise ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' )
return item
-class UsesHistoryMixinDatasetAssociationMixin:
+class UsesHistoryDatasetAssociationMixin:
""" Mixin for controllers that use HistoryDatasetAssociation objects. """
+
def get_dataset( self, trans, dataset_id, check_ownership=True, check_accessible=False ):
""" Get an HDA object by id. """
# DEPRECATION: We still support unencoded ids for backward compatibility
@@ -232,6 +264,7 @@
else:
error( "You are not allowed to access this dataset" )
return data
+
def get_history_dataset_association( self, trans, history, dataset_id, check_ownership=True, check_accessible=False ):
"""Get a HistoryDatasetAssociation from the database by id, verifying ownership."""
self.security_check( trans, history, check_ownership=check_ownership, check_accessible=check_accessible )
@@ -244,6 +277,7 @@
else:
error( "You are not allowed to access this dataset" )
return hda
+
def get_data( self, dataset, preview=True ):
""" Gets a dataset's data. """
# Get data from file, truncating if necessary.
@@ -258,6 +292,46 @@
dataset_data = open( dataset.file_name ).read(max_peek_size)
truncated = False
return truncated, dataset_data
+
+ def check_dataset_state( self, trans, dataset ):
+ """
+ Returns a message if dataset is not ready to be used in visualization.
+ """
+ if not dataset:
+ return messages.NO_DATA
+ if dataset.state == trans.app.model.Job.states.ERROR:
+ return messages.ERROR
+ if dataset.state != trans.app.model.Job.states.OK:
+ return messages.PENDING
+ return None
+
+ def convert_dataset( self, trans, dataset, target_type ):
+ """
+ Converts a dataset to the target_type and returns a message indicating
+ status of the conversion. None is returned to indicate that dataset
+ was converted successfully.
+ """
+
+ # Get converted dataset; this will start the conversion if necessary.
+ try:
+ converted_dataset = dataset.get_converted_dataset( trans, target_type )
+ except NoConverterException:
+ return messages.NO_CONVERTER
+ except ConverterDependencyException, dep_error:
+ return { 'kind': messages.ERROR, 'message': dep_error.value }
+
+ # Check dataset state and return any messages.
+ msg = None
+ if converted_dataset and converted_dataset.state == trans.app.model.Dataset.states.ERROR:
+ job_id = trans.sa_session.query( trans.app.model.JobToOutputDatasetAssociation ) \
+ .filter_by( dataset_id=converted_dataset.id ).first().job_id
+ job = trans.sa_session.query( trans.app.model.Job ).get( job_id )
+ msg = { 'kind': messages.ERROR, 'message': job.stderr }
+ elif not converted_dataset or converted_dataset.state != trans.app.model.Dataset.states.OK:
+ msg = messages.PENDING
+
+ return msg
+
class UsesLibraryMixin:
def get_library( self, trans, id, check_ownership=False, check_accessible=True ):
diff -r b2eabe39a70f676b8cb3b90a656501804547fd87 -r 4fbd05095ca70adf740ed635451c0ec876635f50 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -150,7 +150,7 @@
.filter( model.History.deleted==False ) \
.filter( self.model_class.visible==True )
-class DatasetInterface( BaseUIController, UsesAnnotations, UsesHistoryMixin, UsesHistoryMixinDatasetAssociationMixin, UsesItemRatings ):
+class DatasetInterface( BaseUIController, UsesAnnotations, UsesHistoryMixin, UsesHistoryDatasetAssociationMixin, UsesItemRatings ):
stored_list_grid = HistoryDatasetAssociationListGrid()
diff -r b2eabe39a70f676b8cb3b90a656501804547fd87 -r 4fbd05095ca70adf740ed635451c0ec876635f50 lib/galaxy/web/controllers/page.py
--- a/lib/galaxy/web/controllers/page.py
+++ b/lib/galaxy/web/controllers/page.py
@@ -273,7 +273,7 @@
_BaseHTMLProcessor.unknown_endtag( self, tag )
class PageController( BaseUIController, SharableMixin, UsesAnnotations, UsesHistoryMixin,
- UsesStoredWorkflowMixin, UsesHistoryMixinDatasetAssociationMixin, UsesVisualizationMixin, UsesItemRatings ):
+ UsesStoredWorkflowMixin, UsesHistoryDatasetAssociationMixin, UsesVisualizationMixin, UsesItemRatings ):
_page_list = PageListGrid()
_all_published_list = PageAllPublishedGrid()
diff -r b2eabe39a70f676b8cb3b90a656501804547fd87 -r 4fbd05095ca70adf740ed635451c0ec876635f50 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -18,17 +18,6 @@
from galaxy.visualization.genomes import decode_dbkey, Genomes
from galaxy.visualization.tracks.visual_analytics import get_tool_def, get_dataset_job
-# Message strings returned to browser
-messages = Bunch(
- PENDING = "pending",
- NO_DATA = "no data",
- NO_CHROMOSOME = "no chromosome",
- NO_CONVERTER = "no converter",
- NO_TOOL = "no tool",
- DATA = "data",
- ERROR = "error",
- OK = "ok"
-)
class NameColumn( grids.TextColumn ):
def get_value( self, trans, grid, history ):
@@ -163,7 +152,7 @@
def apply_query_filter( self, trans, query, **kwargs ):
return query.filter( self.model_class.user_id == trans.user.id )
-class TracksController( BaseUIController, UsesVisualizationMixin, UsesHistoryMixinDatasetAssociationMixin, SharableMixin ):
+class TracksController( BaseUIController, UsesVisualizationMixin, UsesHistoryDatasetAssociationMixin, SharableMixin ):
"""
Controller for track browser interface. Handles building a new browser from
datasets in the current history, and display of the resulting browser.
@@ -488,281 +477,7 @@
@web.expose
def list_tracks( self, trans, **kwargs ):
return self.tracks_grid( trans, **kwargs )
-
- @web.expose
- def run_tool( self, trans, tool_id, target_dataset_id, **kwargs ):
- """
- Run a tool. This method serves as a general purpose way to run tools asynchronously.
- """
-
- #
- # Set target history (the history that tool will use for outputs) using
- # target dataset. If user owns dataset, put new data in original
- # dataset's history; if user does not own dataset (and hence is accessing
- # dataset via sharing), put new data in user's current history.
- #
- target_dataset = self.get_dataset( trans, target_dataset_id, check_ownership=False, check_accessible=True )
- if target_dataset.history.user == trans.user:
- target_history = target_dataset.history
- else:
- target_history = trans.get_history( create=True )
-
- # HACK: tools require unencoded parameters but kwargs are typically
- # encoded, so try decoding all parameter values.
- for key, value in kwargs.items():
- try:
- value = trans.security.decode_id( value )
- kwargs[ key ] = value
- except:
- pass
-
- #
- # Execute tool.
- #
- tool = trans.app.toolbox.get_tool( tool_id )
- if not tool:
- return messages.NO_TOOL
-
- # HACK: add run button so that tool.handle_input will run tool.
- kwargs['runtool_btn'] = 'Execute'
- params = util.Params( kwargs, sanitize = False )
- template, vars = tool.handle_input( trans, params.__dict__, history=target_history )
-
- # TODO: check for errors and ensure that output dataset is available.
- output_datasets = vars[ 'out_data' ].values()
- return self.add_track_async( trans, output_datasets[0].id )
-
- @web.expose
- def rerun_tool( self, trans, dataset_id, tool_id, chrom=None, low=None, high=None, **kwargs ):
- """
- Rerun a tool to produce a new output dataset that corresponds to a
- dataset that a user is currently viewing.
- """
-
- #
- # TODO: refactor to use same code as run_tool.
- #
-
- # Run tool on region if region is specificied.
- run_on_region = False
- if chrom and low and high:
- run_on_region = True
- low, high = int( low ), int( high )
-
- # Dataset check.
- original_dataset = self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
- msg = self._check_dataset_state( trans, original_dataset )
- if msg:
- return to_json_string( msg )
-
- #
- # Set tool parameters--except non-hidden dataset parameters--using combination of
- # job's previous parameters and incoming parameters. Incoming parameters
- # have priority.
- #
- original_job = get_dataset_job( original_dataset )
- tool = trans.app.toolbox.get_tool( original_job.tool_id )
- if not tool:
- return messages.NO_TOOL
- tool_params = dict( [ ( p.name, p.value ) for p in original_job.parameters ] )
- # TODO: need to handle updates to conditional parameters; conditional
- # params are stored in dicts (and dicts within dicts).
- tool_params.update( dict( [ ( key, value ) for key, value in kwargs.items() if key in tool.inputs ] ) )
- tool_params = tool.params_from_strings( tool_params, self.app )
-
- #
- # If running tool on region, convert input datasets (create indices) so
- # that can regions of data can be quickly extracted.
- #
- messages_list = []
- if run_on_region:
- for jida in original_job.input_datasets:
- input_dataset = jida.dataset
- if get_data_provider( original_dataset=input_dataset ):
- # Can index dataset.
- track_type, data_sources = input_dataset.datatype.get_track_type()
- # Convert to datasource that provides 'data' because we need to
- # extract the original data.
- data_source = data_sources[ 'data' ]
- msg = self._convert_dataset( trans, input_dataset, data_source )
- if msg is not None:
- messages_list.append( msg )
-
- # Return any messages generated during conversions.
- return_message = _get_highest_priority_msg( messages_list )
- if return_message:
- return to_json_string( return_message )
-
- #
- # Set target history (the history that tool will use for inputs/outputs).
- # If user owns dataset, put new data in original dataset's history; if
- # user does not own dataset (and hence is accessing dataset via sharing),
- # put new data in user's current history.
- #
- if original_dataset.history.user == trans.user:
- target_history = original_dataset.history
- else:
- target_history = trans.get_history( create=True )
- hda_permissions = trans.app.security_agent.history_get_default_permissions( target_history )
-
- def set_param_value( param_dict, param_name, param_value ):
- """
- Set new parameter value in a tool's parameter dictionary.
- """
-
- # Recursive function to set param value.
- def set_value( param_dict, group_name, group_index, param_name, param_value ):
- if group_name in param_dict:
- param_dict[ group_name ][ group_index ][ param_name ] = param_value
- return True
- elif param_name in param_dict:
- param_dict[ param_name ] = param_value
- return True
- else:
- # Recursive search.
- return_val = False
- for name, value in param_dict.items():
- if isinstance( value, dict ):
- return_val = set_value( value, group_name, group_index, param_name, param_value)
- if return_val:
- return return_val
- return False
-
- # Parse parameter name if necessary.
- if param_name.find( "|" ) == -1:
- # Non-grouping parameter.
- group_name = group_index = None
- else:
- # Grouping parameter.
- group, param_name = param_name.split( "|" )
- index = group.rfind( "_" )
- group_name = group[ :index ]
- group_index = int( group[ index + 1: ] )
-
- return set_value( param_dict, group_name, group_index, param_name, param_value )
-
- # Set parameters based tool's trackster config.
- params_set = {}
- for action in tool.trackster_conf.actions:
- success = False
- for joda in original_job.output_datasets:
- if joda.name == action.output_name:
- set_param_value( tool_params, action.name, joda.dataset )
- params_set[ action.name ] = True
- success = True
- break
- if not success:
- return messages.ERROR
-
- #
- # Set input datasets for tool. If running on region, extract and use subset
- # when possible.
- #
- location = "%s:%i-%i" % ( chrom, low, high )
- for jida in original_job.input_datasets:
- # If param set previously by config actions, do nothing.
- if jida.name in params_set:
- continue
-
- input_dataset = jida.dataset
- if input_dataset is None: #optional dataset and dataset wasn't selected
- tool_params[ jida.name ] = None
- elif run_on_region and hasattr( input_dataset.datatype, 'get_track_type' ):
- # Dataset is indexed and hence a subset can be extracted and used
- # as input.
-
- # Look for subset.
- subset_dataset_association = trans.sa_session.query( trans.app.model.HistoryDatasetAssociationSubset ) \
- .filter_by( hda=input_dataset, location=location ) \
- .first()
- if subset_dataset_association:
- # Data subset exists.
- subset_dataset = subset_dataset_association.subset
- else:
- # Need to create subset.
- track_type, data_sources = input_dataset.datatype.get_track_type()
- data_source = data_sources[ 'data' ]
- converted_dataset = input_dataset.get_converted_dataset( trans, data_source )
- deps = input_dataset.get_converted_dataset_deps( trans, data_source )
-
- # Create new HDA for input dataset's subset.
- new_dataset = trans.app.model.HistoryDatasetAssociation( extension=input_dataset.ext, \
- dbkey=input_dataset.dbkey, \
- create_dataset=True, \
- sa_session=trans.sa_session,
- name="Subset [%s] of data %i" % \
- ( location, input_dataset.hid ),
- visible=False )
- target_history.add_dataset( new_dataset )
- trans.sa_session.add( new_dataset )
- trans.app.security_agent.set_all_dataset_permissions( new_dataset.dataset, hda_permissions )
-
- # Write subset of data to new dataset
- data_provider_class = get_data_provider( original_dataset=input_dataset )
- data_provider = data_provider_class( original_dataset=input_dataset,
- converted_dataset=converted_dataset,
- dependencies=deps )
- trans.app.object_store.create( new_dataset.dataset )
- data_provider.write_data_to_file( chrom, low, high, new_dataset.file_name )
-
- # TODO: (a) size not working; (b) need to set peek.
- new_dataset.set_size()
- new_dataset.info = "Data subset for trackster"
- new_dataset.set_dataset_state( trans.app.model.Dataset.states.OK )
-
- # Set metadata.
- # TODO: set meta internally if dataset is small enough?
- if trans.app.config.set_metadata_externally:
- trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool,
- trans, incoming = { 'input1':new_dataset },
- overwrite=False, job_params={ "source" : "trackster" } )
- else:
- message = 'Attributes updated'
- new_dataset.set_meta()
- new_dataset.datatype.after_setting_metadata( new_dataset )
-
- # Add HDA subset association.
- subset_association = trans.app.model.HistoryDatasetAssociationSubset( hda=input_dataset, subset=new_dataset, location=location )
- trans.sa_session.add( subset_association )
-
- subset_dataset = new_dataset
-
- trans.sa_session.flush()
-
- # Add dataset to tool's parameters.
- if not set_param_value( tool_params, jida.name, subset_dataset ):
- return to_json_string( { "error" : True, "message" : "error setting parameter %s" % jida.name } )
-
- #
- # Execute tool and handle outputs.
- #
- try:
- subset_job, subset_job_outputs = tool.execute( trans, incoming=tool_params,
- history=target_history,
- job_params={ "source" : "trackster" } )
- except Exception, e:
- # Lots of things can go wrong when trying to execute tool.
- return to_json_string( { "error" : True, "message" : e.__class__.__name__ + ": " + str(e) } )
- if run_on_region:
- for output in subset_job_outputs.values():
- output.visible = False
- trans.sa_session.flush()
-
- #
- # Return new track that corresponds to the original dataset.
- #
- output_name = None
- for joda in original_job.output_datasets:
- if joda.dataset == original_dataset:
- output_name = joda.name
- break
- for joda in subset_job.output_datasets:
- if joda.name == output_name:
- output_dataset = joda.dataset
-
- return self.add_track_async( trans, output_dataset.id )
-
@web.expose
@web.require_login( "use Galaxy visualizations", use_panels=True )
def paramamonster( self, trans, hda_ldda, dataset_id ):
@@ -799,18 +514,6 @@
# Helper methods.
# -----------------
- def _check_dataset_state( self, trans, dataset ):
- """
- Returns a message if dataset is not ready to be used in visualization.
- """
- if not dataset:
- return messages.NO_DATA
- if dataset.state == trans.app.model.Job.states.ERROR:
- return messages.ERROR
- if dataset.state != trans.app.model.Job.states.OK:
- return messages.PENDING
- return None
-
def _get_datasources( self, trans, dataset ):
"""
Returns datasources for dataset; if datasources are not available
@@ -833,56 +536,10 @@
data_sources_dict[ source_type ] = { "name" : data_source, "message": msg }
return data_sources_dict
-
- def _convert_dataset( self, trans, dataset, target_type ):
- """
- Converts a dataset to the target_type and returns a message indicating
- status of the conversion. None is returned to indicate that dataset
- was converted successfully.
- """
-
- # Get converted dataset; this will start the conversion if necessary.
- try:
- converted_dataset = dataset.get_converted_dataset( trans, target_type )
- except NoConverterException:
- return messages.NO_CONVERTER
- except ConverterDependencyException, dep_error:
- return { 'kind': messages.ERROR, 'message': dep_error.value }
-
- # Check dataset state and return any messages.
- msg = None
- if converted_dataset and converted_dataset.state == model.Dataset.states.ERROR:
- job_id = trans.sa_session.query( trans.app.model.JobToOutputDatasetAssociation ) \
- .filter_by( dataset_id=converted_dataset.id ).first().job_id
- job = trans.sa_session.query( trans.app.model.Job ).get( job_id )
- msg = { 'kind': messages.ERROR, 'message': job.stderr }
- elif not converted_dataset or converted_dataset.state != model.Dataset.states.OK:
- msg = messages.PENDING
-
- return msg
-
+
def _get_dataset( self, trans, hda_ldda, dataset_id ):
""" Returns either HDA or LDDA for hda/ldda and id combination. """
if hda_ldda == "hda":
return self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
else:
- return trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
-
-
-def _get_highest_priority_msg( message_list ):
- """
- Returns highest priority message from a list of messages.
- """
- return_message = None
-
- # For now, priority is: job error (dict), no converter, pending.
- for message in message_list:
- if message is not None:
- if isinstance(message, dict):
- return_message = message
- break
- elif message == messages.NO_CONVERTER:
- return_message = message
- elif return_message == None and message == messages.PENDING:
- return_message = message
- return return_message
+ return trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
\ No newline at end of file
diff -r b2eabe39a70f676b8cb3b90a656501804547fd87 -r 4fbd05095ca70adf740ed635451c0ec876635f50 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py
+++ b/lib/galaxy/web/controllers/visualization.py
@@ -69,7 +69,7 @@
class VisualizationController( BaseUIController, SharableMixin, UsesAnnotations,
- UsesHistoryMixinDatasetAssociationMixin, UsesVisualizationMixin,
+ UsesHistoryDatasetAssociationMixin, UsesVisualizationMixin,
UsesItemRatings ):
_user_list_grid = VisualizationListGrid()
_published_list_grid = VisualizationAllPublishedGrid()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b2eabe39a70f/
changeset: b2eabe39a70f
user: greg
date: 2012-06-12 21:15:08
summary: Enhance the tool's DependencyManager to handle information stored in tool_dependency database records when finding tool dependencies installed with tool shed repositories. Many miscellaneous tool shed dependency fixes are included with this change set. Several fixes for uninstalling and reinstalling tool shed repositories and associated tool dependencies are also included.
affected #: 9 files
diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -120,7 +120,7 @@
if not is_displayed:
is_displayed = True
return is_displayed, tool_sections
- def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description, changeset_revision,
+ def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description, installed_changeset_revision,
ctx_rev, install_dependencies ):
# Generate the metadata for the installed tool shed repository, among other things. It is critical that the installed repository is
# updated to the desired changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
@@ -141,11 +141,11 @@
# Add a new record to the tool_shed_repository table if one doesn't already exist. If one exists but is marked
# deleted, undelete it. It is critical that this happens before the call to add_to_tool_panel() below because
# tools will not be properly loaded if the repository is marked deleted.
- print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % repository_name
+ print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % repository_name
tool_shed_repository = create_or_update_tool_shed_repository( self.app,
repository_name,
description,
- changeset_revision,
+ installed_changeset_revision,
ctx_rev,
repository_clone_url,
metadata_dict,
@@ -158,7 +158,7 @@
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
repository_tools_tups = handle_missing_data_table_entry( self.app,
tool_shed_repository,
- changeset_revision,
+ installed_changeset_revision,
self.tool_path,
repository_tools_tups,
work_dir )
@@ -171,12 +171,12 @@
tool_dependencies_config = get_config_from_repository( self.app,
'tool_dependencies.xml',
tool_shed_repository,
- changeset_revision,
+ installed_changeset_revision,
work_dir )
# Install tool dependencies.
status, message = handle_tool_dependencies( app=self.app,
tool_shed_repository=tool_shed_repository,
- installed_changeset_revision=changeset_revision,
+ installed_changeset_revision=installed_changeset_revision,
tool_dependencies_config=tool_dependencies_config )
if status != 'ok' and message:
print 'The following error occurred from the InstallManager while installing tool dependencies:'
@@ -184,7 +184,7 @@
add_to_tool_panel( self.app,
repository_name,
repository_clone_url,
- changeset_revision,
+ installed_changeset_revision,
repository_tools_tups,
self.repository_owner,
self.migrated_tools_config,
@@ -199,7 +199,7 @@
datatypes_config = get_config_from_repository( self.app,
'datatypes_conf.xml',
tool_shed_repository,
- changeset_revision,
+ installed_changeset_revision,
work_dir )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
# after this installation completes.
@@ -209,7 +209,7 @@
repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
name=repository_name,
owner=self.repository_owner,
- installed_changeset_revision=changeset_revision,
+ installed_changeset_revision=installed_changeset_revision,
tool_dicts=metadata_dict.get( 'tools', [] ),
converter_path=converter_path,
display_path=display_path )
@@ -228,29 +228,29 @@
# Install a single repository, loading contained tools into the tool panel.
name = repository_elem.get( 'name' )
description = repository_elem.get( 'description' )
- changeset_revision = repository_elem.get( 'changeset_revision' )
+ installed_changeset_revision = repository_elem.get( 'changeset_revision' )
# Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
- clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, changeset_revision )
+ clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, installed_changeset_revision )
if self.__isinstalled( clone_dir ):
print "Skipping automatic install of repository '", name, "' because it has already been installed in location ", clone_dir
else:
tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
relative_install_dir = os.path.join( clone_dir, name )
- ctx_rev = get_ctx_rev( tool_shed_url, name, self.repository_owner, changeset_revision )
+ ctx_rev = get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision )
clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
tool_shed_repository, metadata_dict = self.handle_repository_contents( repository_clone_url,
relative_install_dir,
repository_elem,
name,
description,
- changeset_revision,
+ installed_changeset_revision,
ctx_rev,
install_dependencies )
if 'tools' in metadata_dict:
# Get the tool_versions from the tool shed for each tool in the installed change set.
url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy&no_reset=true' % \
- ( tool_shed_url, tool_shed_repository.name, self.repository_owner, changeset_revision )
+ ( tool_shed_url, tool_shed_repository.name, self.repository_owner, installed_changeset_revision )
response = urllib2.urlopen( url )
text = response.read()
response.close()
diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/tool_shed/tool_dependencies/common_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/common_util.py
@@ -1,6 +1,10 @@
-import os, tarfile, urllib2
+import os, shutil, tarfile, urllib2
from galaxy.datatypes.checkers import *
+DIRECTORY_BUILD_COMMAND_NAMES = [ 'change_directory' ]
+MOVE_BUILD_COMMAND_NAMES = [ 'move_directory_files', 'move_file' ]
+ALL_BUILD_COMMAND_NAMES = DIRECTORY_BUILD_COMMAND_NAMES + MOVE_BUILD_COMMAND_NAMES
+
def extract_tar( file_name, file_path ):
if isgzip( file_name ) or isbz2( file_name ):
# Open for reading with transparent compression.
@@ -17,6 +21,21 @@
return tarfile.is_tarfile( file_path )
def iszip( file_path ):
return check_zip( file_path )
+def move_directory_files( current_dir, source_dir, destination_dir ):
+ source_directory = os.path.abspath( os.path.join( current_dir, source_dir ) )
+ destination_directory = os.path.join( destination_dir )
+ if not os.path.isdir( destination_directory ):
+ os.makedirs( destination_directory )
+ for file_name in os.listdir( source_directory ):
+ source_file = os.path.join( source_directory, file_name )
+ destination_file = os.path.join( destination_directory, file_name )
+ shutil.move( source_file, destination_file )
+def move_file( current_dir, source, destination_dir ):
+ source_file = os.path.abspath( os.path.join( current_dir, source ) )
+ destination_directory = os.path.join( destination_dir )
+ if not os.path.isdir( destination_directory ):
+ os.makedirs( destination_directory )
+ shutil.move( source_file, destination_directory )
def tar_extraction_directory( file_path, file_name ):
file_name = file_name.strip()
extensions = [ '.tar.gz', '.tgz', '.tar.bz2', '.zip' ]
diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
@@ -1,7 +1,7 @@
# For Python 2.5
from __future__ import with_statement
-import os, shutil
+import os, shutil, tempfile
from contextlib import contextmanager
import common_util
@@ -13,10 +13,6 @@
from fabric.api import env, lcd, local, settings
-DIRECTORY_BUILD_COMMAND_NAMES = [ 'change_directory' ]
-MOVE_BUILD_COMMAND_NAMES = [ 'move_directory_files', 'move_file' ]
-ALL_BUILD_COMMAND_NAMES = DIRECTORY_BUILD_COMMAND_NAMES + MOVE_BUILD_COMMAND_NAMES
-
def check_fabric_version():
version = env.version
if int( version.split( "." )[ 0 ] ) < 1:
@@ -32,18 +28,12 @@
return env
@contextmanager
def make_tmp_dir():
- tmp_dir = local( 'echo $TMPDIR' ).strip()
- if not tmp_dir:
- home_dir = local( 'echo $HOME' )
- tmp_dir = os.path.join( home_dir, 'tmp' )
- work_dir = os.path.join( tmp_dir, 'deploy_tmp' )
- if not os.path.exists( work_dir ):
- local( 'mkdir -p %s' % work_dir )
+ work_dir = tempfile.mkdtemp()
yield work_dir
if os.path.exists( work_dir ):
local( 'rm -rf %s' % work_dir )
def handle_post_build_processing( tool_dependency_dir, install_dir, package_name=None ):
- cmd = "echo 'PATH=%s/bin:$PATH' > %s/env.sh;chmod +x %s/env.sh" % ( install_dir, install_dir, install_dir )
+ cmd = "echo 'PATH=%s/bin:$PATH; export PATH' > %s/env.sh;chmod +x %s/env.sh" % ( install_dir, install_dir, install_dir )
message = ''
output = local( cmd, capture=True )
log_results( cmd, output, os.path.join( install_dir, 'env_sh.log' ) )
@@ -94,7 +84,7 @@
build_command_items = build_command_key.split( 'v^v^v' )
build_command_name = build_command_items[ 0 ]
build_command = build_command_items[ 1 ]
- elif build_command_key in ALL_BUILD_COMMAND_NAMES:
+ elif build_command_key in common_util.ALL_BUILD_COMMAND_NAMES:
build_command_name = build_command_key
else:
build_command_name = None
@@ -103,16 +93,13 @@
current_dir = os.path.join( current_dir, build_command )
lcd( current_dir )
elif build_command_name == 'move_directory_files':
- source_directory = os.path.abspath( os.path.join( current_dir, build_command_dict[ 'source_directory' ] ) )
- destination_directory = build_command_dict[ 'destination_directory' ]
- for file_name in os.listdir( source_directory ):
- source_file = os.path.join( source_directory, file_name )
- destination_file = os.path.join( destination_directory, file_name )
- shutil.move( source_file, destination_file )
+ common_util.move_directory_files( current_dir=current_dir,
+ source_dir=os.path.join( build_command_dict[ 'source_directory' ] ),
+ destination_dir=os.path.join( build_command_dict[ 'destination_directory' ] ) )
elif build_command_name == 'move_file':
- source_file = os.path.abspath( os.path.join( current_dir, build_command_dict[ 'source' ] ) )
- destination = build_command_dict[ 'destination' ]
- shutil.move( source_file, destination )
+ common_util.move_file( current_dir=current_dir,
+ source=os.path.join( build_command_dict[ 'source' ] ),
+ destination_dir=os.path.join( build_command_dict[ 'destination' ] ) )
else:
build_command = build_command_key
with settings( warn_only=True ):
diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -1,4 +1,5 @@
import sys, os, subprocess, tempfile
+from common_util import *
from fabric_util import *
from galaxy.tool_shed.encoding_util import *
from galaxy.model.orm import *
@@ -11,16 +12,32 @@
from elementtree.ElementTree import Element, SubElement
def create_or_update_tool_dependency( app, tool_shed_repository, changeset_revision, name, version, type ):
+ """
+    This method is called from Galaxy (never the tool shed) when a new tool_shed_repository is being installed or when an uninstalled repository is
+ being reinstalled.
+ """
+ # First see if a tool_dependency record exists for the received changeset_revision.
sa_session = app.model.context.current
tool_dependency = get_tool_dependency_by_shed_changeset_revision( app, tool_shed_repository, name, version, type, changeset_revision )
if tool_dependency:
tool_dependency.uninstalled = False
else:
- tool_dependency = app.model.ToolDependency( tool_shed_repository_id=tool_shed_repository.id,
- installed_changeset_revision=changeset_revision,
- name=name,
- version=version,
- type=type )
+        # Check the tool_shed_repository's set of tool_dependency records for any that are marked uninstalled.  If one is found, set uninstalled to
+ # False and update the value of installed_changeset_revision.
+ found = False
+ for tool_dependency in tool_shed_repository.tool_dependencies:
+ if tool_dependency.name == name and tool_dependency.version == version and tool_dependency.type == type and tool_dependency.uninstalled:
+ found = True
+ tool_dependency.uninstalled = False
+ tool_dependency.installed_changeset_revision = changeset_revision
+ break
+ if not found:
+ # Create a new tool_dependency record for the tool_shed_repository.
+ tool_dependency = app.model.ToolDependency( tool_shed_repository_id=tool_shed_repository.id,
+ installed_changeset_revision=changeset_revision,
+ name=name,
+ version=version,
+ type=type )
sa_session.add( tool_dependency )
sa_session.flush()
return tool_dependency
diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -794,6 +794,20 @@
if tool_version:
return tool_version.get_version_ids( self.app )
return []
+ @property
+ def installed_tool_dependencies( self ):
+ # If this tool is included in an installed tool shed repository and tool dependencies were installed along with the
+ # tool shed repository, then this method will return the repository's ToolDependency records.
+ if self.app.config.use_tool_dependencies:
+ if self.tool_shed:
+ tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_changeset_revision( self.app,
+ self.tool_shed,
+ self.repository_name,
+ self.repository_owner,
+ self.installed_changeset_revision )
+ if tool_shed_repository:
+ return tool_shed_repository.tool_dependencies
+ return None
def __get_job_run_config( self, run_configs, key, job_params=None ):
# Look through runners/handlers to find one with matching parameters.
available_configs = []
@@ -1113,7 +1127,7 @@
for stdio_elem in ( root.findall( 'stdio' ) ):
self.parse_stdio_exit_codes( stdio_elem )
self.parse_stdio_regexes( stdio_elem )
- except Exception as e:
+ except Exception, e:
log.error( "Exception in parse_stdio! " + str(sys.exc_info()) )
def parse_stdio_exit_codes( self, stdio_elem ):
@@ -1185,7 +1199,7 @@
log.warning( "Tool exit_code range %s will match on "
+ "all exit codes" % code_range )
self.stdio_exit_codes.append( exit_code )
- except Exception as e:
+ except Exception, e:
log.error( "Exception in parse_stdio_exit_codes! "
+ str(sys.exc_info()) )
trace = sys.exc_info()[2]
@@ -1244,7 +1258,7 @@
regex.stdout_match = True
regex.stderr_match = True
self.stdio_regexes.append( regex )
- except Exception as e:
+ except Exception, e:
log.error( "Exception in parse_stdio_exit_codes! "
+ str(sys.exc_info()) )
trace = sys.exc_info()[2]
@@ -1270,7 +1284,7 @@
return_level = "warning"
elif ( re.search( "fatal", err_level, re.IGNORECASE ) ):
return_level = "fatal"
- except Exception as e:
+ except Exception, e:
log.error( "Exception in parse_error_level "
+ str(sys.exc_info() ) )
trace = sys.exc_info()[2]
@@ -2323,9 +2337,12 @@
# TODO: currently only supporting requirements of type package,
# need to implement some mechanism for mapping other types
# back to packages
- log.debug( "Dependency %s", requirement.name )
+ log.debug( "Building dependency shell command for dependency '%s'", requirement.name )
if requirement.type == 'package':
- script_file, base_path, version = self.app.toolbox.dependency_manager.find_dep( requirement.name, requirement.version )
+ script_file, base_path, version = self.app.toolbox.dependency_manager.find_dep( name=requirement.name,
+ version=requirement.version,
+ type=requirement.type,
+ installed_tool_dependencies=self.installed_tool_dependencies )
if script_file is None and base_path is None:
log.warn( "Failed to resolve dependency on '%s', ignoring", requirement.name )
elif script_file is None:
diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -30,7 +30,7 @@
if not os.path.isdir( base_path ):
log.warn( "Path '%s' is not directory, ignoring", base_path )
self.base_paths.append( os.path.abspath( base_path ) )
- def find_dep( self, name, version=None ):
+ def find_dep( self, name, version=None, type='package', installed_tool_dependencies=None ):
"""
Attempt to find a dependency named `name` at version `version`. If
version is None, return the "default" version as determined using a
@@ -40,10 +40,24 @@
if version is None:
return self._find_dep_default( name )
else:
- return self._find_dep_versioned( name, version )
- def _find_dep_versioned( self, name, version ):
+ return self._find_dep_versioned( name, version, installed_tool_dependencies=installed_tool_dependencies )
+ def _find_dep_versioned( self, name, version, type='package', installed_tool_dependencies=None ):
+ installed_dependency = None
+ if installed_tool_dependencies:
+ for installed_dependency in installed_tool_dependencies:
+ if not installed_dependency.uninstalled:
+ if installed_dependency.name == name and installed_dependency.version == version and installed_dependency.type == type:
+ break
for base_path in self.base_paths:
- path = os.path.join( base_path, name, version )
+ if installed_dependency:
+ tool_shed_repository = installed_dependency.tool_shed_repository
+ path = os.path.join( base_path,
+ name, version,
+ tool_shed_repository.owner,
+ tool_shed_repository.name,
+ installed_dependency.installed_changeset_revision )
+ else:
+ path = os.path.join( base_path, name, version )
script = os.path.join( path, 'env.sh' )
if os.path.exists( script ):
return script, path, version
@@ -51,7 +65,7 @@
return None, path, version
else:
return None, None, None
- def _find_dep_default( self, name ):
+ def _find_dep_default( self, name, type='package' ):
version = None
for base_path in self.base_paths:
path = os.path.join( base_path, name, 'default' )
diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -7,6 +7,7 @@
from galaxy.util.json import *
from galaxy.tools.search import ToolBoxSearch
from galaxy.tool_shed.tool_dependencies.install_util import install_package
+from galaxy.tool_shed.encoding_util import *
from galaxy.model.orm import *
from galaxy import eggs
@@ -310,20 +311,24 @@
tool_dicts=tool_dicts,
converter_path=converter_path,
display_path=display_path )
-def create_or_update_tool_shed_repository( app, name, description, changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
- owner='', dist_to_shed=False ):
+def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
+ current_changeset_revision=None, owner='', dist_to_shed=False ):
# The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
# to be in the Galaxy distribution, but have been moved to the main Galaxy tool shed.
- sa_session = app.model.context.current
- tmp_url = clean_repository_clone_url( repository_clone_url )
- tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
+ if current_changeset_revision is None:
+ # The current_changeset_revision is not passed if a repository is being installed for the first time. If a previously installed repository
+        # was later uninstalled, this value should be received as the value of that changeset to which the repository had been updated just prior to
+ # it being uninstalled.
+ current_changeset_revision = installed_changeset_revision
+ sa_session = app.model.context.current
+ tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
if not owner:
- owner = get_repository_owner( tmp_url )
+ owner = get_repository_owner_from_clone_url( repository_clone_url )
includes_datatypes = 'datatypes' in metadata_dict
- tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision )
+ tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision )
if tool_shed_repository:
tool_shed_repository.description = description
- tool_shed_repository.changeset_revision = changeset_revision
+ tool_shed_repository.changeset_revision = current_changeset_revision
tool_shed_repository.ctx_rev = ctx_rev
tool_shed_repository.metadata = metadata_dict
tool_shed_repository.includes_datatypes = includes_datatypes
@@ -335,7 +340,7 @@
description=description,
owner=owner,
installed_changeset_revision=changeset_revision,
- changeset_revision=changeset_revision,
+ changeset_revision=current_changeset_revision,
ctx_rev=ctx_rev,
metadata=metadata_dict,
includes_datatypes=includes_datatypes,
@@ -876,23 +881,16 @@
fh.close()
return tmp_filename
return None
-def get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
- sa_session = app.model.context.current
- if tool_shed.find( '//' ) > 0:
- tool_shed = tool_shed.split( '//' )[1]
- tool_shed = tool_shed.rstrip( '/' )
- return sa_session.query( app.model.ToolShedRepository ) \
- .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
- app.model.ToolShedRepository.table.c.name == name,
- app.model.ToolShedRepository.table.c.owner == owner,
- app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
- .first()
def get_repository_owner( cleaned_repository_url ):
items = cleaned_repository_url.split( 'repos' )
repo_path = items[ 1 ]
if repo_path.startswith( '/' ):
repo_path = repo_path.replace( '/', '', 1 )
return repo_path.lstrip( '/' ).split( '/' )[ 0 ]
+def get_repository_owner_from_clone_url( repository_clone_url ):
+ tmp_url = clean_repository_clone_url( repository_clone_url )
+ tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
+ return get_repository_owner( tmp_url )
def get_repository_tools_tups( app, metadata_dict ):
repository_tools_tups = []
if 'tools' in metadata_dict:
@@ -988,6 +986,33 @@
relative_install_dir = os.path.join( tool_path, partial_install_dir )
return tool_path, relative_install_dir
return None, None
+def get_tool_shed_from_clone_url( repository_clone_url ):
+ tmp_url = clean_repository_clone_url( repository_clone_url )
+ return tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
+def get_tool_shed_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
+ # This method is used only in Galaxy, not the tool shed.
+ sa_session = app.model.context.current
+ if tool_shed.find( '//' ) > 0:
+ tool_shed = tool_shed.split( '//' )[1]
+ tool_shed = tool_shed.rstrip( '/' )
+ return sa_session.query( app.model.ToolShedRepository ) \
+ .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
+ app.model.ToolShedRepository.table.c.name == name,
+ app.model.ToolShedRepository.table.c.owner == owner,
+ app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
+ .first()
+def get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision ):
+ # This method is used only in Galaxy, not the tool shed.
+ sa_session = app.model.context.current
+ if tool_shed.find( '//' ) > 0:
+ tool_shed = tool_shed.split( '//' )[1]
+ tool_shed = tool_shed.rstrip( '/' )
+ return sa_session.query( app.model.ToolShedRepository ) \
+ .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
+ app.model.ToolShedRepository.table.c.name == name,
+ app.model.ToolShedRepository.table.c.owner == owner,
+ app.model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \
+ .first()
def get_tool_version( app, tool_id ):
sa_session = app.model.context.current
return sa_session.query( app.model.ToolVersion ) \
@@ -1000,6 +1025,24 @@
.filter( and_( app.model.ToolVersionAssociation.table.c.parent_id == parent_tool_version.id,
app.model.ToolVersionAssociation.table.c.tool_id == tool_version.id ) ) \
.first()
+def get_update_to_changeset_revision_and_ctx_rev( trans, repository ):
+ """Return the changeset revision hash to which the repository can be updated."""
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ url = '%s/repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s&no_reset=true' % \
+ ( tool_shed_url, repository.name, repository.owner, repository.installed_changeset_revision )
+ try:
+ response = urllib2.urlopen( url )
+ encoded_update_dict = response.read()
+ if encoded_update_dict:
+ update_dict = tool_shed_decode( encoded_update_dict )
+ changeset_revision = update_dict[ 'changeset_revision' ]
+ ctx_rev = update_dict[ 'ctx_rev' ]
+ response.close()
+ except Exception, e:
+ log.debug( "Error getting change set revision for update from the tool shed for repository '%s': %s" % ( repository.name, str( e ) ) )
+ changeset_revision = None
+ ctx_rev = None
+ return changeset_revision, ctx_rev
def get_url_from_repository_tool_shed( app, repository ):
"""
The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is
@@ -1015,7 +1058,8 @@
def handle_missing_data_table_entry( app, repository, changeset_revision, tool_path, repository_tools_tups, dir ):
"""
Inspect each tool to see if any have input parameters that are dynamically generated select lists that require entries in the
- tool_data_table_conf.xml file. This method is called only from Galaxy (not the tool shed) when a repository is being installed.
+ tool_data_table_conf.xml file. This method is called only from Galaxy (not the tool shed) when a repository is being installed
+ or reinstalled.
"""
missing_data_table_entry = False
for index, repository_tools_tup in enumerate( repository_tools_tups ):
@@ -1079,10 +1123,10 @@
return error, message
def handle_tool_dependencies( app, tool_shed_repository, installed_changeset_revision, tool_dependencies_config ):
"""
- Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can refer to installation
- methods in Galaxy's tool_dependencies module or to proprietary fabric scripts contained in the repository. Future enhancements
- to handling tool dependencies may provide installation processes in addition to fabric based processes. The dependencies will be
- installed in:
+ Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can currently refer to installation
+ methods in Galaxy's tool_dependencies module. In the future, proprietary fabric scripts contained in the repository will be supported.
+ Future enhancements to handling tool dependencies may provide installation processes in addition to fabric based processes. The dependencies
+ will be installed in:
~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repository_owner>/<repository_name>/<installed_changeset_revision>
"""
status = 'ok'
@@ -1160,8 +1204,9 @@
def load_installed_display_applications( installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict, deactivate=deactivate )
-def load_repository_contents( trans, repository_name, description, owner, changeset_revision, ctx_rev, tool_path, repository_clone_url,
- relative_install_dir, tool_shed=None, tool_section=None, shed_tool_conf=None, install_tool_dependencies=False ):
+def load_repository_contents( trans, repository_name, description, owner, installed_changeset_revision, current_changeset_revision, ctx_rev,
+ tool_path, repository_clone_url, relative_install_dir, tool_shed=None, tool_section=None, shed_tool_conf=None,
+ install_tool_dependencies=False ):
"""
Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed)
when an admin is installing a new repository or reinstalling an uninstalled repository.
@@ -1174,10 +1219,12 @@
tool_shed_repository = create_or_update_tool_shed_repository( trans.app,
repository_name,
description,
- changeset_revision,
+ installed_changeset_revision,
ctx_rev,
repository_clone_url,
metadata_dict,
+ current_changeset_revision=current_changeset_revision,
+ owner='',
dist_to_shed=False )
if 'tools' in metadata_dict:
tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
@@ -1187,7 +1234,7 @@
work_dir = make_tmp_directory()
repository_tools_tups = handle_missing_data_table_entry( trans.app,
tool_shed_repository,
- changeset_revision,
+ current_changeset_revision,
tool_path,
repository_tools_tups,
work_dir )
@@ -1201,12 +1248,14 @@
tool_dependencies_config = get_config_from_repository( trans.app,
'tool_dependencies.xml',
tool_shed_repository,
- changeset_revision,
+ current_changeset_revision,
work_dir )
- # Install dependencies for repository tools.
+ # Install dependencies for repository tools. The tool_dependency.installed_changeset_revision value will be the value of
+ # tool_shed_repository.changeset_revision (this method's current_changeset_revision). This approach will allow for different
+ # versions of the same tool_dependency to be installed for associated versions of tools included in the installed repository.
status, message = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
- installed_changeset_revision=changeset_revision,
+ installed_changeset_revision=current_changeset_revision,
tool_dependencies_config=tool_dependencies_config )
if status != 'ok' and message:
print 'The following error occurred from load_repository_contents while installing tool dependencies:'
@@ -1214,7 +1263,7 @@
add_to_tool_panel( app=trans.app,
repository_name=repository_name,
repository_clone_url=repository_clone_url,
- changeset_revision=changeset_revision,
+ changeset_revision=current_changeset_revision,
repository_tools_tups=repository_tools_tups,
owner=owner,
shed_tool_conf=shed_tool_conf,
@@ -1229,7 +1278,7 @@
datatypes_config = get_config_from_repository( trans.app,
'datatypes_conf.xml',
tool_shed_repository,
- changeset_revision,
+ current_changeset_revision,
work_dir )
# Load data types required by tools.
converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, relative_install_dir, override=False )
@@ -1238,7 +1287,7 @@
repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
name=repository_name,
owner=owner,
- installed_changeset_revision=changeset_revision,
+ installed_changeset_revision=installed_changeset_revision,
tool_dicts=metadata_dict.get( 'tools', [] ),
converter_path=converter_path,
display_path=display_path )
diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -469,7 +469,8 @@
repository_name=name,
description=description,
owner=owner,
- changeset_revision=changeset_revision,
+ installed_changeset_revision=changeset_revision,
+ current_changeset_revision=changeset_revision,
ctx_rev=ctx_rev,
tool_path=tool_path,
repository_clone_url=repository_clone_url,
@@ -606,7 +607,8 @@
def reinstall_repository( self, trans, **kwd ):
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- repository = get_repository( trans, kwd[ 'id' ] )
+ repository_id = kwd[ 'id' ]
+ repository = get_repository( trans, repository_id )
no_changes = kwd.get( 'no_changes', '' )
no_changes_checked = CheckboxField.is_checked( no_changes )
install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
@@ -621,6 +623,12 @@
else:
ctx_rev = repository.ctx_rev
clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
+            # Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated.
+ current_changeset_revision, current_ctx_rev = get_update_to_changeset_revision_and_ctx_rev( trans, repository )
+ if current_ctx_rev != ctx_rev:
+ repo = hg.repository( get_configured_ui(), path=os.path.abspath( relative_install_dir ) )
+ pull_repository( repo, repository_clone_url, current_changeset_revision )
+ update_repository( repo, ctx_rev=current_ctx_rev )
tool_section = None
if repository.includes_tools:
# Get the location in the tool panel in which each tool was originally loaded.
@@ -681,7 +689,8 @@
repository_name=repository.name,
description=repository.description,
owner=repository.owner,
- changeset_revision=repository.installed_changeset_revision,
+ installed_changeset_revision=repository.installed_changeset_revision,
+ current_changeset_revision=current_changeset_revision,
ctx_rev=ctx_rev,
tool_path=tool_path,
repository_clone_url=repository_clone_url,
@@ -691,6 +700,7 @@
shed_tool_conf=shed_tool_conf,
install_tool_dependencies=install_tool_dependencies )
if error_message:
+ # We'll only have an error_message if there was a problem installing tool dependencies.
message += error_message
status = 'error'
repository.uninstalled = False
@@ -787,7 +797,7 @@
changeset_revision = params.get( 'changeset_revision', None )
latest_changeset_revision = params.get( 'latest_changeset_revision', None )
latest_ctx_rev = params.get( 'latest_ctx_rev', None )
- repository = get_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
+ repository = get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
if changeset_revision and latest_changeset_revision and latest_ctx_rev:
if changeset_revision == latest_changeset_revision:
message = "The installed repository named '%s' is current, there are no updates available. " % name
diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -648,6 +648,10 @@
elif not update_to_changeset_hash and changeset_hash == changeset_revision:
# We've found the changeset in the changelog for which we need to get the next update.
update_to_changeset_hash = changeset_hash
+ if from_update_manager:
+ if latest_changeset_revision == changeset_revision:
+ return no_update
+ return update
url += str( latest_changeset_revision )
url += '&latest_ctx_rev=%s' % str( update_to_ctx.rev() )
return trans.response.send_redirect( url )
@@ -1111,6 +1115,57 @@
return to_json_string( tool_version_dicts )
return ''
@web.expose
+ def get_changeset_revision_and_ctx_rev( self, trans, **kwd ):
+ """Handle a request from a local Galaxy instance to retrieve the changeset revision hash to which an installed repository can be updated."""
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ galaxy_url = kwd.get( 'galaxy_url', '' )
+ name = params.get( 'name', None )
+ owner = params.get( 'owner', None )
+ changeset_revision = params.get( 'changeset_revision', None )
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ # Default to the received changeset revision and ctx_rev.
+ update_to_ctx = get_changectx_for_changeset( repo, changeset_revision )
+ latest_changeset_revision = changeset_revision
+ update_dict = dict( changeset_revision=update_to_ctx, ctx_rev=str( update_to_ctx.rev() ) )
+ if changeset_revision == repository.tip:
+ # If changeset_revision is the repository tip, there are no additional updates.
+ return tool_shed_encode( update_dict )
+ else:
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
+ return tool_shed_encode( update_dict )
+ else:
+ # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
+ # repository was installed. We need to find the changeset_revision to which we need to update.
+ update_to_changeset_hash = None
+ for changeset in repo.changelog:
+ changeset_hash = str( repo.changectx( changeset ) )
+ ctx = get_changectx_for_changeset( repo, changeset_hash )
+ if update_to_changeset_hash:
+ if get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ):
+ # We found a RepositoryMetadata record.
+ if changeset_hash == repository.tip:
+ # The current ctx is the repository tip, so use it.
+ update_to_ctx = get_changectx_for_changeset( repo, changeset_hash )
+ latest_changeset_revision = changeset_hash
+ else:
+ update_to_ctx = get_changectx_for_changeset( repo, update_to_changeset_hash )
+ latest_changeset_revision = update_to_changeset_hash
+ break
+ elif not update_to_changeset_hash and changeset_hash == changeset_revision:
+ # We've found the changeset in the changelog for which we need to get the next update.
+ update_to_changeset_hash = changeset_hash
+ update_dict[ 'changeset_revision' ] = str( latest_changeset_revision )
+ update_dict[ 'ctx_rev' ] = str( update_to_ctx.rev() )
+ return tool_shed_encode( update_dict )
+ @web.expose
def help( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: greg: Migration script to alter the tool_dependency.version database table column type to Text.
by Bitbucket 12 Jun '12
by Bitbucket 12 Jun '12
12 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b762062399b3/
changeset: b762062399b3
user: greg
date: 2012-06-12 21:09:39
summary: Migration script to alter the tool_dependency.version database table column type to Text.
affected #: 2 files
diff -r 9a3ab5009e8e70596a07283e3611661410f92c4d -r b762062399b3cab0a4139dc3fcd33f30945e49ac lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -397,7 +397,7 @@
Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ),
Column( "installed_changeset_revision", TrimmedString( 255 ) ),
Column( "name", TrimmedString( 255 ) ),
- Column( "version", TrimmedString( 40 ) ),
+ Column( "version", Text ),
Column( "type", TrimmedString( 40 ) ),
Column( "uninstalled", Boolean, default=False ) )
diff -r 9a3ab5009e8e70596a07283e3611661410f92c4d -r b762062399b3cab0a4139dc3fcd33f30945e49ac lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py
@@ -0,0 +1,53 @@
+"""
+Migration script to alter the type of the tool_dependency.version column from TrimmedString(40) to Text.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
+ # Change the tool_dependency table's version column from TrimmedString to Text.
+ if migrate_engine.name == 'postgres':
+ cmd = "ALTER TABLE tool_dependency ALTER COLUMN version TYPE Text;"
+ elif migrate_engine.name == 'mysql':
+ cmd = "ALTER TABLE tool_dependency MODIFY COLUMN version Text;"
+ else:
+ # We don't have to do anything for sqlite tables. From the sqlite documentation at http://sqlite.org/datatype3.html:
+ # 1.0 Storage Classes and Datatypes
+ # Each value stored in an SQLite database (or manipulated by the database engine) has one of the following storage classes:
+ # NULL. The value is a NULL value.
+ # INTEGER. The value is a signed integer, stored in 1, 2, 3, 4, 6, or 8 bytes depending on the magnitude of the value.
+ # REAL. The value is a floating point value, stored as an 8-byte IEEE floating point number.
+ # TEXT. The value is a text string, stored using the database encoding (UTF-8, UTF-16BE or UTF-16LE).
+ # BLOB. The value is a blob of data, stored exactly as it was input.
+ cmd = None
+ if cmd:
+ try:
+ db_session.execute( cmd )
+ except Exception, e:
+ log.debug( "Altering tool_dependency.version column from TrimmedString(40) to Text failed: %s" % str( e ) )
+def downgrade():
+ # Not necessary to change column type Text to TrimmedString(40).
+ pass
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dan: Allow OpenID to work when require_login is set to True.
by Bitbucket 12 Jun '12
by Bitbucket 12 Jun '12
12 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/9a3ab5009e8e/
changeset: 9a3ab5009e8e
user: dan
date: 2012-06-12 20:12:46
summary: Allow OpenID to work when require_login is set to True.
affected #: 1 file
diff -r 36c540d788c3842b37c4ab5e522afd612ea8bd80 -r 9a3ab5009e8e70596a07283e3611661410f92c4d lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -435,6 +435,9 @@
url_for( controller='user', action='manage_user_info' ),
url_for( controller='user', action='set_default_permissions' ),
url_for( controller='user', action='reset_password' ),
+ url_for( controller='user', action='openid_auth' ),
+ url_for( controller='user', action='openid_process' ),
+ url_for( controller='user', action='openid_associate' ),
url_for( controller='library', action='browse' ),
url_for( controller='history', action='list' ),
url_for( controller='dataset', action='list' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Have Bowtie2 wrapper produce BAM rather than SAM output.
by Bitbucket 11 Jun '12
by Bitbucket 11 Jun '12
11 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/36c540d788c3/
changeset: 36c540d788c3
user: jgoecks
date: 2012-06-11 21:52:36
summary: Have Bowtie2 wrapper produce BAM rather than SAM output.
affected #: 2 files
diff -r 17302ca3be4e7b3d987733ea6e61982f9520996d -r 36c540d788c3842b37c4ab5e522afd612ea8bd80 tools/sr_mapping/bowtie2_wrapper.py
--- a/tools/sr_mapping/bowtie2_wrapper.py
+++ b/tools/sr_mapping/bowtie2_wrapper.py
@@ -65,7 +65,7 @@
index_path = options.index_path
# Build bowtie command.
- cmd = 'bowtie2 %s -x %s %s -S %s'
+ cmd = 'bowtie2 %s -x %s %s | samtools view -Sb - > %s'
# Set up reads.
if options.single_paired == 'paired':
diff -r 17302ca3be4e7b3d987733ea6e61982f9520996d -r 36c540d788c3842b37c4ab5e522afd612ea8bd80 tools/sr_mapping/bowtie2_wrapper.xml
--- a/tools/sr_mapping/bowtie2_wrapper.xml
+++ b/tools/sr_mapping/bowtie2_wrapper.xml
@@ -112,7 +112,7 @@
</inputs><outputs>
- <data format="sam" name="output" label="${tool.name} on ${on_string}: mapped reads">
+ <data format="bam" name="output" label="${tool.name} on ${on_string}: mapped reads"><actions><conditional name="refGenomeSource.genomeSource"><when value="indexed">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Put myExperiment URL in config's Beta features location.
by Bitbucket 11 Jun '12
by Bitbucket 11 Jun '12
11 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/17302ca3be4e/
changeset: 17302ca3be4e
user: jgoecks
date: 2012-06-11 21:22:51
summary: Put myExperiment URL in config's Beta features location.
affected #: 1 file
diff -r 0f32c2fc37cdc2959c86d2fdb0debe9a4c66282b -r 17302ca3be4e7b3d987733ea6e61982f9520996d universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -527,6 +527,9 @@
# be used for each "Set at Runtime" input, independent of others in the Workflow
#enable_unique_workflow_defaults = False
+# The URL to the myExperiment instance being used (omit scheme but include port)
+#myexperiment_url = www.myexperiment.org:80
+
# Enable Galaxy's "Upload via FTP" interface. You'll need to install and
# configure an FTP server (we've used ProFTPd since it can use Galaxy's
# database for authentication) and set the following two options.
@@ -681,9 +684,6 @@
#pbs_stage_path =
#pbs_dataset_server =
-# The URL to the myExperiment instance being used (omit scheme but include port)
-myexperiment_url = www.myexperiment.org:80
-
# This option allows users to see the full path of datasets via the "View
# Details" option in the history. Administrators can always see this.
#expose_dataset_path = False
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/66395a9d870f/
changeset: 66395a9d870f
user: jmchilton
date: 2012-06-06 03:35:33
summary: First attempt at updated dynamic job runners.
affected #: 3 files
diff -r 1890cb0d1cfbb3ef5a09affcdd18d2b8acf7d811 -r 66395a9d870fbe48660dbf75cc0a59264bb862f6 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -21,6 +21,7 @@
from galaxy.util.expressions import ExpressionContext
from galaxy.jobs.actions.post import ActionBox
from galaxy.exceptions import ObjectInvalid
+from galaxy.jobs.mapper import JobRunnerMapper
log = logging.getLogger( __name__ )
@@ -80,6 +81,7 @@
self.tool_provided_job_metadata = None
# Wrapper holding the info required to restore and clean up from files used for setting metadata externally
self.external_output_metadata = metadata.JobExternalOutputMetadataWrapper( job )
+ self.job_runner_mapper = JobRunnerMapper( self )
self.params = None
if job.params:
self.params = from_json_string( job.params )
@@ -88,7 +90,8 @@
self.__galaxy_system_pwent = None
def get_job_runner( self ):
- return self.tool.get_job_runner( self.params )
+ job_runner = self.job_runner_mapper.get_job_runner( self.params )
+ return job_runner
def get_job( self ):
return self.sa_session.query( model.Job ).get( self.job_id )
diff -r 1890cb0d1cfbb3ef5a09affcdd18d2b8acf7d811 -r 66395a9d870fbe48660dbf75cc0a59264bb862f6 lib/galaxy/jobs/mapper.py
--- /dev/null
+++ b/lib/galaxy/jobs/mapper.py
@@ -0,0 +1,82 @@
+import inspect, sys
+
+import galaxy.jobs.rules
+
+DYNAMIC_RUNNER_PREFIX = "dynamic:///"
+
+class JobRunnerMapper( object ):
+
+ def __init__( self, job_wrapper ):
+ self.job_wrapper = job_wrapper
+
+ def __invoke_expand_function( self, expand_function ):
+ function_arg_names = inspect.getargspec( expand_function ).args
+
+ possible_args = { "job_id" : self.job_wrapper.job_id,
+ "tool" : self.job_wrapper.tool,
+ "tool_id" : self.job_wrapper.tool.id,
+ "job_wrapper" : self.job_wrapper,
+ "app" : self.job_wrapper.app }
+
+ actual_args = {}
+
+ # Populate needed args
+ for possible_arg_name in possible_args:
+ if possible_arg_name in function_arg_names:
+ actual_args[ possible_arg_name ] = possible_args[ possible_arg_name ]
+
+ # Don't hit the DB to load the job object is not needed
+ if "job" in function_arg_names or "user" in function_arg_names or "user_email" in function_arg_names:
+ job = self.job_wrapper.get_job()
+ history = job.history
+ user = history and history.user
+ user_email = user and str(user.email)
+
+ if "job" in function_arg_names:
+ actual_args[ "job" ] = job
+
+ if "user" in function_arg_names:
+ actual_args[ "user" ] = user
+
+ if "user_email" in function_arg_names:
+ actual_args[ "user_email" ] = user_email
+
+ return expand_function( **actual_args )
+
+ def __determine_expand_function_name( self, option_parts ):
+ # default look for function with same name as tool, unless one specified
+ expand_function_name = self.job_wrapper.tool.id
+ if len( option_parts ) > 1:
+ expand_function_name = option_parts[ 1 ]
+ return expand_function_name
+
+ def __get_expand_function( self, expand_function_name ):
+ rules_module = sys.modules[ "galaxy.jobs.rules" ]
+ if hasattr( rules_module, expand_function_name ):
+ expand_function = getattr( rules_module, expand_function_name )
+ return expand_function
+ else:
+ raise Exception( "Dynamic job runner cannot find function to expand job runner type - %s" % expand_function_name )
+
+ def __expand_dynamic_job_runner( self, options_str ):
+ option_parts = options_str.split( '/' )
+ expand_type = option_parts[ 0 ]
+ if expand_type == "python":
+ expand_function_name = self.__determine_expand_function_name( option_parts )
+ expand_function = self.__get_expand_function( expand_function_name )
+ return self.__invoke_expand_function( expand_function )
+ else:
+ raise Exception( "Unhandled dynamic job runner type specified - %s" % calculation_type )
+
+ def __cache_job_runner( self, params ):
+ raw_job_runner = self.job_wrapper.tool.get_job_runner( params )
+ if raw_job_runner.startswith( DYNAMIC_RUNNER_PREFIX ):
+ job_runner = self.__expand_dynamic_job_runner( raw_job_runner[ len( DYNAMIC_RUNNER_PREFIX ) : ] )
+ else:
+ job_runner = raw_job_runner
+ self.cached_job_runner = job_runner
+
+ def get_job_runner( self, params ):
+ if not hasattr( self, 'cached_job_runner' ):
+ self.__cache_job_runner( params )
+ return self.cached_job_runner
diff -r 1890cb0d1cfbb3ef5a09affcdd18d2b8acf7d811 -r 66395a9d870fbe48660dbf75cc0a59264bb862f6 lib/galaxy/jobs/rules.py
--- /dev/null
+++ b/lib/galaxy/jobs/rules.py
@@ -0,0 +1,9 @@
+import logging
+
+log = logging.getLogger( __name__ )
+
+# Add functions to dynamically map job descriptions to job runners in
+# this file. These functions can optionally take in any of the
+# following arguments - job_wrapper, app, user_email, job, tool,
+# email, tool_id, and job_id.
+
https://bitbucket.org/galaxy/galaxy-central/changeset/f54b848298d4/
changeset: f54b848298d4
user: jmchilton
date: 2012-06-10 06:00:16
summary: Rework dynamic job runner config so that instead of using a rules.py file
for storing rules, they should be placed in
lib/galaxy/jobs/rules/. The rules submodules are "searched" in
lexicographical order, allowing for hierarchical configuration overrides
(e.g. naming schemes like: 000_galaxy_rules.py, 100_site_rules.py,
200_instance_rules.py)
affected #: 3 files
diff -r 66395a9d870fbe48660dbf75cc0a59264bb862f6 -r f54b848298d4d78d6fbcc85e9887960f21b5c239 lib/galaxy/jobs/mapper.py
--- a/lib/galaxy/jobs/mapper.py
+++ b/lib/galaxy/jobs/mapper.py
@@ -1,13 +1,50 @@
-import inspect, sys
+import logging
+import inspect
+import os
+
+log = logging.getLogger( __name__ )
import galaxy.jobs.rules
DYNAMIC_RUNNER_PREFIX = "dynamic:///"
class JobRunnerMapper( object ):
-
+ """
+ This class is responsible for managing the mapping of jobs
+ (in the form of job_wrappers) to job runner strings.
+ """
+
def __init__( self, job_wrapper ):
self.job_wrapper = job_wrapper
+ self.rule_modules = self.__get_rule_modules( )
+
+ def __get_rule_modules( self ):
+ unsorted_module_names = self.__get_rule_module_names( )
+ ## Load modules in reverse order to allow hierarchical overrides
+ ## i.e. 000_galaxy_rules.py, 100_site_rules.py, 200_instance_rules.py
+ module_names = sorted( unsorted_module_names, reverse=True )
+ modules = []
+ for rule_module_name in module_names:
+ try:
+ module = __import__( rule_module_name )
+ for comp in rule_module_name.split( "." )[1:]:
+ module = getattr( module, comp )
+ modules.append( module )
+ except BaseException, exception:
+ exception_str = str( exception )
+ message = "%s rule module could not be loaded: %s" % ( rule_module_name, exception_str )
+ log.debug( message )
+ continue
+ return modules
+
+ def __get_rule_module_names( self ):
+ rules_dir = galaxy.jobs.rules.__path__[0]
+ names = []
+ for fname in os.listdir( rules_dir ):
+ if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
+ rule_module_name = "galaxy.jobs.rules.%s" % fname[:-len(".py")]
+ names.append( rule_module_name )
+ return names
def __invoke_expand_function( self, expand_function ):
function_arg_names = inspect.getargspec( expand_function ).args
@@ -15,7 +52,7 @@
possible_args = { "job_id" : self.job_wrapper.job_id,
"tool" : self.job_wrapper.tool,
"tool_id" : self.job_wrapper.tool.id,
- "job_wrapper" : self.job_wrapper,
+ "job_wrapper" : self.job_wrapper,
"app" : self.job_wrapper.app }
actual_args = {}
@@ -25,7 +62,7 @@
if possible_arg_name in function_arg_names:
actual_args[ possible_arg_name ] = possible_args[ possible_arg_name ]
- # Don't hit the DB to load the job object is not needed
+ # Don't hit the DB to load the job object if not needed
if "job" in function_arg_names or "user" in function_arg_names or "user_email" in function_arg_names:
job = self.job_wrapper.get_job()
history = job.history
@@ -51,13 +88,21 @@
return expand_function_name
def __get_expand_function( self, expand_function_name ):
- rules_module = sys.modules[ "galaxy.jobs.rules" ]
- if hasattr( rules_module, expand_function_name ):
- expand_function = getattr( rules_module, expand_function_name )
+ matching_rule_module = self.__last_rule_module_with_function( expand_function_name )
+ if matching_rule_module:
+ expand_function = getattr( matching_rule_module, expand_function_name )
return expand_function
else:
raise Exception( "Dynamic job runner cannot find function to expand job runner type - %s" % expand_function_name )
-
+
+ def __last_rule_module_with_function( self, function_name ):
+ # self.rule_modules is sorted in reverse order, so find first
+ # with function
+ for rule_module in self.rule_modules:
+ if hasattr( rule_module, function_name ):
+ return rule_module
+ return None
+
def __expand_dynamic_job_runner( self, options_str ):
option_parts = options_str.split( '/' )
expand_type = option_parts[ 0 ]
@@ -77,6 +122,9 @@
self.cached_job_runner = job_runner
def get_job_runner( self, params ):
+ """
+ Cache the job_runner string to avoid recalculation.
+ """
if not hasattr( self, 'cached_job_runner' ):
self.__cache_job_runner( params )
return self.cached_job_runner
diff -r 66395a9d870fbe48660dbf75cc0a59264bb862f6 -r f54b848298d4d78d6fbcc85e9887960f21b5c239 lib/galaxy/jobs/rules.py
--- a/lib/galaxy/jobs/rules.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import logging
-
-log = logging.getLogger( __name__ )
-
-# Add functions to dynamically map job descriptions to job runners in
-# this file. These functions can optionally take in any of the
-# following arguments - job_wrapper, app, user_email, job, tool,
-# email, tool_id, and job_id.
-
https://bitbucket.org/galaxy/galaxy-central/changeset/0f32c2fc37cd/
changeset: 0f32c2fc37cd
user: natefoo
date: 2012-06-11 16:26:12
summary: Merged in jmchilton/umn-galaxy-central (pull request #47)
affected #: 3 files
diff -r 1df26d9240bb4e115cdd8cd019673c56c8e4ad67 -r 0f32c2fc37cdc2959c86d2fdb0debe9a4c66282b lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -21,6 +21,7 @@
from galaxy.util.expressions import ExpressionContext
from galaxy.jobs.actions.post import ActionBox
from galaxy.exceptions import ObjectInvalid
+from galaxy.jobs.mapper import JobRunnerMapper
log = logging.getLogger( __name__ )
@@ -80,6 +81,7 @@
self.tool_provided_job_metadata = None
# Wrapper holding the info required to restore and clean up from files used for setting metadata externally
self.external_output_metadata = metadata.JobExternalOutputMetadataWrapper( job )
+ self.job_runner_mapper = JobRunnerMapper( self )
self.params = None
if job.params:
self.params = from_json_string( job.params )
@@ -88,7 +90,8 @@
self.__galaxy_system_pwent = None
def get_job_runner( self ):
- return self.tool.get_job_runner( self.params )
+ job_runner = self.job_runner_mapper.get_job_runner( self.params )
+ return job_runner
def get_job( self ):
return self.sa_session.query( model.Job ).get( self.job_id )
diff -r 1df26d9240bb4e115cdd8cd019673c56c8e4ad67 -r 0f32c2fc37cdc2959c86d2fdb0debe9a4c66282b lib/galaxy/jobs/mapper.py
--- /dev/null
+++ b/lib/galaxy/jobs/mapper.py
@@ -0,0 +1,130 @@
+import logging
+import inspect
+import os
+
+log = logging.getLogger( __name__ )
+
+import galaxy.jobs.rules
+
+DYNAMIC_RUNNER_PREFIX = "dynamic:///"
+
+class JobRunnerMapper( object ):
+ """
+ This class is responsible for managing the mapping of jobs
+ (in the form of job_wrappers) to job runner strings.
+ """
+
+ def __init__( self, job_wrapper ):
+ self.job_wrapper = job_wrapper
+ self.rule_modules = self.__get_rule_modules( )
+
+ def __get_rule_modules( self ):
+ unsorted_module_names = self.__get_rule_module_names( )
+ ## Load modules in reverse order to allow hierarchical overrides
+ ## i.e. 000_galaxy_rules.py, 100_site_rules.py, 200_instance_rules.py
+ module_names = sorted( unsorted_module_names, reverse=True )
+ modules = []
+ for rule_module_name in module_names:
+ try:
+ module = __import__( rule_module_name )
+ for comp in rule_module_name.split( "." )[1:]:
+ module = getattr( module, comp )
+ modules.append( module )
+ except BaseException, exception:
+ exception_str = str( exception )
+ message = "%s rule module could not be loaded: %s" % ( rule_module_name, exception_str )
+ log.debug( message )
+ continue
+ return modules
+
+ def __get_rule_module_names( self ):
+ rules_dir = galaxy.jobs.rules.__path__[0]
+ names = []
+ for fname in os.listdir( rules_dir ):
+ if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
+ rule_module_name = "galaxy.jobs.rules.%s" % fname[:-len(".py")]
+ names.append( rule_module_name )
+ return names
+
+ def __invoke_expand_function( self, expand_function ):
+ function_arg_names = inspect.getargspec( expand_function ).args
+
+ possible_args = { "job_id" : self.job_wrapper.job_id,
+ "tool" : self.job_wrapper.tool,
+ "tool_id" : self.job_wrapper.tool.id,
+ "job_wrapper" : self.job_wrapper,
+ "app" : self.job_wrapper.app }
+
+ actual_args = {}
+
+ # Populate needed args
+ for possible_arg_name in possible_args:
+ if possible_arg_name in function_arg_names:
+ actual_args[ possible_arg_name ] = possible_args[ possible_arg_name ]
+
+ # Don't hit the DB to load the job object if not needed
+ if "job" in function_arg_names or "user" in function_arg_names or "user_email" in function_arg_names:
+ job = self.job_wrapper.get_job()
+ history = job.history
+ user = history and history.user
+ user_email = user and str(user.email)
+
+ if "job" in function_arg_names:
+ actual_args[ "job" ] = job
+
+ if "user" in function_arg_names:
+ actual_args[ "user" ] = user
+
+ if "user_email" in function_arg_names:
+ actual_args[ "user_email" ] = user_email
+
+ return expand_function( **actual_args )
+
+ def __determine_expand_function_name( self, option_parts ):
+ # default look for function with same name as tool, unless one specified
+ expand_function_name = self.job_wrapper.tool.id
+ if len( option_parts ) > 1:
+ expand_function_name = option_parts[ 1 ]
+ return expand_function_name
+
+ def __get_expand_function( self, expand_function_name ):
+ matching_rule_module = self.__last_rule_module_with_function( expand_function_name )
+ if matching_rule_module:
+ expand_function = getattr( matching_rule_module, expand_function_name )
+ return expand_function
+ else:
+ raise Exception( "Dynamic job runner cannot find function to expand job runner type - %s" % expand_function_name )
+
+ def __last_rule_module_with_function( self, function_name ):
+ # self.rule_modules is sorted in reverse order, so find first
+ # with function
+ for rule_module in self.rule_modules:
+ if hasattr( rule_module, function_name ):
+ return rule_module
+ return None
+
+ def __expand_dynamic_job_runner( self, options_str ):
+ option_parts = options_str.split( '/' )
+ expand_type = option_parts[ 0 ]
+ if expand_type == "python":
+ expand_function_name = self.__determine_expand_function_name( option_parts )
+ expand_function = self.__get_expand_function( expand_function_name )
+ return self.__invoke_expand_function( expand_function )
+ else:
+ raise Exception( "Unhandled dynamic job runner type specified - %s" % calculation_type )
+
+ def __cache_job_runner( self, params ):
+ raw_job_runner = self.job_wrapper.tool.get_job_runner( params )
+ if raw_job_runner.startswith( DYNAMIC_RUNNER_PREFIX ):
+ job_runner = self.__expand_dynamic_job_runner( raw_job_runner[ len( DYNAMIC_RUNNER_PREFIX ) : ] )
+ else:
+ job_runner = raw_job_runner
+ self.cached_job_runner = job_runner
+
+ def get_job_runner( self, params ):
+ """
+ Cache the job_runner string to avoid recalculation.
+ """
+ if not hasattr( self, 'cached_job_runner' ):
+ self.__cache_job_runner( params )
+ return self.cached_job_runner
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Handle Tophat2 fusion-ignore-chromosome parameter correctly.
by Bitbucket 11 Jun '12
by Bitbucket 11 Jun '12
11 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1df26d9240bb/
changeset: 1df26d9240bb
user: jgoecks
date: 2012-06-11 14:49:31
summary: Handle Tophat2 fusion-ignore-chromosome parameter correctly.
affected #: 2 files
diff -r 0edd1d65d746a806de155d3d37428ac84b4265ff -r 1df26d9240bb4e115cdd8cd019673c56c8e4ad67 tools/ngs_rna/tophat2_wrapper.py
--- a/tools/ngs_rna/tophat2_wrapper.py
+++ b/tools/ngs_rna/tophat2_wrapper.py
@@ -200,10 +200,12 @@
# Fusion search options.
if options.fusion_search:
- opts += ' --fusion-search --fusion-anchor-length %i --fusion-min-dist %i --fusion-read-mismatches %i --fusion-multireads %i --fusion-multipairs %i --fusion-ignore-chromosomes %s' % \
+ opts += ' --fusion-search --fusion-anchor-length %i --fusion-min-dist %i --fusion-read-mismatches %i --fusion-multireads %i --fusion-multipairs %i' % \
( int( options.fusion_anchor_length ), int( options.fusion_min_dist ),
int( options.fusion_read_mismatches ), int( options.fusion_multireads ),
- int( options.fusion_multipairs ), options.fusion_ignore_chromosomes )
+ int( options.fusion_multipairs ) )
+ if options.fusion_ignore_chromosomes:
+ opts += ' --fusion-ignore-chromosomes %s' % options.fusion_ignore_chromosomes
# Bowtie2 options.
if options.b2_very_fast:
diff -r 0edd1d65d746a806de155d3d37428ac84b4265ff -r 1df26d9240bb4e115cdd8cd019673c56c8e4ad67 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -103,7 +103,7 @@
--fusion-read-mismatches $params.fusion_search.read_mismatches
--fusion-multireads $params.fusion_search.multireads
--fusion-multipairs $params.fusion_search.multipairs
- --fusion-ignore-chromosomes $params.fusion_search.ignore_chromosomes
+ --fusion-ignore-chromosomes "$params.fusion_search.ignore_chromosomes"
#end if
#if $params.bowtie2_settings.b2_settings == "Yes":
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Backbone-ify server-state deferred and use when running tools in Trackster.
by Bitbucket 08 Jun '12
by Bitbucket 08 Jun '12
08 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0edd1d65d746/
changeset: 0edd1d65d746
user: jgoecks
date: 2012-06-08 21:36:02
summary: Backbone-ify server-state deferred and use when running tools in Trackster.
affected #: 3 files
diff -r be2577bfdbac49309f962b0b9a48ab11bac6159b -r 0edd1d65d746a806de155d3d37428ac84b4265ff static/scripts/viz/paramamonster.js
--- a/static/scripts/viz/paramamonster.js
+++ b/static/scripts/viz/paramamonster.js
@@ -244,8 +244,17 @@
.attr("class", "node")
.attr("transform", function(d) { return "translate(" + d.y + "," + d.x + ")"; });
+ // Set up behavior when node is clicked.
node.on("click", function(d, i) {
- console.log(d, i);
+ console.log(d, i);
+
+ // Gather: (a) dataset of interest; (b) region(s) of interest and (c) sets of parameters based on node clicked.
+
+ // Run job by submitting parameters + dataset as job inputs; get dataset ids as result.
+
+ // Create tracks for all resulting dataset ids.
+
+ // Display tiles for region(s) of interest.
});
node.append("circle")
diff -r be2577bfdbac49309f962b0b9a48ab11bac6159b -r 0edd1d65d746a806de155d3d37428ac84b4265ff static/scripts/viz/trackster.js
--- a/static/scripts/viz/trackster.js
+++ b/static/scripts/viz/trackster.js
@@ -25,28 +25,6 @@
};
/**
- * Provides support for server-state based deferred. Server is repeatedly polled, and when
- * condition is met, deferred is resolved.
- */
-var server_state_deferred = function(url, url_params, interval, success_fn) {
- var deferred = $.Deferred(),
- go = function() {
- $.getJSON(url, url_params, function(result) {
- if (success_fn(result)) {
- // Result is good, so resolve.
- deferred.resolve(result);
- }
- else {
- // Result not good, try again.
- setTimeout(go, interval);
- }
- });
- };
- go();
- return deferred;
-};
-
-/**
* Find browser's requestAnimationFrame method or fallback on a setTimeout
*/
var requestAnimationFrame = (function(){
@@ -1950,7 +1928,10 @@
this.run(url_params, new_track,
// Success callback.
function(track_data) {
- new_track.dataset_id = track_data.dataset_id;
+ new_track.set_dataset(new Dataset({
+ id: track_data.dataset_id,
+ hda_ldda: track_data.hda_ldda
+ }));
new_track.tiles_div.text("Running job.");
new_track.init();
}
@@ -1960,36 +1941,36 @@
* Run tool using a set of URL params and a success callback.
*/
run: function(url_params, new_track, success_callback) {
- // Add tool params to URL params.
- $.extend(url_params, this.get_param_values_dict());
+ // Run tool.
+ var ss_deferred = new ServerStateDeferred({
+ url: rerun_tool_url,
+ url_params: $.extend(url_params, this.get_param_values_dict()),
+ interval: 2000,
+ success_fn: function(response) {
+ return response !== "pending";
+ }
+ });
- // Run tool.
- // TODO: rewrite to use server state deferred.
- var json_run_tool = function() {
- $.getJSON(rerun_tool_url, url_params, function(response) {
- if (response === "no converter") {
- // No converter available for input datasets, so cannot run tool.
- new_track.container_div.addClass("error");
- new_track.content_div.text(DATA_NOCONVERTER);
- }
- else if (response.error) {
- // General error.
- new_track.container_div.addClass("error");
- new_track.content_div.text(DATA_CANNOT_RUN_TOOL + response.message);
- }
- else if (response === "pending") {
- // Converting/indexing input datasets; show message and try again.
- new_track.container_div.addClass("pending");
- new_track.content_div.text("Converting input data so that it can be used quickly with tool.");
- setTimeout(json_run_tool, 2000);
- }
- else {
- // Job submitted and running.
- success_callback(response);
- }
- });
- };
- json_run_tool();
+ // Start with this status message.
+ //new_track.container_div.addClass("pending");
+ //new_track.content_div.text("Converting input data so that it can be used quickly with tool.");
+
+ $.when(ss_deferred.go()).then(function(response) {
+ if (response === "no converter") {
+ // No converter available for input datasets, so cannot run tool.
+ new_track.container_div.addClass("error");
+ new_track.content_div.text(DATA_NOCONVERTER);
+ }
+ else if (response.error) {
+ // General error.
+ new_track.container_div.addClass("error");
+ new_track.content_div.text(DATA_CANNOT_RUN_TOOL + response.message);
+ }
+ else {
+ // Job submitted and running.
+ success_callback(response);
+ }
+ });
}
});
@@ -3765,7 +3746,7 @@
init_for_tool_data: function() {
// Set up track to fetch initial data from raw data URL when the dataset--not the converted datasets--
// is ready.
- this.data_url = raw_data_url;
+ this.data_manager.set('data_url', raw_data_url);
this.data_query_wait = 1000;
this.dataset_check_url = dataset_state_url;
@@ -3786,15 +3767,16 @@
self.data_query_wait = DEFAULT_DATA_QUERY_WAIT;
// Reset data URL when dataset indexing has completed/when not pending.
- $.when(
+ var ss_deferred = new ServerStateDeferred({
+ url: self.dataset_state_url,
+ url_params: {dataset_id : self.dataset_id, hda_ldda: self.hda_ldda},
+ interval: self.data_query_wait,
// Set up deferred to check dataset state until it is not pending.
- server_state_deferred(self.dataset_state_url,
- {dataset_id : self.dataset_id, hda_ldda: self.hda_ldda},
- self.data_query_wait,
- function(result) { return result !== "pending" })
- ).then(function() {
+ success_fn: function(result) { return result !== "pending" }
+ });
+ $.when(ss_deferred.go()).then(function() {
// Dataset is indexed, so use default data URL.
- self.data_url = default_data_url;
+ self.data_manager.set('data_url', default_data_url);
});
// Reset post-draw actions function.
@@ -4371,6 +4353,12 @@
this.set_painter_from_config();
};
extend(FeatureTrack.prototype, Drawable.prototype, TiledTrack.prototype, {
+ set_dataset: function(dataset) {
+ this.dataset_id = dataset.get('id');
+ this.hda_ldda = dataset.get('hda_ldda');
+ this.data_manager.set('dataset', dataset);
+ },
+
set_painter_from_config: function() {
if ( this.config.values['connector_style'] === 'arcs' ) {
this.painter = painters.ArcLinkedFeaturePainter;
diff -r be2577bfdbac49309f962b0b9a48ab11bac6159b -r 0edd1d65d746a806de155d3d37428ac84b4265ff static/scripts/viz/visualization.js
--- a/static/scripts/viz/visualization.js
+++ b/static/scripts/viz/visualization.js
@@ -9,6 +9,43 @@
// --------- Models ---------
/**
+ * Implementation of a server-state based deferred. Server is repeatedly polled, and when
+ * condition is met, deferred is resolved.
+ */
+var ServerStateDeferred = Backbone.Model.extend({
+ defaults: {
+ url: null,
+ url_params: {},
+ interval: 1000,
+ success_fn: function(result) { return true; }
+ },
+
+ /**
+ * Returns a deferred that resolves when success function returns true.
+ */
+ go: function() {
+ var deferred = $.Deferred(),
+ self = this,
+ success_fn = self.get('success_fn'),
+ interval = self.get('interval'),
+ _go = function() {
+ $.getJSON(self.get('url'), self.get('url_params'), function(result) {
+ if (success_fn(result)) {
+ // Result is good, so resolve.
+ deferred.resolve(result);
+ }
+ else {
+ // Result not good, try again.
+ setTimeout(_go, interval);
+ }
+ });
+ };
+ _go();
+ return deferred;
+ }
+});
+
+/**
* Generic cache that handles key/value pairs.
*/
var Cache = Backbone.Model.extend({
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Add min, max insert size parameters to bowtie2 wrapper.
by Bitbucket 08 Jun '12
by Bitbucket 08 Jun '12
08 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/be2577bfdbac/
changeset: be2577bfdbac
user: jgoecks
date: 2012-06-08 20:03:41
summary: Add min, max insert size parameters to bowtie2 wrapper.
affected #: 2 files
diff -r 6f15c9e850ab60fa027f0eccd445aad69c45eef9 -r be2577bfdbac49309f962b0b9a48ab11bac6159b tools/sr_mapping/bowtie2_wrapper.py
--- a/tools/sr_mapping/bowtie2_wrapper.py
+++ b/tools/sr_mapping/bowtie2_wrapper.py
@@ -18,6 +18,8 @@
parser.add_option( '-1', '--input1', dest='input1', help='The (forward or single-end) reads file in Sanger FASTQ format' )
parser.add_option( '-2', '--input2', dest='input2', help='The reverse reads file in Sanger FASTQ format' )
parser.add_option( '', '--single-paired', dest='single_paired', help='' )
+ parser.add_option( '-I', '--minins', dest='min_insert' )
+ parser.add_option( '-X', '--maxins', dest='max_insert' )
parser.add_option( '', '--settings', dest='settings', help='' )
parser.add_option( '', '--end-to-end', dest='end_to_end', action="store_true" )
parser.add_option( '', '--local', dest='local', action="store_true" )
@@ -73,6 +75,11 @@
# Set up options.
opts = '-p %s' % ( options.num_threads )
+ if options.single_paired == 'paired':
+ if options.min_insert:
+ opts += ' -I %s' % options.min_insert
+ if options.max_insert:
+ opts += ' -X %s' % options.max_insert
if options.settings == 'preSet':
pass
else:
diff -r 6f15c9e850ab60fa027f0eccd445aad69c45eef9 -r be2577bfdbac49309f962b0b9a48ab11bac6159b tools/sr_mapping/bowtie2_wrapper.xml
--- a/tools/sr_mapping/bowtie2_wrapper.xml
+++ b/tools/sr_mapping/bowtie2_wrapper.xml
@@ -30,6 +30,8 @@
## Second input only if input is paired-end.
#if $singlePaired.sPaired == "paired"
--input2=$singlePaired.input2
+ -I $singlePaired.minInsert
+ -X $singlePaired.maxInsert
#end if
## Set params.
@@ -56,6 +58,8 @@
<param format="fastqsanger" name="input1" type="data" label="FASTQ file" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33" />
<param format="fastqsanger" name="input2" type="data" label="FASTQ file" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33" />
<!-- TODO: paired-end specific parameters. -->
+ <param name="minInsert" type="integer" value="0" label="Minimum insert size for valid paired-end alignments" />
+ <param name="maxInsert" type="integer" value="250" label="Maximum insert size for valid paired-end alignments" />
</when></conditional><conditional name="refGenomeSource">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

08 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6f15c9e850ab/
changeset: 6f15c9e850ab
user: natefoo
date: 2012-06-08 19:52:06
summary: Fix API history undelete, issue #763.
affected #: 1 file
diff -r d69ccbd53ea295e0a4ad0068317bf7238eb1f3b8 -r 6f15c9e850ab60fa027f0eccd445aad69c45eef9 lib/galaxy/web/api/histories.py
--- a/lib/galaxy/web/api/histories.py
+++ b/lib/galaxy/web/api/histories.py
@@ -153,6 +153,7 @@
POST /api/histories/deleted/{encoded_quota_id}/undelete
Undeletes a quota
"""
+ history_id = id
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False, deleted=True )
history.deleted = False
trans.sa_session.add( history )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

08 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d69ccbd53ea2/
changeset: d69ccbd53ea2
user: jgoecks
date: 2012-06-08 19:48:37
summary: Fix bug in setting Bowtie2 parameters.
affected #: 1 file
diff -r 2f2870f4f67d2198c69b44af688b5b37174459b0 -r d69ccbd53ea295e0a4ad0068317bf7238eb1f3b8 tools/sr_mapping/bowtie2_wrapper.xml
--- a/tools/sr_mapping/bowtie2_wrapper.xml
+++ b/tools/sr_mapping/bowtie2_wrapper.xml
@@ -35,10 +35,12 @@
## Set params.
--settings=$params.settingsType
- #if str($params.align_type) == "end_to_end":
- --end-to-end --preset-alignment=$params.preset.align_preset_select
- #else:
- --local --preset-alignment=$params.preset.align_preset_select-local
+ #if $params.settingsType == "full":
+ #if str($params.align_type) == "end_to_end":
+ --end-to-end --preset-alignment=$params.preset.align_preset_select
+ #else:
+ --local --preset-alignment=$params.preset.align_preset_select-local
+ #end if
#end if
</command><inputs>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0