galaxy-commits

commit/galaxy-central: greg: 1) Enhance the InstallManager to provide the installer the ability to choose to install tool dependencies (or not) rather than automatically installing them.
by Bitbucket 01 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/018179ad4c9b/
changeset: 018179ad4c9b
user: greg
date: 2012-06-01 20:51:41
summary: 1) Enhance the InstallManager to provide the installer the ability to choose to install tool dependencies (or not) rather than automatically installing them.
2) Persist changes to the tool_data_table_conf.xml file only if within the Galaxy webapp.
3) Apply the 1-liner fix from change set 9ffef0de07f5 in Peter van Heusden's pull request which fixes a problem where the __init__ tests if config.database_connection is None.
affected #: 8 files
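As a quick illustration of point 1, the choice is driven entirely by an extra command-line token: the sketch below is a simplified stand-in (not the real InstallManager, whose full constructor appears in the diff that follows) showing how the flag is read from sys.argv and threaded through.
    import sys

    class InstallManagerSketch( object ):
        # Simplified stand-in for galaxy.tool_shed.install_manager.InstallManager:
        # only the new install_dependencies plumbing is shown here.
        def __init__( self, install_dependencies ):
            self.install_dependencies = install_dependencies
        def install_repository( self, repository_name ):
            if self.install_dependencies:
                print "Installing %s and its tool dependencies" % repository_name
            else:
                print "Installing %s without tool dependencies" % repository_name

    if __name__ == '__main__':
        # The migration script accepts the literal token 'install_dependencies',
        # exactly as MigrateToolsApplication does in the diff below.
        install_dependencies = 'install_dependencies' in sys.argv
        InstallManagerSketch( install_dependencies ).install_repository( 'emboss_5' )
Run as "sh ./scripts/migrate_tools/0002_tools.sh install_dependencies", the token reaches the InstallManager and dependency installation is attempted; without it, only the repositories themselves are installed.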
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -31,6 +31,11 @@
db_url = self.config.database_connection
else:
db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
+ # Set up the tool sheds registry
+ if os.path.isfile( self.config.tool_sheds_config ):
+ self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
+ else:
+ self.tool_shed_registry = None
# Initialize database / check for appropriate schema version. # If this
# is a new installation, we'll restrict the tool migration messaging.
from galaxy.model.migrate.check import create_or_verify_database
@@ -47,11 +52,6 @@
self.config.database_engine_options,
database_query_profiling_proxy = self.config.database_query_profiling_proxy,
object_store = self.object_store )
- # Set up the tool sheds registry
- if os.path.isfile( self.config.tool_sheds_config ):
- self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
- else:
- self.tool_shed_registry = None
# Manage installed tool shed repositories.
self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -7,9 +7,12 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
from galaxy.util.odict import odict
+from galaxy.tool_shed.migrate.common import *
+
+REPOSITORY_OWNER = 'devteam'
class InstallManager( object ):
- def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config ):
+ def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ):
"""
Check tool settings in tool_shed_install_config and install all repositories that are not already installed. The tool
panel configuration file is the received migrated_tools_config, which is the reserved file named migrated_tools_conf.xml.
@@ -30,9 +33,9 @@
tree = util.parse_xml( tool_shed_install_config )
root = tree.getroot()
self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
- self.repository_owner = 'devteam'
+ self.repository_owner = REPOSITORY_OWNER
for repository_elem in root:
- self.install_repository( repository_elem )
+ self.install_repository( repository_elem, install_dependencies )
def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
found = False
for root, dirs, files in os.walk( relative_install_dir ):
@@ -117,7 +120,8 @@
if not is_displayed:
is_displayed = True
return is_displayed, tool_sections
- def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description, changeset_revision, ctx_rev ):
+ def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description, changeset_revision,
+ ctx_rev, install_dependencies ):
# Generate the metadata for the installed tool shed repository, among other things. It is critical that the installed repository is
# updated to the desired changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
# The values for the keys in each of the following dictionaries will be a list to allow for the same tool to be displayed in multiple places
@@ -162,7 +166,7 @@
repository_tools_tups, sample_files_copied = handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
copy_sample_files( self.app, sample_files, sample_files_copied=sample_files_copied )
- if 'tool_dependencies' in metadata_dict:
+ if install_dependencies and 'tool_dependencies' in metadata_dict:
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = get_config_from_repository( self.app,
'tool_dependencies.xml',
@@ -220,7 +224,7 @@
except:
pass
return tool_shed_repository, metadata_dict
- def install_repository( self, repository_elem ):
+ def install_repository( self, repository_elem, install_dependencies ):
# Install a single repository, loading contained tools into the tool panel.
name = repository_elem.get( 'name' )
description = repository_elem.get( 'description' )
@@ -241,7 +245,8 @@
name,
description,
changeset_revision,
- ctx_rev )
+ ctx_rev,
+ install_dependencies )
if 'tools' in metadata_dict:
# Get the tool_versions from the tool shed for each tool in the installed change set.
url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy&no_reset=true' % \
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/tool_shed/migrate/check.py
--- a/lib/galaxy/tool_shed/migrate/check.py
+++ b/lib/galaxy/tool_shed/migrate/check.py
@@ -6,6 +6,7 @@
from migrate.versioning import repository, schema
from sqlalchemy import *
from common import *
+from galaxy.util.odict import odict
log = logging.getLogger( __name__ )
@@ -44,13 +45,20 @@
if latest_tool_migration_script_number != db_schema.version:
if app.new_installation:
# New installations will not be missing tools, so we don't need to worry about them.
- missing_tool_configs = []
+ missing_tool_configs_dict = odict()
else:
tool_panel_configs = get_non_shed_tool_panel_configs( app )
if tool_panel_configs:
- missing_tool_configs = check_for_missing_tools( tool_panel_configs, latest_tool_migration_script_number )
+ # The missing_tool_configs_dict contents are something like:
+ # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
+ missing_tool_configs_dict = check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number )
else:
- missing_tool_configs = []
+ missing_tool_configs_dict = odict()
+ have_tool_dependencies = False
+ for k, v in missing_tool_configs_dict.items():
+ if v:
+ have_tool_dependencies = True
+ break
config_arg = ''
if os.path.abspath( os.path.join( os.getcwd(), 'universe_wsgi.ini' ) ) != galaxy_config_file:
config_arg = ' -c %s' % galaxy_config_file.replace( os.path.abspath( os.getcwd() ), '.' )
@@ -62,7 +70,7 @@
output = proc.stdout.read( 32768 )
if return_code != 0:
raise Exception( "Error attempting to update the value of migrate_tools.version: %s" % output )
- elif missing_tool_configs:
+ elif missing_tool_configs_dict:
if len( tool_panel_configs ) == 1:
plural = ''
tool_panel_config_file_names = tool_panel_configs[ 0 ]
@@ -71,8 +79,8 @@
tool_panel_config_file_names = ', '.join( tool_panel_configs )
msg = "\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
msg += "\n\nThe list of files at the end of this message refers to tools that are configured to load into the tool panel for\n"
- msg += "this Galaxy instance, but have been removed from the Galaxy distribution. These tools can be automatically installed\n"
- msg += "from the Galaxy tool shed at http://toolshed.g2.bx.psu.edu.\n\n"
+ msg += "this Galaxy instance, but have been removed from the Galaxy distribution. These tools and their dependencies can be\n"
+ msg += "automatically installed from the Galaxy tool shed at http://toolshed.g2.bx.psu.edu.\n\n"
msg += "To skip this process, attempt to start your Galaxy server again (e.g., sh run.sh or whatever you use). If you do this,\n"
msg += "be aware that these tools will no longer be available in your Galaxy tool panel, and entries for each of them should\n"
msg += "be removed from your file%s named %s.\n\n" % ( plural, tool_panel_config_file_names )
@@ -87,17 +95,45 @@
msg += "configured could result in undesired behavior when modifying or updating your local Galaxy instance or the tool shed\n"
msg += "repositories if they are in directories that pose conflicts. See mercurial's .hgignore documentation at the following\n"
msg += "URL for details.\n\nhttp://mercurial.selenic.com/wiki/.hgignore\n\n"
- msg += output
+ if have_tool_dependencies:
+ msg += "The following tool dependencies can also optionally be installed (see the option flag in the command below). If you\n"
+ msg += "choose to install them (recommended), they will be installed within the location specified by the 'tool_dependency_dir'\n"
+ msg += "setting in your main Galaxy configuration file (e.g., uninverse_wsgi.ini).\n"
+ processed_tool_dependencies = []
+ for missing_tool_config, tool_dependencies in missing_tool_configs_dict.items():
+ for tool_dependencies_tup in tool_dependencies:
+ if tool_dependencies_tup not in processed_tool_dependencies:
+ msg += "------------------------------------\n"
+ msg += "Tool Dependency\n"
+ msg += "------------------------------------\n"
+ msg += "Name: %s, Version: %s, Type: %s\n" % ( tool_dependencies_tup[ 0 ],
+ tool_dependencies_tup[ 1 ],
+ tool_dependencies_tup[ 2 ] )
+ if tool_dependencies_tup[ 3 ]:
+ msg += "Requirements and installation information:\n"
+ msg += "%s\n" % tool_dependencies_tup[ 3 ]
+ else:
+ msg += "\n"
+ msg += "------------------------------------\n"
+ processed_tool_dependencies.append( tool_dependencies_tup )
+ msg += "\n"
+ msg += "%s" % output.replace( 'done', '' )
+ msg += "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv\n"
+ msg += "sh ./scripts/migrate_tools/%04d_tools.sh\n" % latest_tool_migration_script_number
+ msg += "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n"
+ if have_tool_dependencies:
+ msg += "The tool dependencies listed above will be installed along with the repositories if you add the 'install_dependencies'\n"
+ msg += "option to the above command like this:\n\n"
+ msg += "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv\n"
+ msg += "sh ./scripts/migrate_tools/%04d_tools.sh install_dependencies\n" % latest_tool_migration_script_number
+ msg += "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n"
+ msg += "Tool dependencies can be installed after the repositories have been installed, but installing them now is better.\n\n"
msg += "After the installation process finishes, you can start your Galaxy server. As part of this installation process,\n"
msg += "entries for each of the following tool config files will be added to the file named ./migrated_tool_conf.xml, so these\n"
msg += "tools will continue to be loaded into your tool panel. Because of this, existing entries for these files should be\n"
msg += "removed from your file%s named %s, but only after the installation process finishes.\n\n" % ( plural, tool_panel_config_file_names )
- for i, missing_tool_config in enumerate( missing_tool_configs ):
+ for missing_tool_config, tool_dependencies in missing_tool_configs_dict.items():
msg += "%s\n" % missing_tool_config
- # Should we do the following?
- #if i > 10:
- # msg += "\n...and %d more tools...\n" % ( len( missing_tool_configs ) - ( i + 1 ) )
- # break
msg += "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n"
raise Exception( msg )
else:
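The admonition message built above leans on the shape of missing_tool_configs_dict, which the diff documents as mapping a missing tool config file name to a list of ( name, version, type, readme ) tuples. A small self-contained walk over data of that shape (values here are made up, following the example given in the diff):
    from collections import OrderedDict  # stand-in for galaxy.util.odict.odict

    # Shape documented above, e.g.:
    # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
    missing_tool_configs_dict = OrderedDict()
    missing_tool_configs_dict[ 'emboss_antigenic.xml' ] = [ ( 'emboss', '5.0.0', 'package', 'readme text' ) ]
    missing_tool_configs_dict[ 'some_other_tool.xml' ] = []

    # Any non-empty value means the message needs its tool-dependency section.
    have_tool_dependencies = any( deps for deps in missing_tool_configs_dict.values() )

    # Each unique dependency is reported once, as in the loop above.
    processed_tool_dependencies = []
    for missing_tool_config, tool_dependencies in missing_tool_configs_dict.items():
        for dep in tool_dependencies:
            if dep not in processed_tool_dependencies:
                name, version, dep_type, readme = dep
                print "Name: %s, Version: %s, Type: %s" % ( name, version, dep_type )
                processed_tool_dependencies.append( dep )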
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/tool_shed/migrate/common.py
--- a/lib/galaxy/tool_shed/migrate/common.py
+++ b/lib/galaxy/tool_shed/migrate/common.py
@@ -1,4 +1,4 @@
-import sys, os, ConfigParser
+import sys, os, ConfigParser, urllib2
import galaxy.config
import galaxy.datatypes.registry
from galaxy import util, tools
@@ -7,39 +7,63 @@
from galaxy.objectstore import build_object_store_from_config
import galaxy.tool_shed.tool_shed_registry
from galaxy.tool_shed import install_manager
-from galaxy.tool_shed.migrate.common import *
+from galaxy.tool_shed.encoding_util import *
+from galaxy.util.odict import odict
-def check_for_missing_tools( tool_panel_configs, latest_tool_migration_script_number ):
+REPOSITORY_OWNER = 'devteam'
+
+def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ):
# Get the 000x_tools.xml file associated with the current migrate_tools version number.
tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
# Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
- migrated_tool_configs = []
+ migrated_tool_configs_dict = odict()
tree = util.parse_xml( tools_xml_file_path )
root = tree.getroot()
+ tool_shed = root.get( 'name' )
+ tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed )
for elem in root:
if elem.tag == 'repository':
+ tool_dependencies = []
+ tool_dependencies_dict = {}
+ repository_name = elem.get( 'name' )
+ changeset_revision = elem.get( 'changeset_revision' )
+ url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&webapp=install_manager&no_reset=true' % \
+ ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ tool_dependencies_dict = tool_shed_decode( text )
+ for dependency_key, requirements_dict in tool_dependencies_dict.items():
+ tool_dependency_name = requirements_dict[ 'name' ]
+ tool_dependency_version = requirements_dict[ 'version' ]
+ tool_dependency_type = requirements_dict[ 'type' ]
+ tool_dependency_readme = requirements_dict.get( 'readme', '' )
+ tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
for tool_elem in elem.findall( 'tool' ):
- migrated_tool_configs.append( tool_elem.get( 'file' ) )
+ migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies
# Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
- missing_tool_configs = []
+ missing_tool_configs_dict = odict()
for tool_panel_config in tool_panel_configs:
tree = util.parse_xml( tool_panel_config )
root = tree.getroot()
for elem in root:
+ missing_tool_dependencies = []
if elem.tag == 'tool':
- missing_tool_configs = check_tool_tag_set( elem, migrated_tool_configs, missing_tool_configs )
+ missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
elif elem.tag == 'section':
for section_elem in elem:
if section_elem.tag == 'tool':
- missing_tool_configs = check_tool_tag_set( section_elem, migrated_tool_configs, missing_tool_configs )
- return missing_tool_configs
-def check_tool_tag_set( elem, migrated_tool_configs, missing_tool_configs ):
+ missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
+ return missing_tool_configs_dict
+def check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ):
file_path = elem.get( 'file', None )
if file_path:
path, name = os.path.split( file_path )
- if name in migrated_tool_configs:
- missing_tool_configs.append( name )
- return missing_tool_configs
+ if name in migrated_tool_configs_dict:
+ tool_dependencies = migrated_tool_configs_dict[ name ]
+ missing_tool_configs_dict[ name ] = tool_dependencies
+ return missing_tool_configs_dict
def get_non_shed_tool_panel_configs( app ):
# Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml.
config_filenames = []
@@ -52,9 +76,18 @@
if tool_path is None:
config_filenames.append( config_filename )
return config_filenames
+def get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ):
+ for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
+ if shed_url.find( tool_shed ) >= 0:
+ if shed_url.endswith( '/' ):
+ shed_url = shed_url.rstrip( '/' )
+ return shed_url
+ return None
+
class MigrateToolsApplication( object ):
"""Encapsulates the state of a basic Galaxy Universe application in order to initiate the Install Manager"""
def __init__( self, tools_migration_config ):
+ install_dependencies = 'install_dependencies' in sys.argv
galaxy_config_file = 'universe_wsgi.ini'
if '-c' in sys.argv:
pos = sys.argv.index( '-c' )
@@ -69,7 +102,7 @@
for key, value in config_parser.items( "app:main" ):
galaxy_config_dict[ key ] = value
self.config = galaxy.config.Configuration( **galaxy_config_dict )
- if self.config.database_connection is None:
+ if not self.config.database_connection:
self.config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
self.config.update_integrated_tool_panel = True
self.object_store = build_object_store_from_config( self.config )
@@ -106,7 +139,8 @@
'scripts',
'migrate_tools',
tools_migration_config ),
- migrated_tools_config=self.config.migrated_tools_config )
+ migrated_tools_config=self.config.migrated_tools_config,
+ install_dependencies=install_dependencies )
@property
def sa_session( self ):
return self.model.context.current
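The new check_for_missing_tools above fetches each repository's dependency metadata from the tool shed over HTTP. A stripped-down version of just that request/decode step (the decode callable stands in for tool_shed_decode from galaxy.tool_shed.encoding_util; 'devteam' is the REPOSITORY_OWNER used for migrated tools):
    import urllib2

    def fetch_tool_dependencies( tool_shed_url, repository_name, changeset_revision, decode ):
        # Same endpoint the migration code queries above.
        url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&webapp=install_manager&no_reset=true' % \
            ( tool_shed_url, repository_name, 'devteam', changeset_revision )
        response = urllib2.urlopen( url )
        text = response.read()
        response.close()
        if not text:
            # The repository declares no tool dependencies.
            return []
        # The tool shed returns an encoded dictionary whose values carry
        # name, version, type and an optional readme.
        tool_dependencies_dict = decode( text )
        return [ ( requirements_dict[ 'name' ],
                   requirements_dict[ 'version' ],
                   requirements_dict[ 'type' ],
                   requirements_dict.get( 'readme', '' ) )
                 for requirements_dict in tool_dependencies_dict.values() ]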
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/tool_shed/migrate/versions/0002_tools.py
--- a/lib/galaxy/tool_shed/migrate/versions/0002_tools.py
+++ b/lib/galaxy/tool_shed/migrate/versions/0002_tools.py
@@ -3,10 +3,6 @@
datatypes_conf.xml.sample. You should remove the Emboss datatypes from your version of datatypes_conf.xml. The
repositories named emboss_5 and emboss_datatypes from the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu
will be installed into your local Galaxy instance at the location discussed above by running the following command.
-
-vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
-sh ./scripts/migrate_tools/0002_tools.sh
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""
import sys
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -41,7 +41,7 @@
self.data_tables[ table.name ] = table
log.debug( "Loaded tool data table '%s'", table.name )
return table_elems
- def add_new_entries_from_config_file( self, config_filename, tool_data_table_config_path ):
+ def add_new_entries_from_config_file( self, config_filename, tool_data_table_config_path, persist=False ):
"""
This method is called when a tool shed repository that includes a tool_data_table_conf.xml.sample file is being
installed into a local galaxy instance. We have 2 cases to handle, files whose root tag is <tables>, for example:
@@ -78,7 +78,7 @@
if table.name not in self.data_tables:
self.data_tables[ table.name ] = table
log.debug( "Added new tool data table '%s'", table.name )
- if self.data_table_elem_names != original_data_table_elem_names:
+ if persist and self.data_table_elem_names != original_data_table_elem_names:
# Persist Galaxy's version of the changed tool_data_table_conf.xml file.
self.to_xml_file( tool_data_table_config_path )
return table_elems
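The persist keyword added above is what makes the same table-loading code safe for both webapps: only the Galaxy side passes persist=True, so only Galaxy ever rewrites tool_data_table_conf.xml. A heavily condensed sketch of that gate (the real manager also tracks the parsed elements themselves):
    class ToolDataTableManagerSketch( object ):
        # Condensed stand-in for galaxy.tools.data.ToolDataTableManager showing
        # only how the persist flag gates writing the merged config back to disk.
        def __init__( self ):
            self.data_table_elem_names = []
        def add_new_entries_from_config_file( self, config_filename, tool_data_table_config_path, persist=False ):
            original_names = list( self.data_table_elem_names )
            # ... parse config_filename and append any new table names here ...
            if persist and self.data_table_elem_names != original_names:
                # Reached only from the Galaxy webapp; the tool shed keeps
                # everything in memory and never touches the file.
                self.to_xml_file( tool_data_table_config_path )
        def to_xml_file( self, tool_data_table_config_path ):
            pass  # the real method serializes the stored <table> elements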
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -993,8 +993,8 @@
.first()
def get_url_from_repository_tool_shed( app, repository ):
"""
- The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu
- We need the URL to this tool shed, which is something like: http://toolshed.g2.bx.psu.edu/
+ The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is
+ something like: http://toolshed.g2.bx.psu.edu/.
"""
for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
if shed_url.find( repository.tool_shed ) >= 0:
@@ -1019,7 +1019,7 @@
sample_tool_data_table_conf = get_config_from_repository( app, 'tool_data_table_conf.xml.sample', repository, changeset_revision, dir )
# Add entries to the ToolDataTableManager's in-memory data_tables dictionary as well as the list of data_table_elems and the list of
# data_table_elem_names.
- error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf )
+ error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True )
if error:
# TODO: Do more here than logging an exception.
log.debug( correction_msg )
@@ -1055,15 +1055,15 @@
repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
return repository_tools_tups, sample_files_copied
-def handle_sample_tool_data_table_conf_file( app, filename ):
+def handle_sample_tool_data_table_conf_file( app, filename, persist=False ):
"""
- Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary as well as appending them to
- Galaxy's tool_data_table_conf.xml file on disk.
+ Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary. If persist is True (should only occur
+ if the call is from the Galaxy side, not the tool shed), the new entries will be appended to Galaxy's tool_data_table_conf.xml file on disk.
"""
error = False
message = ''
try:
- new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( filename, app.config.tool_data_table_config_path )
+ new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( filename, app.config.tool_data_table_config_path, persist=persist )
except Exception, e:
message = str( e )
error = True
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -11,6 +11,7 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, make_tmp_directory, NOT_TOOL_CONFIGS, strip_path
+from galaxy.tool_shed.encoding_util import *
from common import *
from galaxy import eggs
@@ -995,6 +996,30 @@
url += '&latest_ctx_rev=%s' % str( latest_ctx.rev() )
return trans.response.send_redirect( url )
@web.expose
+ def get_tool_dependencies( self, trans, **kwd ):
+ # Handle a request from a local Galaxy instance. If the request originated with the Galaxy instance's InstallManager, the value of 'webapp'
+ # will be 'install_manager'.
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ # If the request originated with the UpdateManager, it will not include a galaxy_url.
+ galaxy_url = kwd.get( 'galaxy_url', '' )
+ name = params.get( 'name', None )
+ owner = params.get( 'owner', None )
+ changeset_revision = params.get( 'changeset_revision', None )
+ webapp = params.get( 'webapp', 'community' )
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ for downloadable_revision in repository.downloadable_revisions:
+ if downloadable_revision.changeset_revision == changeset_revision:
+ break
+ metadata = downloadable_revision.metadata
+ tool_dependencies = metadata.get( 'tool_dependencies', '' )
+ if webapp == 'install_manager':
+ if tool_dependencies:
+ return tool_shed_encode( tool_dependencies )
+ return ''
+ # TODO: future handler where request comes from some Galaxy admin feature.
+ @web.expose
def browse_repositories( self, trans, **kwd ):
# We add params to the keyword dict in this method in order to rename the param
# with an "f-" prefix, simulating filtering by clicking a search link. We have
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Add Bowtie2 preset options to Tophat2 wrapper.
by Bitbucket 31 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/9ae91fb49fdd/
changeset: 9ae91fb49fdd
user: jgoecks
date: 2012-05-31 17:08:24
summary: Add Bowtie2 preset options to Tophat2 wrapper.
affected #: 2 files
diff -r 2f971a75d8575082685e97325967fedc67b462a4 -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 tools/ngs_rna/tophat2_wrapper.py
--- a/tools/ngs_rna/tophat2_wrapper.py
+++ b/tools/ngs_rna/tophat2_wrapper.py
@@ -64,6 +64,12 @@
parser.add_option( '', '--min-coverage-intron', dest='min_coverage_intron', help='Minimum intron length that may be found during coverage search' )
parser.add_option( '', '--max-coverage-intron', dest='max_coverage_intron', help='Maximum intron length that may be found during coverage search' )
+ # Bowtie2 options.
+ parser.add_option( '', '--b2-very-fast', action='store_true', dest='b2_very_fast')
+ parser.add_option( '', '--b2-fast', action='store_true', dest='b2_fast')
+ parser.add_option( '', '--b2-very-sensitive', action='store_true', dest='b2_very_sensitive')
+ parser.add_option( '', '--b2-sensitive', action='store_true', dest='b2_sensitive')
+
# Fusion search options.
parser.add_option( '', '--fusion-search', action='store_true', dest='fusion_search' )
parser.add_option( '', '--fusion-anchor-length', dest='fusion_anchor_length' )
@@ -198,6 +204,16 @@
( int( options.fusion_anchor_length ), int( options.fusion_min_dist ),
int( options.fusion_read_mismatches ), int( options.fusion_multireads ),
int( options.fusion_multipairs ), options.fusion_ignore_chromosomes )
+
+ # Bowtie2 options.
+ if options.b2_very_fast:
+ opts += ' --b2-very-fast'
+ if options.b2_fast:
+ opts += ' --b2-fast'
+ if options.b2_sensitive:
+ opts += ' --b2-sensitive'
+ if options.b2_very_sensitive:
+ opts += ' --b2-very-sensitive'
cmd = cmd % ( opts, index_path, reads )
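Functionally, the wrapper change just turns four mutually exclusive boolean flags into the matching tophat2 preset switch. A condensed sketch of that mapping outside the full wrapper (the option names mirror the diff; everything else about the real command line is omitted):
    import optparse

    def bowtie2_preset_opts( options ):
        # Append whichever Bowtie2 preset switch was requested; the Galaxy tool
        # XML only ever sets one of them.
        opts = ''
        if options.b2_very_fast:
            opts += ' --b2-very-fast'
        if options.b2_fast:
            opts += ' --b2-fast'
        if options.b2_sensitive:
            opts += ' --b2-sensitive'
        if options.b2_very_sensitive:
            opts += ' --b2-very-sensitive'
        return opts

    parser = optparse.OptionParser()
    for preset in ( 'very-fast', 'fast', 'sensitive', 'very-sensitive' ):
        parser.add_option( '--b2-%s' % preset, action='store_true',
                           dest='b2_%s' % preset.replace( '-', '_' ), default=False )
    options, args = parser.parse_args( [ '--b2-very-sensitive' ] )
    print bowtie2_preset_opts( options )  # ' --b2-very-sensitive'
The tool XML added below exposes these presets as a single select, so at most one flag ever reaches the wrapper.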
diff -r 2f971a75d8575082685e97325967fedc67b462a4 -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -105,6 +105,13 @@
--fusion-multipairs $params.fusion_search.multipairs
--fusion-ignore-chromosomes $params.fusion_search.ignore_chromosomes
#end if
+
+ #if $params.bowtie2_settings.b2_settings == "Yes":
+ #if $params.bowtie2_settings.preset.b2_preset == "Yes":
+ --b2-$params.bowtie2_settings.preset.b2_preset_select
+ #end if
+ #end if
+
#end if
</command>
<inputs>
@@ -257,6 +264,33 @@
<param name="ignore_chromosomes" type="text" value='' label="Ignore some chromosomes such as chrM when detecting fusion break points"/>
</when>
</conditional>
+
+ <!-- Bowtie2 settings. -->
+ <conditional name="bowtie2_settings">
+ <param name="b2_settings" type="select" label="Set Bowtie2 settings">
+ <option selected="true" value="No">No</option>
+ <option value="Yes">Yes</option>
+ </param>
+ <when value="No" />
+ <when value="Yes">
+ <conditional name="preset">
+ <param name="b2_preset" type="select" label="Use Preset options">
+ <option selected="true" value="Yes">Yes</option>
+ <option value="No">No</option>
+ </param>
+ <when value="Yes">
+ <param name="b2_preset_select" type="select" label="Preset option">
+ <option value="very-fast">Very fast</option>
+ <option selected="true" value="fast">Fast</option>
+ <option value="sensitive">Sensitive</option>
+ <option value="very-sensitive">Very sensitive</option>
+ </param>
+ </when>
+ <!-- TODO: -->
+ <when value="No" />
+ </conditional>
+ </when>
</conditional>
</when><!-- full -->
</conditional><!-- params -->
</inputs>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Clearer explanation and help for default vs. full parameter settings.
by Bitbucket 30 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b6ddc6196429/
changeset: b6ddc6196429
user: jgoecks
date: 2012-05-31 01:47:40
summary: Clearer explanation and help for default vs. full parameter settings.
affected #: 2 files
diff -r 377e1d1691fc285cbccc94b393ce7ed069956794 -r b6ddc6196429859c887f20c359e0afc463afa229 tools/ngs_rna/tophat_color_wrapper.xml
--- a/tools/ngs_rna/tophat_color_wrapper.xml
+++ b/tools/ngs_rna/tophat_color_wrapper.xml
@@ -179,7 +179,7 @@
<when value="single"><conditional name="sParams"><param name="sSettingsType" type="select" label="TopHat settings to use" help="You can use the default settings or set custom values for any of Tophat's parameters.">
- <option value="preSet">Use Defaults</option>
+ <option value="preSet">Default settings</option><option value="full">Full parameter list</option></param><when value="preSet" />
@@ -284,8 +284,8 @@
<param format="fastqcssanger" name="input2" type="data" label="RNA-Seq FASTQ file" help="Color-space: Must have Sanger-scaled quality values with ASCII offset 33" /><param name="mate_inner_distance" type="integer" value="20" label="Mean Inner Distance between Mate Pairs" /><conditional name="pParams">
- <param name="pSettingsType" type="select" label="TopHat settings to use" help="For most mapping needs use Commonly used settings. If you want full control use Full parameter list">
- <option value="preSet">Commonly used</option>
+ <param name="pSettingsType" type="select" label="TopHat settings to use" help="Use the Full parameter list to change default settings.">
+ <option value="preSet">Default settings</option><option value="full">Full parameter list</option></param><when value="preSet" />
diff -r 377e1d1691fc285cbccc94b393ce7ed069956794 -r b6ddc6196429859c887f20c359e0afc463afa229 tools/ngs_rna/tophat_wrapper.xml
--- a/tools/ngs_rna/tophat_wrapper.xml
+++ b/tools/ngs_rna/tophat_wrapper.xml
@@ -175,8 +175,8 @@
</param><when value="single"><conditional name="sParams">
- <param name="sSettingsType" type="select" label="TopHat settings to use" help="You can use the default settings or set custom values for any of Tophat's parameters.">
- <option value="preSet">Use Defaults</option>
+ <param name="sSettingsType" type="select" label="TopHat settings to use" help="Use the Full parameter list to change default settings.">
+ <option value="preSet">Default settings</option><option value="full">Full parameter list</option></param><when value="preSet" />
@@ -281,8 +281,8 @@
<param format="fastqsanger" name="input2" type="data" label="RNA-Seq FASTQ file" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33" /><param name="mate_inner_distance" type="integer" value="20" label="Mean Inner Distance between Mate Pairs" /><conditional name="pParams">
- <param name="pSettingsType" type="select" label="TopHat settings to use" help="For most mapping needs use Commonly used settings. If you want full control use Full parameter list">
- <option value="preSet">Commonly used</option>
+ <param name="pSettingsType" type="select" label="TopHat settings to use" help="Use the Full parameter list to change default settings.">
+ <option value="preSet">Default settings</option><option value="full">Full parameter list</option></param><when value="preSet" />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Fix compatibility issue with 9765176c63f7
by Bitbucket 30 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/377e1d1691fc/
changeset: 377e1d1691fc
user: jgoecks
date: 2012-05-31 00:09:26
summary: Fix compatibility issue with 9765176c63f7
affected #: 1 file
diff -r 1270fd83fab77ddda4385a6a3eb2011518ebfd3a -r 377e1d1691fc285cbccc94b393ce7ed069956794 static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -1368,7 +1368,11 @@
// Introduction div shown when there are no tracks.
this.intro_div = $("<div/>").addClass("intro").appendTo(this.viewport_container).hide();
var add_tracks_button = $("<div/>").text("Add Datasets to Visualization").addClass("action-button").appendTo(this.intro_div).click(function () {
- add_datasets();
+ add_datasets(add_datasets_url, add_track_async_url, function(tracks) {
+ _.each(tracks, function(track) {
+ view.add_drawable( object_from_template(track, view) );
+ });
+ });
});
// Another label track at bottom
this.nav_labeltrack = $("<div/>").addClass("nav-labeltrack").appendTo(this.bottom_container);
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Use backbone to save trackster visualizations.
by Bitbucket 30 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1270fd83fab7/
changeset: 1270fd83fab7
user: jgoecks
date: 2012-05-30 22:37:51
summary: Use backbone to save trackster visualizations.
affected #: 5 files
diff -r ec4584a0e500e755aa5436691318d442831d8787 -r 1270fd83fab77ddda4385a6a3eb2011518ebfd3a lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2445,8 +2445,8 @@
self.user = None
class Visualization( object ):
- def __init__( self, user=None, type=None, title=None, dbkey=None, slug=None, latest_revision=None ):
- self.id = None
+ def __init__( self, id=None, user=None, type=None, title=None, dbkey=None, slug=None, latest_revision=None ):
+ self.id = id
self.user = user
self.type = type
self.title = title
diff -r ec4584a0e500e755aa5436691318d442831d8787 -r 1270fd83fab77ddda4385a6a3eb2011518ebfd3a lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -234,15 +234,26 @@
rows.append( [location, name] )
return { 'data': rows }
- # TODO: this is duplicated from visualization controller; remove it once
- # routing incompatibilities have been resolved.
@web.json
- def save( self, trans, config, type, id=None, title=None, dbkey=None, annotation=None ):
+ def save( self, trans, vis_json ):
"""
Save a visualization; if visualization does not have an ID, a new
visualization is created. Returns JSON of visualization.
"""
- return self.save_visualization( trans, from_json_string( config ), type, id, title, dbkey, annotation )
+
+ # TODO: Need from_dict to convert json to Visualization object.
+ vis_config = from_json_string( vis_json )
+ config = {
+ 'view': vis_config[ 'datasets' ],
+ 'bookmarks': vis_config[ 'bookmarks' ],
+ 'viewport': vis_config[ 'viewport' ]
+ }
+ type = vis_config[ 'type' ]
+ id = vis_config[ 'id' ]
+ title = vis_config[ 'title' ]
+ dbkey = vis_config[ 'dbkey' ]
+ annotation = vis_config.get( 'annotation', None )
+ return self.save_visualization( trans, config, type, id, title, dbkey, annotation )
@web.expose
@web.require_login()
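For reference, the reworked save handler above takes the entire visualization as one JSON keyword argument ('vis_json') rather than separate form fields. A framework-free sketch of the payload it expects and how it is unpacked (the standard json module is used in place of galaxy.util.json; the field values are invented):
    import json

    # Payload POSTed by the Backbone model's save() (see visualization.js below).
    vis_json = json.dumps( {
        'id': None,                    # None until the visualization is first saved
        'title': 'My visualization',
        'type': 'trackster',
        'dbkey': 'hg19',
        'datasets': [],                # stored as config[ 'view' ]
        'bookmarks': [],
        'viewport': { 'chrom': 'chr1', 'start': 0, 'end': 1000, 'overview': None },
    } )

    # Unpacking mirrors the save() method in tracks.py above.
    vis_config = json.loads( vis_json )
    config = {
        'view': vis_config[ 'datasets' ],
        'bookmarks': vis_config[ 'bookmarks' ],
        'viewport': vis_config[ 'viewport' ],
    }
    annotation = vis_config.get( 'annotation', None )  # optional, hence the .get()
    # config, plus type/id/title/dbkey/annotation, is what save_visualization() receives.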
diff -r ec4584a0e500e755aa5436691318d442831d8787 -r 1270fd83fab77ddda4385a6a3eb2011518ebfd3a static/scripts/mvc/visualization.js
--- a/static/scripts/mvc/visualization.js
+++ b/static/scripts/mvc/visualization.js
@@ -54,6 +54,24 @@
type: "",
dbkey: "",
datasets: []
+ },
+
+ url: function() { return galaxy_paths.get("visualization_url"); },
+
+ /**
+ * POSTs visualization's JSON to its URL using the parameter 'vis_json'
+ * Note: This is necessary because (a) Galaxy requires keyword args and
+ * (b) Galaxy does not handle PUT now.
+ */
+ save: function() {
+ return $.ajax({
+ url: this.url(),
+ type: "POST",
+ dataType: "json",
+ data: {
+ vis_json: JSON.stringify(this)
+ }
+ });
}
});
diff -r ec4584a0e500e755aa5436691318d442831d8787 -r 1270fd83fab77ddda4385a6a3eb2011518ebfd3a static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -1368,7 +1368,7 @@
// Introduction div shown when there are no tracks.
this.intro_div = $("<div/>").addClass("intro").appendTo(this.viewport_container).hide();
var add_tracks_button = $("<div/>").text("Add Datasets to Visualization").addClass("action-button").appendTo(this.intro_div).click(function () {
- add_tracks();
+ add_datasets();
});
// Another label track at bottom
this.nav_labeltrack = $("<div/>").addClass("nav-labeltrack").appendTo(this.bottom_container);
diff -r ec4584a0e500e755aa5436691318d442831d8787 -r 1270fd83fab77ddda4385a6a3eb2011518ebfd3a templates/tracks/browser.mako
--- a/templates/tracks/browser.mako
+++ b/templates/tracks/browser.mako
@@ -47,7 +47,10 @@
<script type="text/javascript">
//
// Place URLs here so that url_for can be used to generate them.
- //
+ //
+ galaxy_paths.set({
+ visualization_url: "${h.url_for( action='save' )}"
+ });
var
add_track_async_url = "${h.url_for( action='add_track_async' )}",
add_datasets_url = "${h.url_for( action='list_current_history_datasets' )}",
@@ -148,36 +151,29 @@
// FIXME: give unique IDs to Drawables and save overview as ID.
var overview_track_name = (view.overview_drawable ? view.overview_drawable.name : null);
- var payload = {
- 'view': view.to_dict(),
+ var visualization = new TracksterVisualization({
+ 'id': view.vis_id,
+ 'title': view.name,
+ 'dbkey': view.dbkey,
+ 'type': 'trackster',
+ 'datasets': view.to_dict(),
'viewport': { 'chrom': view.chrom, 'start': view.low , 'end': view.high, 'overview': overview_track_name },
'bookmarks': bookmarks
- };
-
- $.ajax({
- url: "${h.url_for( action='save' )}",
- type: "POST",
- data: {
- 'id': view.vis_id,
- 'title': view.name,
- 'dbkey': view.dbkey,
- 'type': 'trackster',
- 'config': JSON.stringify(payload)
- },
- dataType: "json",
- success: function(vis_info) {
+ });
+
+ visualization.save()
+ .success(function(vis_info) {
hide_modal();
view.vis_id = vis_info.vis_id;
view.has_changes = false;
-
+
// Needed to set URL when first saving a visualization.
window.history.pushState({}, "", vis_info.url + window.location.hash);
- },
- error: function() {
+ })
+ .error(function() {
show_modal( "Could Not Save", "Could not save visualization. Please try again later.",
{ "Close" : hide_modal } );
- }
- });
+ });
} },
{ icon_class: 'cross-circle', title: 'Close', on_click: function() {
window.location = "${h.url_for( controller='visualization', action='list' )}";
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: greg: Fixes for handling tool shed repositories that include tool_data_table_conf.xml.sample files.
by Bitbucket 30 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ec4584a0e500/
changeset: ec4584a0e500
user: greg
date: 2012-05-30 22:00:18
summary: Fixes for handling tool shed repositories that include tool_data_table_conf.xml.sample files. The fixes apply to determining if the tool is valid in the tool shed and also to the Galaxy environment when the tool shed repository is installed.
affected #: 6 files
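Most of the changeset revolves around the ToolDataTableManager keeping the parsed <table> elements in memory and writing them back out when a repository adds new ones. A hedged sketch of that write-out, modeled on the to_xml_file method added in the diff below but using xml.etree in place of Galaxy's util.xml_to_string:
    import os, shutil, tempfile
    from xml.etree import ElementTree

    def persist_data_table_elems( data_table_elems, tool_data_table_config_path ):
        # Write to a temporary file first and then move it into place so a
        # half-written tool_data_table_conf.xml is never left on disk.
        fd, filename = tempfile.mkstemp()
        os.write( fd, '<?xml version="1.0"?>\n' )
        os.write( fd, '<tables>\n' )
        for elem in data_table_elems:
            os.write( fd, '    %s\n' % ElementTree.tostring( elem ).strip() )
        os.write( fd, '</tables>\n' )
        os.close( fd )
        full_path = os.path.abspath( tool_data_table_config_path )
        shutil.move( filename, full_path )
        os.chmod( full_path, 0644 )
Because the community (tool shed) app now builds its ToolDataTableManager without a config file, this persistence path only ever runs inside Galaxy.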
diff -r 16a93eb6eaf6b2f9b73bbe10d0e4fab1d1142b9f -r ec4584a0e500e755aa5436691318d442831d8787 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -152,7 +152,12 @@
if repository_tools_tups:
sample_files = metadata_dict.get( 'sample_files', [] )
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_data_table_entry( self.app, self.tool_path, sample_files, repository_tools_tups )
+ repository_tools_tups = handle_missing_data_table_entry( self.app,
+ tool_shed_repository,
+ changeset_revision,
+ self.tool_path,
+ repository_tools_tups,
+ work_dir )
# Handle missing index files for tool parameters that are dynamically generated select lists.
repository_tools_tups, sample_files_copied = handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
diff -r 16a93eb6eaf6b2f9b73bbe10d0e4fab1d1142b9f -r ec4584a0e500e755aa5436691318d442831d8787 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -6,7 +6,7 @@
to modify the tool configurations.
"""
-import logging, sys, os.path
+import logging, sys, os, os.path, tempfile, shutil
from galaxy import util
log = logging.getLogger( __name__ )
@@ -15,6 +15,9 @@
"""Manages a collection of tool data tables"""
def __init__( self, config_filename=None ):
self.data_tables = {}
+ # Store config elements for on-the-fly persistence.
+ self.data_table_elems = []
+ self.data_table_elem_names = []
if config_filename:
self.load_from_config_file( config_filename )
def __getitem__( self, key ):
@@ -29,14 +32,19 @@
type = table_elem.get( 'type', 'tabular' )
assert type in tool_data_table_types, "Unknown data table type '%s'" % type
table_elems.append( table_elem )
+ table_elem_name = table_elem.get( 'name', None )
+ if table_elem_name and table_elem_name not in self.data_table_elem_names:
+ self.data_table_elem_names.append( table_elem_name )
+ self.data_table_elems.append( table_elem )
table = tool_data_table_types[ type ]( table_elem )
if table.name not in self.data_tables:
self.data_tables[ table.name ] = table
log.debug( "Loaded tool data table '%s'", table.name )
return table_elems
- def add_new_entries_from_config_file( self, config_filename ):
+ def add_new_entries_from_config_file( self, config_filename, tool_data_table_config_path ):
"""
- We have 2 cases to handle, files whose root tag is <tables>, for example:
+ This method is called when a tool shed repository that includes a tool_data_table_conf.xml.sample file is being
+ installed into a local galaxy instance. We have 2 cases to handle, files whose root tag is <tables>, for example:
<tables><!-- Location of Tmap files --><table name="tmap_indexes" comment_char="#">
@@ -53,6 +61,8 @@
"""
tree = util.parse_xml( config_filename )
root = tree.getroot()
+ # Make a copy of the current list of data_table_elem_names so we can persist later if changes to the config file are necessary.
+ original_data_table_elem_names = [ name for name in self.data_table_elem_names ]
if root.tag == 'tables':
table_elems = self.load_from_config_file( config_filename )
else:
@@ -60,11 +70,31 @@
type = root.get( 'type', 'tabular' )
assert type in tool_data_table_types, "Unknown data table type '%s'" % type
table_elems.append( root )
+ table_elem_name = root.get( 'name', None )
+ if table_elem_name and table_elem_name not in self.data_table_elem_names:
+ self.data_table_elem_names.append( table_elem_name )
+ self.data_table_elems.append( root )
table = tool_data_table_types[ type ]( root )
if table.name not in self.data_tables:
self.data_tables[ table.name ] = table
- log.debug( "Loaded tool data table '%s", table.name )
+ log.debug( "Added new tool data table '%s'", table.name )
+ if self.data_table_elem_names != original_data_table_elem_names:
+ # Persist Galaxy's version of the changed tool_data_table_conf.xml file.
+ self.to_xml_file( tool_data_table_config_path )
return table_elems
+ def to_xml_file( self, tool_data_table_config_path ):
+ """Write the current in-memory version of the tool_data-table_conf.xml file to disk."""
+ full_path = os.path.abspath( tool_data_table_config_path )
+ fd, filename = tempfile.mkstemp()
+ os.write( fd, '<?xml version="1.0"?>\n' )
+ os.write( fd, "<!-- Use the file tool_data_table_conf.xml.oldlocstyle if you don't want to update your loc files as changed in revision 4550:535d276c92bc-->\n" )
+ os.write( fd, '<tables>\n' )
+ for elem in self.data_table_elems:
+ os.write( fd, '%s' % util.xml_to_string( elem ) )
+ os.write( fd, '</tables>\n' )
+ os.close( fd )
+ shutil.move( filename, full_path )
+ os.chmod( full_path, 0644 )
class ToolDataTable( object ):
def __init__( self, config_element ):
diff -r 16a93eb6eaf6b2f9b73bbe10d0e4fab1d1142b9f -r ec4584a0e500e755aa5436691318d442831d8787 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1003,10 +1003,10 @@
return shed_url
# The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
return None
-def handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups ):
+def handle_missing_data_table_entry( app, repository, changeset_revision, tool_path, repository_tools_tups, dir ):
"""
Inspect each tool to see if any have input parameters that are dynamically generated select lists that require entries in the
- tool_data_table_conf.xml file. This method is not called only from Galaxy (not the tool shed) when a repository is being installed.
+ tool_data_table_conf.xml file. This method is called only from Galaxy (not the tool shed) when a repository is being installed.
"""
missing_data_table_entry = False
for index, repository_tools_tup in enumerate( repository_tools_tups ):
@@ -1015,13 +1015,11 @@
missing_data_table_entry = True
break
if missing_data_table_entry:
- sample_file = None
# The repository must contain a tool_data_table_conf.xml.sample file that includes all required entries for all tools in the repository.
- for sample_file in sample_files:
- sample_file_name = strip_path( sample_file )
- if sample_file_name == 'tool_data_table_conf.xml.sample':
- break
- error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_file )
+ sample_tool_data_table_conf = get_config_from_repository( app, 'tool_data_table_conf.xml.sample', repository, changeset_revision, dir )
+ # Add entries to the ToolDataTableManager's in-memory data_tables dictionary as well as the list of data_table_elems and the list of
+ # data_table_elem_names.
+ error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf )
if error:
# TODO: Do more here than logging an exception.
log.debug( correction_msg )
@@ -1059,47 +1057,16 @@
return repository_tools_tups, sample_files_copied
def handle_sample_tool_data_table_conf_file( app, filename ):
"""
- Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary as well as appending them to the
- shed's tool_data_table_conf.xml file on disk.
+ Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary as well as appending them to
+ Galaxy's tool_data_table_conf.xml file on disk.
"""
- # TODO: Load an in-memory version of the tool_data_table_conf.xml file, and write it to disk
- # from the in-memory version only when changes are made.
error = False
message = ''
try:
- new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( filename )
+ new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( filename, app.config.tool_data_table_config_path )
except Exception, e:
message = str( e )
error = True
- """
- # TODO: eliminate this - the shed should not need to write this to disk...
- if not error:
- # Add an entry to the end of the tool_data_table_conf.xml file.
- tdt_config = "%s/tool_data_table_conf.xml" % app.config.root
- if os.path.exists( tdt_config ):
- # Make a backup of the file since we're going to be changing it.
- today = date.today()
- backup_date = today.strftime( "%Y_%m_%d" )
- tdt_config_copy = '%s/tool_data_table_conf.xml_%s_backup' % ( app.config.root, backup_date )
- shutil.copy( os.path.abspath( tdt_config ), os.path.abspath( tdt_config_copy ) )
- # Write each line of the tool_data_table_conf.xml file, except the last line to a temp file.
- fh = tempfile.NamedTemporaryFile( 'wb' )
- tmp_filename = fh.name
- fh.close()
- new_tdt_config = open( tmp_filename, 'wb' )
- for i, line in enumerate( open( tdt_config, 'rb' ) ):
- if line.find( '</tables>' ) >= 0:
- for new_table_elem in new_table_elems:
- new_tdt_config.write( ' %s\n' % util.xml_to_string( new_table_elem ).rstrip( '\n' ) )
- new_tdt_config.write( '</tables>\n' )
- else:
- new_tdt_config.write( line )
- new_tdt_config.close()
- shutil.move( tmp_filename, os.path.abspath( tdt_config ) )
- else:
- message = "The required file named tool_data_table_conf.xml does not exist in the Galaxy install directory."
- error = True
- """
return error, message
def handle_tool_dependencies( app, tool_shed_repository, installed_changeset_revision, tool_dependencies_config ):
"""
@@ -1208,11 +1175,20 @@
tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
repository_tools_tups = get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
+ # Handle missing data table entries for tool parameters that are dynamically generated select lists.
work_dir = make_tmp_directory()
+ repository_tools_tups = handle_missing_data_table_entry( trans.app,
+ tool_shed_repository,
+ changeset_revision,
+ tool_path,
+ repository_tools_tups,
+ work_dir )
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ # Handle missing index files for tool parameters that are dynamically generated select lists.
sample_files = metadata_dict.get( 'sample_files', [] )
- # Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_data_table_entry( trans.app, tool_path, sample_files, repository_tools_tups )
- # Handle missing index files for tool parameters that are dynamically generated select lists.
repository_tools_tups, sample_files_copied = handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
copy_sample_files( trans.app, sample_files, sample_files_copied=sample_files_copied )
@@ -1406,7 +1382,7 @@
trans.app.toolbox.write_integrated_tool_panel_config_file()
def reset_tool_data_tables( app ):
# Reset the tool_data_tables to an empty dictionary.
- app.tool_data_tables = galaxy.tools.data.ToolDataTableManager()
+ app.tool_data_tables.data_tables = {}
def strip_path( fpath ):
file_path, file_name = os.path.split( fpath )
return file_name
diff -r 16a93eb6eaf6b2f9b73bbe10d0e4fab1d1142b9f -r ec4584a0e500e755aa5436691318d442831d8787 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -613,6 +613,7 @@
else:
ctx_rev = repository.ctx_rev
clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
+ tool_section = None
if repository.includes_tools:
# Get the location in the tool panel in which each tool was originally loaded.
metadata = repository.metadata
@@ -668,19 +669,19 @@
tool_section = trans.app.toolbox.tool_panel[ section_key ]
else:
tool_section = None
- tool_shed_repository, metadata_dict, error_message = load_repository_contents( trans,
- repository_name=repository.name,
- description=repository.description,
- owner=repository.owner,
- changeset_revision=repository.installed_changeset_revision,
- ctx_rev=ctx_rev,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- tool_shed=repository.tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf,
- install_tool_dependencies=install_tool_dependencies )
+ tool_shed_repository, metadata_dict, error_message = load_repository_contents( trans,
+ repository_name=repository.name,
+ description=repository.description,
+ owner=repository.owner,
+ changeset_revision=repository.installed_changeset_revision,
+ ctx_rev=ctx_rev,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ tool_shed=repository.tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf,
+ install_tool_dependencies=install_tool_dependencies )
if error_message:
message += error_message
status = 'error'
diff -r 16a93eb6eaf6b2f9b73bbe10d0e4fab1d1142b9f -r ec4584a0e500e755aa5436691318d442831d8787 lib/galaxy/webapps/community/app.py
--- a/lib/galaxy/webapps/community/app.py
+++ b/lib/galaxy/webapps/community/app.py
@@ -36,8 +36,8 @@
self.security = security.SecurityHelper( id_secret=self.config.id_secret )
# Tag handler
self.tag_handler = CommunityTagHandler()
- # Tool data tables
- self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_table_config_path )
+ # Tool data tables - never pass a config file here because the tool shed should always have an empty dictionary!
+ self.tool_data_tables = galaxy.tools.data.ToolDataTableManager()
# The tool shed has no toolbox, but this attribute is still required.
self.toolbox = None
# Load security policy
diff -r 16a93eb6eaf6b2f9b73bbe10d0e4fab1d1142b9f -r ec4584a0e500e755aa5436691318d442831d8787 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -201,23 +201,18 @@
if options:
if options.tool_data_table or options.missing_tool_data_table_name:
# Make sure the repository contains a tool_data_table_conf.xml.sample file.
- sample_found = False
- for sample_file in sample_files:
- sample_file_name = strip_path( sample_file )
- if sample_file_name == 'tool_data_table_conf.xml.sample':
- sample_found = True
- error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, sample_file )
- if error:
- can_set_metadata = False
- invalid_files.append( ( sample_file_name, correction_msg ) )
- else:
- options.missing_tool_data_table_name = None
- break
- if not sample_found:
+ sample_tool_data_table_conf = get_config( 'tool_data_table_conf.xml.sample', repo, repo_dir, ctx, dir )
+ if sample_tool_data_table_conf:
+ error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, sample_tool_data_table_conf )
+ if error:
+ can_set_metadata = False
+ invalid_files.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
+ else:
+ options.missing_tool_data_table_name = None
+ else:
can_set_metadata = False
- correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. "
- correction_msg += "Upload a file named tool_data_table_conf.xml.sample to the repository "
- correction_msg += "that includes the required entry to correct this error.<br/>"
+ correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample "
+ correction_msg += "to the repository that includes the required entry to correct this error.<br/>"
invalid_files.append( ( xml_file_in_ctx, correction_msg ) )
if options.index_file or options.missing_index_file:
# Make sure the repository contains the required xxx.loc.sample file.
@@ -245,8 +240,8 @@
correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
invalid_files.append( ( xml_file_in_ctx, correction_msg ) )
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
return sample_files_copied, can_set_metadata, invalid_files
def clean_repository_metadata( trans, id, changeset_revisions ):
# Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
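For context, here is a minimal standalone sketch (not Galaxy's actual ToolDataTableManager; the class name and XML layout are invented for illustration) of the idea the tool shed now relies on: a tool data table registry that only loads entries when a config file is supplied, so a manager created with no file always starts as an empty dictionary.

import xml.etree.ElementTree as ET

class SimpleToolDataTableManager(object):
    """Illustrative stand-in: a registry that stays empty unless a config file is given."""
    def __init__(self, config_filename=None):
        self.data_tables = {}
        if config_filename:
            self.load_from_config_file(config_filename)
    def load_from_config_file(self, config_filename):
        # Assumes a layout like <tables><table name="...">...</table></tables>.
        for table_elem in ET.parse(config_filename).getroot().findall('table'):
            name = table_elem.get('name')
            if name:
                self.data_tables[name] = table_elem

# Galaxy webapp: tables come from a real config file on disk.
# Tool shed webapp: SimpleToolDataTableManager() -> always an empty dictionary.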
Repository URL: https://bitbucket.org/galaxy/galaxy-central/

commit/galaxy-central: greg: Load datatype converters and display applications included in installed tool shed repositories after the app's toolbox has been initialized.
by Bitbucket 30 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/16a93eb6eaf6/
changeset: 16a93eb6eaf6
user: greg
date: 2012-05-30 15:46:25
summary: Load datatype converters and display applications included in installed tool shed repositories after the app's toolbox has been initialized.
affected #: 4 files
diff -r 356aa8a00cfdd8c1d29f7065601f55120d241005 -r 16a93eb6eaf6b2f9b73bbe10d0e4fab1d1142b9f lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -62,7 +62,6 @@
# However, if there is a conflict (2 datatypes with the same extension) between a proprietary datatype and a datatype
# in the Galaxy distribution, the datatype in the Galaxy distribution will take precedence. If there is a conflict
# between 2 proprietary datatypes, the datatype from the repository that was installed earliest will take precedence.
- # This will also load proprietary datatype converters and display applications.
self.installed_repository_manager.load_proprietary_datatypes()
# Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
@@ -84,6 +83,8 @@
if self.config.get_bool( 'enable_tool_shed_check', False ):
from tool_shed import update_manager
self.update_manager = update_manager.UpdateManager( self )
+ # Load proprietary datatype converters and display applications.
+ self.installed_repository_manager.load_proprietary_converters_and_display_applications()
# Load datatype display applications defined in local datatypes_conf.xml
self.datatypes_registry.load_display_applications()
# Load datatype converters defined in local datatypes_conf.xml
diff -r 356aa8a00cfdd8c1d29f7065601f55120d241005 -r 16a93eb6eaf6b2f9b73bbe10d0e4fab1d1142b9f lib/galaxy/tool_shed/__init__.py
--- a/lib/galaxy/tool_shed/__init__.py
+++ b/lib/galaxy/tool_shed/__init__.py
@@ -19,6 +19,7 @@
self.tool_configs = self.app.config.tool_configs
if self.app.config.migrated_tools_config not in self.tool_configs:
self.tool_configs.append( self.app.config.migrated_tools_config )
+ self.installed_repository_dicts = []
def get_repository_install_dir( self, tool_shed_repository ):
for tool_config in self.tool_configs:
tree = ElementTree.parse( tool_config )
@@ -43,5 +44,13 @@
.order_by( self.model.ToolShedRepository.table.c.id ):
relative_install_dir = self.get_repository_install_dir( tool_shed_repository )
if relative_install_dir:
- galaxy.util.shed_util.load_datatype_items( self.app, tool_shed_repository, relative_install_dir )
+ installed_repository_dict = galaxy.util.shed_util.load_installed_datatypes( self.app, tool_shed_repository, relative_install_dir )
+ if installed_repository_dict:
+ self.installed_repository_dicts.append( installed_repository_dict )
+ def load_proprietary_converters_and_display_applications( self, deactivate=False ):
+ for installed_repository_dict in self.installed_repository_dicts:
+ if installed_repository_dict[ 'converter_path' ]:
+ galaxy.util.shed_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=deactivate )
+ if installed_repository_dict[ 'display_path' ]:
+ galaxy.util.shed_util.load_installed_display_applications( installed_repository_dict, deactivate=deactivate )
\ No newline at end of file
diff -r 356aa8a00cfdd8c1d29f7065601f55120d241005 -r 16a93eb6eaf6b2f9b73bbe10d0e4fab1d1142b9f lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1152,10 +1152,14 @@
parent_id=tool_version_using_parent_id.id )
sa_session.add( tool_version_association )
sa_session.flush()
-def load_datatype_items( app, repository, relative_install_dir, deactivate=False ):
- # Load proprietary datatypes.
+def load_installed_datatype_converters( app, installed_repository_dict, deactivate=False ):
+ # Load or deactivate proprietary datatype converters
+ app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=installed_repository_dict, deactivate=deactivate )
+def load_installed_datatypes( app, repository, relative_install_dir, deactivate=False ):
+ # Load proprietary datatypes and return information needed for loading proprietary datatypes converters and display applications later.
metadata = repository.metadata
work_dir = make_tmp_directory()
+ repository_dict = None
datatypes_config = get_config_from_repository( app,
'datatypes_conf.xml',
repository,
@@ -1173,16 +1177,14 @@
tool_dicts=metadata.get( 'tools', [] ),
converter_path=converter_path,
display_path=display_path )
- if converter_path:
- # Load or deactivate proprietary datatype converters
- app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=repository_dict, deactivate=deactivate )
- if display_path:
- # Load or deactivate proprietary datatype display applications
- app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict, deactivate=deactivate )
try:
shutil.rmtree( work_dir )
except:
pass
+ return repository_dict
+def load_installed_display_applications( installed_repository_dict, deactivate=False ):
+ # Load or deactivate proprietary datatype display applications
+ app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict, deactivate=deactivate )
def load_repository_contents( trans, repository_name, description, owner, changeset_revision, ctx_rev, tool_path, repository_clone_url,
relative_install_dir, tool_shed=None, tool_section=None, shed_tool_conf=None, install_tool_dependencies=False ):
"""
@@ -1403,8 +1405,8 @@
# Write the current in-memory version of the integrated_tool_panel.xml file to disk.
trans.app.toolbox.write_integrated_tool_panel_config_file()
def reset_tool_data_tables( app ):
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- app.tool_data_tables = galaxy.tools.data.ToolDataTableManager( app.config.tool_data_table_config_path )
+ # Reset the tool_data_tables to an empty dictionary.
+ app.tool_data_tables = galaxy.tools.data.ToolDataTableManager()
def strip_path( fpath ):
file_path, file_name = os.path.split( fpath )
return file_name
diff -r 356aa8a00cfdd8c1d29f7065601f55120d241005 -r 16a93eb6eaf6b2f9b73bbe10d0e4fab1d1142b9f lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -189,7 +189,11 @@
remove_from_tool_panel( trans, repository, shed_tool_conf, uninstall=remove_from_disk_checked )
if repository.includes_datatypes:
# Deactivate proprietary datatypes.
- load_datatype_items( trans.app, repository, repository_install_dir, deactivate=True )
+ installed_repository_dict = load_installed_datatypes( trans.app, repository, repository_install_dir, deactivate=True )
+ if installed_repository_dict[ 'converter_path' ]:
+ load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=True )
+ if installed_repository_dict[ 'display_path' ]:
+ load_installed_display_applications( installed_repository_dict, deactivate=True )
if remove_from_disk_checked:
try:
# Remove the repository from disk.
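As an aside, the following self-contained sketch (class name, method names, and the sample repository are invented for illustration, not Galaxy's InstalledRepositoryManager) shows the two-phase pattern this changeset introduces: record each repository's converter/display paths while proprietary datatypes are loaded, then load converters and display applications in a second pass once the toolbox exists, reusing the same loop with deactivate=True on deactivation.

class DeferredDatatypeLoader(object):
    def __init__(self):
        self.installed_repository_dicts = []

    def load_installed_datatypes(self, repositories):
        # Phase 1: runs while datatypes are loaded, before the toolbox exists.
        for repository in repositories:
            self.installed_repository_dicts.append({
                'name': repository['name'],
                'converter_path': repository.get('converter_path'),
                'display_path': repository.get('display_path'),
            })

    def load_converters_and_display_applications(self, deactivate=False):
        # Phase 2: runs after the toolbox has been initialized.
        actions = []
        for repository_dict in self.installed_repository_dicts:
            if repository_dict['converter_path']:
                actions.append(('converters', repository_dict['name'], deactivate))
            if repository_dict['display_path']:
                actions.append(('display_applications', repository_dict['name'], deactivate))
        return actions

loader = DeferredDatatypeLoader()
loader.load_installed_datatypes([
    {'name': 'example_repo', 'converter_path': 'converters', 'display_path': None},
])
print(loader.load_converters_and_display_applications())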
Repository URL: https://bitbucket.org/galaxy/galaxy-central/

commit/galaxy-central: jgoecks: Fix bug in showing/hiding filters in Trackster.
by Bitbucket 30 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/356aa8a00cfd/
changeset: 356aa8a00cfd
user: jgoecks
date: 2012-05-30 15:39:59
summary: Fix bug in showing/hiding filters in Trackster.
affected #: 1 file
diff -r 3dd591f01f6aae374142a62c25db98bacae5c4e3 -r 356aa8a00cfdd8c1d29f7065601f55120d241005 static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -2598,8 +2598,10 @@
* Update filter's slider.
*/
update_ui_elt: function () {
- // Only show filter if min != max because filter is not useful otherwise.
- if (this.min !== this.max) {
+ // Only show filter if min < max because filter is not useful otherwise. This
+ // covers all corner cases, such as when min, max have not been defined and
+ // when min == max.
+ if (this.min < this.max) {
this.parent_div.show();
}
else {
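The fix itself is in trackster.js; this small Python analog (the helper name is invented) just illustrates why "min < max" is the safer test: it hides the filter both when the bounds are equal and when they were never defined.

def filter_is_useful(min_value, max_value):
    # Treat undefined bounds like an empty range; 'min < max' also rejects min == max.
    if min_value is None or max_value is None:
        return False
    return min_value < max_value

assert filter_is_useful(0, 10)
assert not filter_is_useful(5, 5)        # min == max
assert not filter_is_useful(None, None)  # bounds never defined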
Repository URL: https://bitbucket.org/galaxy/galaxy-central/

30 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3dd591f01f6a/
changeset: 3dd591f01f6a
user: natefoo
date: 2012-05-30 10:31:56
summary: Handle spaces between commas in admin_users
affected #: 1 file
diff -r 37a28d5134b37b8450d9c266e1a5a6bb261461bf -r 3dd591f01f6aae374142a62c25db98bacae5c4e3 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -657,7 +657,7 @@
def user_is_admin( self ):
if self.api_inherit_admin:
return True
- admin_users = self.app.config.get( "admin_users", "" ).split( "," )
+ admin_users = [ x.strip() for x in self.app.config.get( "admin_users", "" ).split( "," ) ]
return self.user and admin_users and self.user.email in admin_users
def user_can_do_run_as( self ):
run_as_users = self.app.config.get( "api_allow_run_as", "" ).split( "," )
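A quick standalone illustration of the one-liner above (the email addresses are invented): without stripping, any entry listed after a comma-plus-space never matches an exact email comparison.

config_value = "alice@example.org, bob@example.org"

naive = config_value.split(",")
stripped = [x.strip() for x in config_value.split(",")]

print("bob@example.org" in naive)     # False -- the entry is " bob@example.org"
print("bob@example.org" in stripped)  # True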
Repository URL: https://bitbucket.org/galaxy/galaxy-central/

commit/galaxy-central: greg: Fixes for loading a tool in a tool shed repository.
by Bitbucket 29 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/37a28d5134b3/
changeset: 37a28d5134b3
user: greg
date: 2012-05-29 20:46:02
summary: Fixes for loading a tool in a tool shed repository.
affected #: 4 files
diff -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b -r 37a28d5134b37b8450d9c266e1a5a6bb261461bf lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -785,14 +785,14 @@
for filename in ctx:
ctx_file_name = strip_path( filename )
if ctx_file_name == config_file:
- return get_named_tmpfile_from_ctx( ctx, filename, dir=dir )
+ return get_named_tmpfile_from_ctx( ctx, filename, dir )
# Finally look in the repository manifest.
for changeset in repo.changelog:
prev_ctx = repo.changectx( changeset )
for ctx_file in prev_ctx.files():
ctx_file_name = strip_path( ctx_file )
if ctx_file_name == config_file:
- return get_named_tmpfile_from_ctx( prev_ctx, filename, dir=dir )
+ return get_named_tmpfile_from_ctx( prev_ctx, filename, dir )
return None
def get_config_from_disk( config_file, relative_install_dir ):
for root, dirs, files in os.walk( relative_install_dir ):
diff -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b -r 37a28d5134b37b8450d9c266e1a5a6bb261461bf lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -547,7 +547,7 @@
.filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
.order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \
.first()
-def get_named_tmpfile_from_ctx( ctx, filename, dir=None ):
+def get_named_tmpfile_from_ctx( ctx, filename, dir ):
fctx = ctx[ filename ]
fh = tempfile.NamedTemporaryFile( 'wb', dir=dir )
tmp_filename = fh.name
@@ -574,6 +574,28 @@
if parent_id is None:
# The tool did not change through all of the changeset revisions.
return old_id
+def get_previous_valid_changset_revision( repository, repo, before_changeset_revision ):
+ changeset_tups = []
+ for repository_metadata in repository.downloadable_revisions:
+ changeset_revision = repository_metadata.changeset_revision
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ if ctx:
+ rev = '%04d' % ctx.rev()
+ else:
+ rev = '-1'
+ changeset_tups.append( ( rev, changeset_revision ) )
+ previous_changeset_revision = None
+ current_changeset_revision = None
+ for changeset_tup in sorted( changeset_tups ):
+ current_changeset_revision = changeset_tup[ 1 ]
+ if current_changeset_revision == before_changeset_revision:
+ if previous_changeset_revision:
+ return previous_changeset_revision
+ else:
+ # Return the hash value of an empty repository changelog - note that this will not be a valid changeset revision.
+ return '000000000000'
+ else:
+ previous_changeset_revision = current_changeset_revision
def get_repository( trans, id ):
"""Get a repository from the database via id"""
return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
@@ -743,8 +765,42 @@
ToolClass = Tool
return ToolClass( config_file, root, trans.app )
return None
-def load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config ):
- tool_config = strip_path( tool_config )
+def load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config_filename ):
+ """
+ Return a loaded tool whose tool config file name (e.g., filtering.xml) is the value of tool_config_filename. The value of changeset_revision
+ is a valid (downloadable) changeset revision. If changeset_revision is the repository tip, then the tool will be loaded from its file on disk.
+ Otherwise, the tool config will be located in the repository manifest between the received valid changeset revision and the previous valid
+ changeset revision (if one exists) or the first changeset revision in the repository (if one doesn't).
+ """
+ def load_from_tmp_config( ctx, ctx_file, work_dir ):
+ tool = None
+ message = ''
+ tmp_tool_config = get_named_tmpfile_from_ctx( ctx, ctx_file, work_dir )
+ element_tree = util.parse_xml( tmp_tool_config )
+ element_tree_root = element_tree.getroot()
+ # Look for code files required by the tool config.
+ tmp_code_files = []
+ for code_elem in element_tree_root.findall( 'code' ):
+ code_file_name = code_elem.get( 'file' )
+ tmp_code_file_name = copy_file_from_manifest( repo, ctx, code_file_name, work_dir )
+ if tmp_code_file_name:
+ tmp_code_files.append( tmp_code_file_name )
+ try:
+ tool = load_tool( trans, tmp_tool_config )
+ except Exception, e:
+ tool = None
+ message = "Error loading tool: %s. Clicking <b>Reset metadata</b> may correct this error." % str( e )
+ for tmp_code_file in tmp_code_files:
+ try:
+ os.unlink( tmp_code_file )
+ except:
+ pass
+ try:
+ os.unlink( tmp_tool_config )
+ except:
+ pass
+ return tool, message
+ tool_config_filename = strip_path( tool_config_filename )
repository = get_repository( trans, repository_id )
repo_files_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_files_dir )
@@ -757,48 +813,39 @@
if tool_data_table_config:
error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
if changeset_revision == repository.tip:
+ # Load the tool from its config file on disk.
try:
- copied_tool_config = copy_file_from_disk( tool_config, repo_files_dir, work_dir )
+ copied_tool_config = copy_file_from_disk( tool_config_filename, repo_files_dir, work_dir )
tool = load_tool( trans, copied_tool_config )
except Exception, e:
tool = None
- message = "Error loading tool: %s." % str( e )
+ message = "Error loading tool from config '%s': %s." % ( tool_config_filename, str( e ) )
else:
- # Get the tool config file name from the hgweb url, something like: /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml
- old_tool_config_file_name = tool_config.split( '/' )[ -1 ]
- in_ctx = False
+ found = False
+ tool = None
+ # Get the tool config from ctx if present.
for ctx_file in ctx.files():
ctx_file_name = strip_path( ctx_file )
- if ctx_file_name == old_tool_config_file_name:
- in_ctx = True
+ if ctx_file_name == tool_config_filename:
+ found = True
break
- if in_ctx:
- tmp_tool_config = get_named_tmpfile_from_ctx( ctx, ctx_file, dir=work_dir )
- element_tree = util.parse_xml( tmp_tool_config )
- element_tree_root = element_tree.getroot()
- # Look for code files required by the tool config.
- tmp_code_files = []
- for code_elem in element_tree_root.findall( 'code' ):
- code_file_name = code_elem.get( 'file' )
- tmp_code_file_name = copy_file_from_manifest( repo, ctx, code_file_name, work_dir )
- if tmp_code_file_name:
- tmp_code_files.append( tmp_code_file_name )
- try:
- tool = load_tool( trans, tmp_tool_config )
- except Exception, e:
- tool = None
- message = "Error loading tool: %s. Clicking <b>Reset metadata</b> may correct this error." % str( e )
- for tmp_code_file in tmp_code_files:
- try:
- os.unlink( tmp_code_file )
- except:
- pass
- try:
- os.unlink( tmp_tool_config )
- except:
- pass
+ if found:
+ tool, message = load_from_tmp_config( ctx, ctx_file, work_dir )
else:
- tool = None
+ # Get the tool config from the repository manifest between valid changeset revisions.
+ previous_valid_changset_revision = get_previous_valid_changset_revision( repository, repo, changeset_revision )
+ for changeset in reversed_filtered_changelog( repo, previous_valid_changset_revision, changeset_revision ):
+ manifest_changeset_revision = str( repo.changectx( changeset ) )
+ manifest_ctx = repo.changectx( changeset )
+ for ctx_file in manifest_ctx.files():
+ ctx_file_name = strip_path( ctx_file )
+ if ctx_file_name == tool_config_filename:
+ found = True
+ break
+ if found:
+ tool, message = load_from_tmp_config( manifest_ctx, ctx_file, work_dir )
+ break
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( trans.app )
try:
@@ -811,7 +858,7 @@
tool = None
valid = False
error_message = ''
- tmp_config = get_named_tmpfile_from_ctx( ctx, filename, dir=dir )
+ tmp_config = get_named_tmpfile_from_ctx( ctx, filename, dir )
if not ( check_binary( tmp_config ) or check_image( tmp_config ) or check_gzip( tmp_config )[ 0 ]
or check_bz2( tmp_config )[ 0 ] or check_zip( tmp_config ) ):
try:
@@ -979,6 +1026,26 @@
clean_repository_metadata( trans, id, changeset_revisions )
add_repository_metadata_tool_versions( trans, id, changeset_revisions )
return '', 'ok'
+def reversed_filtered_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
+ """
+ Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, but up to and
+ including the included_upper_bounds_changeset_revision. The value of excluded_lower_bounds_changeset_revision will be '000000000000'
+ if no valid changesets exist before included_upper_bounds_changeset_revision.
+ """
+ if excluded_lower_bounds_changeset_revision == '000000000000':
+ appending_started = True
+ else:
+ appending_started = False
+ reversed_changelog = []
+ for changeset in repo.changelog:
+ changeset_hash = str( repo.changectx( changeset ) )
+ if appending_started:
+ reversed_changelog.insert( 0, changeset )
+ if changeset_hash == excluded_lower_bounds_changeset_revision and not appending_started:
+ appending_started = True
+ if changeset_hash == included_upper_bounds_changeset_revision:
+ break
+ return reversed_changelog
def set_repository_metadata( trans, id, changeset_revision, content_alert_str='', **kwd ):
"""
Set repository metadata on the repository tip, returning specific error messages (if any) to alert the repository owner that the changeset
diff -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b -r 37a28d5134b37b8450d9c266e1a5a6bb261461bf lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -402,7 +402,6 @@
url_args=url_args,
allow_multiple=False,
async_compatible=False ) ]
- # Render the list view
return self.valid_repository_list_grid( trans, **kwd )
@web.expose
def browse_invalid_tools( self, trans, **kwd ):
@@ -1891,7 +1890,7 @@
for filename in ctx:
ctx_file_name = strip_path( filename )
if ctx_file_name == tool_config:
- tool_config_path = get_named_tmpfile_from_ctx( ctx, filename, dir=work_dir )
+ tool_config_path = get_named_tmpfile_from_ctx( ctx, filename, work_dir )
break
metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans,
repo,
diff -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b -r 37a28d5134b37b8450d9c266e1a5a6bb261461bf lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -43,7 +43,6 @@
# See the handle_email_alerts() method for the definition of the scenarios.
new_repo_alert = repository.is_new
if params.get( 'upload_button', False ):
- current_working_dir = os.getcwd()
if file_data == '' and url == '':
message = 'No files were entered on the upload form.'
status = 'error'
@@ -270,9 +269,6 @@
error, message = handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
if error:
return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
- #if filename_in_archive.endswith( '.loc.sample' ):
- # # Handle the special case where a xxx.loc.sample file is being uploaded by copying it to ~/tool-data/xxx.loc.
- # copy_sample_file( trans.app, filename_in_archive )
commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
# See if the content of the change set was valid.
admin_only = len( repository.downloadable_revisions ) != 1
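For readers following the new manifest search, here is a standalone sketch of the bounded, reversed changelog walk that reversed_filtered_changelog performs, using a plain list of changeset hashes in place of a Mercurial repository (the hash values are invented):

def reversed_filtered_changelog(changelog, excluded_lower, included_upper):
    # Collect changesets after excluded_lower, up to and including included_upper,
    # newest first. '000000000000' means no earlier valid changeset exists.
    appending_started = excluded_lower == '000000000000'
    reversed_changelog = []
    for changeset_hash in changelog:
        if appending_started:
            reversed_changelog.insert(0, changeset_hash)
        if changeset_hash == excluded_lower and not appending_started:
            appending_started = True
        if changeset_hash == included_upper:
            break
    return reversed_changelog

changelog = ['aaa', 'bbb', 'ccc', 'ddd']
print(reversed_filtered_changelog(changelog, 'bbb', 'ddd'))          # ['ddd', 'ccc']
print(reversed_filtered_changelog(changelog, '000000000000', 'bbb')) # ['bbb', 'aaa']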
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b129ae3fc035/
changeset: b129ae3fc035
user: jgoecks
date: 2012-05-28 18:05:20
summary: Move icons from trackster.css to base.css for general usage.
affected #: 5 files
diff -r 03cb8ee86726813073913be149516ab10e601e55 -r b129ae3fc0359f08b9f47ae33230249a4ce0975e static/june_2007_style/base_sprites.less.tmpl
--- a/static/june_2007_style/base_sprites.less.tmpl
+++ b/static/june_2007_style/base_sprites.less.tmpl
@@ -102,14 +102,28 @@
-sprite-group: fugue;
-sprite-image: silk/chart_curve.png;
}
-
+.icon-button.disk--arrow {
+ -sprite-group: fugue;
+ -sprite-image: fugue/disk--arrow-bw.png;
+}
+.icon-button.disk--arrow:hover {
+ -sprite-group: fugue;
+ -sprite-image: fugue/disk--arrow.png;
+}
+.icon-button.cross-circle {
+ -sprite-group: fugue;
+ -sprite-image: fugue/cross-circle-bw.png;
+}
+.icon-button.cross-circle:hover {
+ -sprite-group: fugue;
+ -sprite-image: fugue/cross-circle.png;
+}
.text-and-autocomplete-select {
-sprite-group: fugue;
-sprite-image: fugue/control-270.png;
-sprite-horiz-position: right;
}
-
div.historyItem-error .state-icon {
-sprite-group: history-states;
-sprite-image: data_error.png;
diff -r 03cb8ee86726813073913be149516ab10e601e55 -r b129ae3fc0359f08b9f47ae33230249a4ce0975e static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -754,7 +754,11 @@
.icon-button.plus-button:hover{background:url(fugue.png) no-repeat 0px -364px;}
.icon-button.gear{background:url(fugue.png) no-repeat 0px -390px;}
.icon-button.chart_curve{background:url(fugue.png) no-repeat 0px -416px;}
-.text-and-autocomplete-select{background:url(fugue.png) no-repeat right -442px;}
+.icon-button.disk--arrow{background:url(fugue.png) no-repeat 0px -442px;}
+.icon-button.disk--arrow:hover{background:url(fugue.png) no-repeat 0px -468px;}
+.icon-button.cross-circle{background:url(fugue.png) no-repeat 0px -494px;}
+.icon-button.cross-circle:hover{background:url(fugue.png) no-repeat 0px -520px;}
+.text-and-autocomplete-select{background:url(fugue.png) no-repeat right -546px;}
div.historyItem-error .state-icon{background:url(history-states.png) no-repeat 0px 0px;}
div.historyItem-empty .state-icon{background:url(history-states.png) no-repeat 0px -25px;}
div.historyItem-queued .state-icon{background:url(history-states.png) no-repeat 0px -50px;}
diff -r 03cb8ee86726813073913be149516ab10e601e55 -r b129ae3fc0359f08b9f47ae33230249a4ce0975e static/june_2007_style/blue/fugue.png
Binary file static/june_2007_style/blue/fugue.png has changed
diff -r 03cb8ee86726813073913be149516ab10e601e55 -r b129ae3fc0359f08b9f47ae33230249a4ce0975e static/june_2007_style/blue/trackster.css
--- a/static/june_2007_style/blue/trackster.css
+++ b/static/june_2007_style/blue/trackster.css
@@ -84,16 +84,12 @@
.icon-button.toggle:hover{background:transparent url(../images/fugue/toggle.png) no-repeat;}
.icon-button.toggle-expand{background:transparent url(../images/fugue/toggle-expand-bw.png) no-repeat;margin-right:0px;}
.icon-button.toggle-expand:hover{background:transparent url(../images/fugue/toggle-expand.png) no-repeat;}
-.icon-button.cross-circle{background:transparent url(../images/fugue/cross-circle-bw.png) no-repeat;margin-right:0px;}
-.icon-button.cross-circle:hover{background:transparent url(../images/fugue/cross-circle.png) no-repeat;}
.icon-button.block--plus{background:transparent url(../images/fugue/block--plus-bw.png) no-repeat;}
.icon-button.block--plus:hover{background:transparent url(../images/fugue/block--plus.png) no-repeat;}
.icon-button.bookmarks{background:transparent url(../images/fugue/bookmarks-bw.png) no-repeat;}
.icon-button.bookmarks:hover{background:transparent url(../images/fugue/bookmarks.png) no-repeat;}
.icon-button.toolbox{background:transparent url(../images/fugue/toolbox-bw.png) no-repeat;}
.icon-button.toolbox:hover{background:transparent url(../images/fugue/toolbox.png) no-repeat;}
-.icon-button.disk--arrow{background:transparent url(../images/fugue/disk--arrow-bw.png) no-repeat;}
-.icon-button.disk--arrow:hover{background:transparent url(../images/fugue/disk--arrow.png) no-repeat;}
.child-track-icon{background:url('../images/fugue/arrow-000-small-bw.png') no-repeat;width:30px;cursor:move;}
.track-resize{background:white url('../images/visualization/draggable_vertical.png') no-repeat top center;position:absolute;right:3px;bottom:-4px;width:14px;height:7px;border:solid #999 1px;z-index:100;}
.bookmark{background:white;border:solid #999 1px;border-right:none;margin:0.5em;margin-right:0;padding:0.5em;}
diff -r 03cb8ee86726813073913be149516ab10e601e55 -r b129ae3fc0359f08b9f47ae33230249a4ce0975e static/june_2007_style/trackster.css.tmpl
--- a/static/june_2007_style/trackster.css.tmpl
+++ b/static/june_2007_style/trackster.css.tmpl
@@ -405,12 +405,6 @@
.icon-button.toggle-expand:hover {
background: transparent url(../images/fugue/toggle-expand.png) no-repeat;
}
-.icon-button.cross-circle {
- background: transparent url(../images/fugue/cross-circle-bw.png) no-repeat;margin-right:0px;
-}
-.icon-button.cross-circle:hover {
- background: transparent url(../images/fugue/cross-circle.png) no-repeat;
-}
.icon-button.block--plus {
background: transparent url(../images/fugue/block--plus-bw.png) no-repeat;
}
@@ -429,12 +423,6 @@
.icon-button.toolbox:hover {
background: transparent url(../images/fugue/toolbox.png) no-repeat;
}
-.icon-button.disk--arrow {
- background: transparent url(../images/fugue/disk--arrow-bw.png) no-repeat;
-}
-.icon-button.disk--arrow:hover {
- background: transparent url(../images/fugue/disk--arrow.png) no-repeat;
-}
.child-track-icon {
background:url('../images/fugue/arrow-000-small-bw.png') no-repeat;
width: 30px;
https://bitbucket.org/galaxy/galaxy-central/changeset/9765176c63f7/
changeset: 9765176c63f7
user: jgoecks
date: 2012-05-28 18:10:04
summary: Visualization: (a) rename circos viz to 'circster' and (b) foundational work for circster and backbone-visualization integration. Also pack scripts.
affected #: 11 files
diff -r b129ae3fc0359f08b9f47ae33230249a4ce0975e -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -277,7 +277,7 @@
class UsesVisualizationMixin( SharableItemSecurityMixin ):
""" Mixin for controllers that use Visualization objects. """
- viz_types = [ "trackster", "circos" ]
+ viz_types = [ "trackster", "circster" ]
len_files = None
@@ -360,7 +360,7 @@
end = config['viewport']['end']
overview = config['viewport']['overview']
vis_rev.config[ "viewport" ] = { 'chrom': chrom, 'start': start, 'end': end, 'overview': overview }
- elif type == 'circos':
+ elif type == 'circster':
# TODO.
pass
diff -r b129ae3fc0359f08b9f47ae33230249a4ce0975e -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -758,7 +758,7 @@
@web.expose
@web.require_login( "use Galaxy visualizations", use_panels=True )
- def circos( self, trans, hda_ldda, dataset_id ):
+ def circster( self, trans, hda_ldda, dataset_id ):
# Get dataset.
dataset = self._get_dataset( trans, hda_ldda, dataset_id )
@@ -777,7 +777,7 @@
summary = indexer.get_summary( chrom_info[ 'chrom' ], 0, chrom_info[ 'len' ], level=4 )
dataset_summary.append( summary )
- return trans.fill_template_mako( "visualization/circos.mako", dataset=dataset, dataset_summary=dataset_summary, genome=genome )
+ return trans.fill_template_mako( "visualization/circster.mako", dataset=dataset, dataset_summary=dataset_summary, genome=genome )
# -----------------
# Helper methods.
diff -r b129ae3fc0359f08b9f47ae33230249a4ce0975e -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b static/scripts/mvc/tools.js
--- a/static/scripts/mvc/tools.js
+++ b/static/scripts/mvc/tools.js
@@ -181,6 +181,7 @@
* applies search results as they become available.
*/
var ToolPanel = Backbone.Collection.extend({
+ // TODO: need to generate this using url_for
url: "/tools",
tools: new ToolCollection(),
diff -r b129ae3fc0359f08b9f47ae33230249a4ce0975e -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b static/scripts/mvc/ui.js
--- a/static/scripts/mvc/ui.js
+++ b/static/scripts/mvc/ui.js
@@ -1,5 +1,24 @@
/**
- * Utility models and views for Galaxy objects.
+ * -- Functions for creating large UI elements. --
+ */
+
+/**
+ * Returns an IconButtonMenuView for the provided configuration.
+ * Configuration is a list of dictionaries where each dictionary
+ * defines an icon button. Each dictionary must have the following
+ * elements: icon_class, title, and on_click.
+ */
+var create_icon_buttons_menu = function(config) {
+ // Create and initialize menu.
+ var buttons = new IconButtonCollection(
+ _.map(config, function(button_config) { return new IconButton(button_config); })
+ );
+
+ return new IconButtonMenuView( {collection: buttons} );
+};
+
+/**
+ * -- Utility models and views for Galaxy objects. --
*/
/**
diff -r b129ae3fc0359f08b9f47ae33230249a4ce0975e -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b static/scripts/mvc/visualization.js
--- a/static/scripts/mvc/visualization.js
+++ b/static/scripts/mvc/visualization.js
@@ -26,9 +26,77 @@
});
/**
+ * A genome browser bookmark.
+ */
+var BrowserBookmark = Backbone.Model.extend({
+ defaults: {
+ chrom: null,
+ start: 0,
+ end: 0,
+ note: ""
+ }
+});
+
+/**
+ * Bookmarks collection.
+ */
+var BrowserBookmarks = Backbone.Collection.extend({
+ model: BrowserBookmark
+});
+
+/**
+ * A visualization.
+ */
+var Visualization = Backbone.RelationalModel.extend({
+ defaults: {
+ id: "",
+ title: "",
+ type: "",
+ dbkey: "",
+ datasets: []
+ }
+});
+
+/**
+ * A Trackster visualization.
+ */
+var TracksterVisualization = Visualization.extend({
+ defaults: {
+ bookmarks: [],
+ viewport: {}
+ }
+});
+
+/**
+ * A Circster visualization.
+ */
+var CircsterVisualization = Visualization.extend({
+});
+
+/**
+ * A dataset. In Galaxy, datasets are associated with a history, so
+ * this object is also known as a HistoryDatasetAssociation.
+ */
+var Dataset = Backbone.Model.extend({
+ defaults: {
+ id: "",
+ type: "",
+ name: "",
+ hda_ldda: ""
+ }
+});
+
+/**
* A histogram dataset.
*/
var HistogramDataset = Backbone.Model.extend({
+ /*
+ defaults: {
+ data: [],
+ dataset: null,
+ max: 0
+ },
+ */
initialize: function(data) {
// Set max across dataset.
@@ -42,9 +110,16 @@
});
/**
- * Layout for a histogram dataset in a circos visualization.
+ * Configuration data for a Trackster track.
*/
-var CircosHistogramDatasetLayout = Backbone.Model.extend({
+var TrackConfig = Backbone.Model.extend({
+
+});
+
+/**
+ * Layout for a histogram dataset in a circster visualization.
+ */
+var CircsterHistogramDatasetLayout = Backbone.Model.extend({
// TODO: should accept genome and dataset and use these to generate layout data.
/**
@@ -104,8 +179,8 @@
* -- Views --
*/
-var CircosView = Backbone.View.extend({
- className: 'circos',
+var CircsterView = Backbone.View.extend({
+ className: 'circster',
initialize: function(options) {
this.width = options.width;
@@ -124,7 +199,7 @@
dataset_arc_height = this.dataset_arc_height,
// Layout chromosome arcs.
- arcs_layout = new CircosHistogramDatasetLayout({
+ arcs_layout = new CircsterHistogramDatasetLayout({
genome: this.genome,
total_gap: this.total_gap
}),
@@ -205,3 +280,58 @@
this.view.go_to(new_loc);
}
});
+
+/**
+ * -- Helper functions.
+ */
+
+/**
+ * Use a popup grid to add more datasets.
+ */
+var add_datasets = function(dataset_url, add_track_async_url, success_fn) {
+ $.ajax({
+ url: dataset_url,
+ data: { "f-dbkey": view.dbkey },
+ error: function() { alert( "Grid failed" ); },
+ success: function(table_html) {
+ show_modal(
+ "Select datasets for new tracks",
+ table_html, {
+ "Cancel": function() {
+ hide_modal();
+ },
+ "Add": function() {
+ var requests = [];
+ $('input[name=id]:checked,input[name=ldda_ids]:checked').each(function() {
+ var data,
+ id = $(this).val();
+ if ($(this).attr("name") === "id") {
+ data = { hda_id: id };
+ } else {
+ data = { ldda_id: id};
+ }
+ requests[requests.length] = $.ajax({
+ url: add_track_async_url,
+ data: data,
+ dataType: "json",
+ });
+ });
+ // To preserve order, wait until there are definitions for all tracks and then add
+ // them sequentially.
+ $.when.apply($, requests).then(function() {
+ // jQuery always returns an Array for arguments, so need to look at first element
+ // to determine whether multiple requests were made and consequently how to
+ // map arguments to track definitions.
+ var track_defs = (arguments[0] instanceof Array ?
+ $.map(arguments, function(arg) { return arg[0]; }) :
+ [ arguments[0] ]
+ );
+ success_fn(track_defs);
+ });
+ hide_modal();
+ }
+ }
+ );
+ }
+ });
+};
diff -r b129ae3fc0359f08b9f47ae33230249a4ce0975e -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b static/scripts/packed/mvc/ui.js
--- a/static/scripts/packed/mvc/ui.js
+++ b/static/scripts/packed/mvc/ui.js
@@ -1,1 +1,1 @@
-var GalaxyPaths=Backbone.Model.extend({defaults:{root_path:"",image_path:""}});var IconButton=Backbone.Model.extend({defaults:{title:"",icon_class:"",on_click:null}});var IconButtonCollection=Backbone.Collection.extend({model:IconButton});var IconButtonMenuView=Backbone.View.extend({tagName:"div",render:function(){var a=this;this.collection.each(function(b){$("<a/>").attr("href","javascript:void(0)").attr("title",b.attributes.title).addClass("icon-button menu-button").addClass(b.attributes.icon_class).appendTo(a.$el).click(b.attributes.on_click)});return this}});var Grid=Backbone.Collection.extend({});var GridView=Backbone.View.extend({});
\ No newline at end of file
+var create_icon_buttons_menu=function(a){var b=new IconButtonCollection(_.map(a,function(c){return new IconButton(c)}));return new IconButtonMenuView({collection:b})};var GalaxyPaths=Backbone.Model.extend({defaults:{root_path:"",image_path:""}});var IconButton=Backbone.Model.extend({defaults:{title:"",icon_class:"",on_click:null}});var IconButtonCollection=Backbone.Collection.extend({model:IconButton});var IconButtonMenuView=Backbone.View.extend({tagName:"div",render:function(){var a=this;this.collection.each(function(b){$("<a/>").attr("href","javascript:void(0)").attr("title",b.attributes.title).addClass("icon-button menu-button").addClass(b.attributes.icon_class).appendTo(a.$el).click(b.attributes.on_click)});return this}});var Grid=Backbone.Collection.extend({});var GridView=Backbone.View.extend({});
\ No newline at end of file
diff -r b129ae3fc0359f08b9f47ae33230249a4ce0975e -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b static/scripts/packed/mvc/visualization.js
--- a/static/scripts/packed/mvc/visualization.js
+++ b/static/scripts/packed/mvc/visualization.js
@@ -1,1 +1,1 @@
-var Genome=Backbone.Model.extend({defaults:{name:null,key:null,chroms_info:null},get_chroms_info:function(){return this.attributes.chroms_info.chrom_info}});var HistogramDataset=Backbone.Model.extend({initialize:function(a){this.attributes.data=a;this.attributes.max=_.max(a,function(b){if(!b||typeof b==="string"){return 0}return b[1]})[1]}});var CircosHistogramDatasetLayout=Backbone.Model.extend({chroms_layout:function(){var b=this.attributes.genome.get_chroms_info(),d=d3.layout.pie().value(function(f){return f.len}).sort(null),e=d(b),a=this.attributes.total_gap/b.length,c=_.map(e,function(h,g){var f=h.endAngle-a;h.endAngle=(f>h.startAngle?f:h.startAngle);return h});return c},chrom_data_layout:function(j,b,g,f,h){if(!b||typeof b==="string"){return null}var d=b[0],i=b[3],c=d3.scale.linear().domain([0,h]).range([g,f]),e=d3.layout.pie().value(function(k){return i}).startAngle(j.startAngle).endAngle(j.endAngle),a=e(d);_.each(d,function(k,l){a[l].outerRadius=c(k[1])});return a}});var CircosView=Backbone.View.extend({className:"circos",initialize:function(a){this.width=a.width;this.height=a.height;this.total_gap=a.total_gap;this.genome=a.genome;this.dataset=a.dataset;this.radius_start=a.radius_start;this.dataset_arc_height=a.dataset_arc_height},render:function(){var d=this.radius_start,e=this.dataset_arc_height,j=new CircosHistogramDatasetLayout({genome:this.genome,total_gap:this.total_gap}),i=j.chroms_layout(),g=_.zip(i,this.dataset.attributes.data),h=this.dataset.attributes.max,b=_.map(g,function(m){var n=m[0],l=m[1];return j.chrom_data_layout(n,l,d,d+e,h)});var c=d3.select(this.$el[0]).append("svg").attr("width",this.width).attr("height",this.height).append("g").attr("transform","translate("+this.width/2+","+this.height/2+")");var k=c.append("g").attr("id","inner-arc"),f=d3.svg.arc().innerRadius(d).outerRadius(d+e),a=k.selectAll("#inner-arc>path").data(i).enter().append("path").attr("d",f).style("stroke","#ccc").style("fill","#ccc").append("title").text(function(l){return l.data.chrom});_.each(b,function(l){if(!l){return}var o=c.append("g"),n=d3.svg.arc().innerRadius(d),m=o.selectAll("path").data(l).enter().append("path").attr("d",n).style("stroke","red").style("fill","red")})}});var TrackBrowserRouter=Backbone.Router.extend({initialize:function(b){this.view=b.view;this.route(/([\w]+)$/,"change_location");this.route(/([\w]+\:[\d,]+-[\d,]+)$/,"change_location");var a=this;a.view.on("navigate",function(c){a.navigate(c)})},change_location:function(a){this.view.go_to(a)}});
\ No newline at end of file
+var Genome=Backbone.Model.extend({defaults:{name:null,key:null,chroms_info:null},get_chroms_info:function(){return this.attributes.chroms_info.chrom_info}});var BrowserBookmark=Backbone.Model.extend({defaults:{chrom:null,start:0,end:0,note:""}});var BrowserBookmarks=Backbone.Collection.extend({model:BrowserBookmark});var Visualization=Backbone.RelationalModel.extend({defaults:{id:"",title:"",type:"",dbkey:"",datasets:[]}});var TracksterVisualization=Visualization.extend({defaults:{bookmarks:[],viewport:{}}});var CircsterVisualization=Visualization.extend({});var Dataset=Backbone.Model.extend({defaults:{id:"",type:"",name:"",hda_ldda:""}});var HistogramDataset=Backbone.Model.extend({initialize:function(a){this.attributes.data=a;this.attributes.max=_.max(a,function(b){if(!b||typeof b==="string"){return 0}return b[1]})[1]}});var TrackConfig=Backbone.Model.extend({});var CircsterHistogramDatasetLayout=Backbone.Model.extend({chroms_layout:function(){var b=this.attributes.genome.get_chroms_info(),d=d3.layout.pie().value(function(f){return f.len}).sort(null),e=d(b),a=this.attributes.total_gap/b.length,c=_.map(e,function(h,g){var f=h.endAngle-a;h.endAngle=(f>h.startAngle?f:h.startAngle);return h});return c},chrom_data_layout:function(j,b,g,f,h){if(!b||typeof b==="string"){return null}var d=b[0],i=b[3],c=d3.scale.linear().domain([0,h]).range([g,f]),e=d3.layout.pie().value(function(k){return i}).startAngle(j.startAngle).endAngle(j.endAngle),a=e(d);_.each(d,function(k,l){a[l].outerRadius=c(k[1])});return a}});var CircsterView=Backbone.View.extend({className:"circster",initialize:function(a){this.width=a.width;this.height=a.height;this.total_gap=a.total_gap;this.genome=a.genome;this.dataset=a.dataset;this.radius_start=a.radius_start;this.dataset_arc_height=a.dataset_arc_height},render:function(){var d=this.radius_start,e=this.dataset_arc_height,j=new CircsterHistogramDatasetLayout({genome:this.genome,total_gap:this.total_gap}),i=j.chroms_layout(),g=_.zip(i,this.dataset.attributes.data),h=this.dataset.attributes.max,b=_.map(g,function(m){var n=m[0],l=m[1];return j.chrom_data_layout(n,l,d,d+e,h)});var c=d3.select(this.$el[0]).append("svg").attr("width",this.width).attr("height",this.height).append("g").attr("transform","translate("+this.width/2+","+this.height/2+")");var k=c.append("g").attr("id","inner-arc"),f=d3.svg.arc().innerRadius(d).outerRadius(d+e),a=k.selectAll("#inner-arc>path").data(i).enter().append("path").attr("d",f).style("stroke","#ccc").style("fill","#ccc").append("title").text(function(l){return l.data.chrom});_.each(b,function(l){if(!l){return}var o=c.append("g"),n=d3.svg.arc().innerRadius(d),m=o.selectAll("path").data(l).enter().append("path").attr("d",n).style("stroke","red").style("fill","red")})}});var TrackBrowserRouter=Backbone.Router.extend({initialize:function(b){this.view=b.view;this.route(/([\w]+)$/,"change_location");this.route(/([\w]+\:[\d,]+-[\d,]+)$/,"change_location");var a=this;a.view.on("navigate",function(c){a.navigate(c)})},change_location:function(a){this.view.go_to(a)}});var add_datasets=function(a,c,b){$.ajax({url:a,data:{"f-dbkey":view.dbkey},error:function(){alert("Grid failed")},success:function(d){show_modal("Select datasets for new tracks",d,{Cancel:function(){hide_modal()},Add:function(){var e=[];$("input[name=id]:checked,input[name=ldda_ids]:checked").each(function(){var f,g=$(this).val();if($(this).attr("name")==="id"){f={hda_id:g}}else{f={ldda_id:g}}e[e.length]=$.ajax({url:c,data:f,dataType:"json",})});$.when.apply($,e).then(function(){var 
f=(arguments[0] instanceof Array?$.map(arguments,function(g){return g[0]}):[arguments[0]]);b(f)});hide_modal()}})}})};
\ No newline at end of file
diff -r b129ae3fc0359f08b9f47ae33230249a4ce0975e -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b templates/base_panels.mako
--- a/templates/base_panels.mako
+++ b/templates/base_panels.mako
@@ -61,7 +61,7 @@
<%def name="late_javascripts()">
## Scripts can be loaded later since they progressively add features to
## the panels, but do not change layout
- ${h.js( 'jquery.event.drag', 'jquery.event.hover', 'jquery.form', 'jquery.rating', 'galaxy.base', 'galaxy.panels' )}
+ ${h.js( 'jquery.event.drag', 'jquery.event.hover', 'jquery.form', 'jquery.rating', 'galaxy.base', 'galaxy.panels', 'jquery.tipsy' )}
<script type="text/javascript">
ensure_dd_helper();
diff -r b129ae3fc0359f08b9f47ae33230249a4ce0975e -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b templates/tracks/browser.mako
--- a/templates/tracks/browser.mako
+++ b/templates/tracks/browser.mako
@@ -42,13 +42,16 @@
<script type='text/javascript' src="${h.url_for('/static/scripts/excanvas.js')}"></script><![endif]-->
-${h.js( "galaxy.base", "galaxy.panels", "json2", "jquery", "jstorage", "jquery.event.drag", "jquery.event.hover","jquery.mousewheel", "jquery.autocomplete", "trackster", "trackster_ui", "jquery.ui.sortable.slider", "farbtastic", "jquery.tipsy", "mvc/visualization" )}
+${h.js( "galaxy.base", "galaxy.panels", "json2", "jquery", "jstorage", "jquery.event.drag", "jquery.event.hover","jquery.mousewheel", "jquery.autocomplete", "trackster", "trackster_ui", "jquery.ui.sortable.slider", "farbtastic", "mvc/visualization" )}
<script type="text/javascript">
//
// Place URLs here so that url_for can be used to generate them.
//
- var default_data_url = "${h.url_for( action='data' )}",
+ var
+ add_track_async_url = "${h.url_for( action='add_track_async' )}",
+ add_datasets_url = "${h.url_for( action='list_current_history_datasets' )}",
+ default_data_url = "${h.url_for( action='data' )}",
raw_data_url = "${h.url_for( action='raw_data' )}",
run_tool_url = "${h.url_for( action='run_tool' )}",
rerun_tool_url = "${h.url_for( action='rerun_tool' )}",
@@ -66,60 +69,7 @@
browser_router = new TrackBrowserRouter(options);
Backbone.history.start();
};
-
- /**
- * Use a popup grid to add more tracks.
- */
- var add_tracks = function() {
- $.ajax({
- url: "${h.url_for( action='list_current_history_datasets' )}",
- data: { "f-dbkey": view.dbkey },
- error: function() { alert( "Grid failed" ); },
- success: function(table_html) {
- show_modal(
- "Select datasets for new tracks",
- table_html, {
- "Cancel": function() {
- hide_modal();
- },
- "Insert": function() {
- var requests = [];
- $('input[name=id]:checked,input[name=ldda_ids]:checked').each(function() {
- var data,
- id = $(this).val();
- if ($(this).attr("name") === "id") {
- data = { hda_id: id };
- } else {
- data = { ldda_id: id};
- }
- requests[requests.length] = $.ajax({
- url: "${h.url_for( action='add_track_async' )}",
- data: data,
- dataType: "json",
- });
- });
- // To preserve order, wait until there are definitions for all tracks and then add
- // them sequentially.
- $.when.apply($, requests).then(function() {
- // jQuery always returns an Array for arguments, so need to look at first element
- // to determine whether multiple requests were made and consequently how to
- // map arguments to track definitions.
- var track_defs = (arguments[0] instanceof Array ?
- $.map(arguments, function(arg) { return arg[0]; }) :
- [ arguments[0] ]
- );
- for (var i= 0; i < track_defs.length; i++) {
- view.add_drawable( object_from_template(track_defs[i], view) );
- }
- });
- hide_modal();
- }
- }
- );
- }
- });
- };
-
+
/**
* Use a popup grid to bookmarks from a dataset.
*/
@@ -168,23 +118,22 @@
var browser_router;
$(function() {
// Create and initialize menu.
- var
-
- buttons = new IconButtonCollection([
- new IconButton({icon_class: 'plus-button', title: 'Add tracks', on_click: function() { add_tracks(); } }),
- new IconButton({icon_class: 'block--plus', title: 'Add group', on_click: function() {
+ var menu = create_icon_buttons_menu([
+ { icon_class: 'plus-button', title: 'Add tracks', on_click: function() {
+ add_datasets(add_datasets_url, add_track_async_url, function(tracks) {
+ _.each(tracks, function(track) {
+ view.add_drawable( object_from_template(track, view) );
+ });
+ });
+ } },
+ { icon_class: 'block--plus', title: 'Add group', on_click: function() {
view.add_drawable( new DrawableGroup(view, view, { name: "New Group" }) );
- } }),
- /*
- new IconButton({icon_class: 'toolbox', title: 'Use tools', on_click: function() {
- console.log("toolbox!")
- } }),
- */
- new IconButton({icon_class: 'bookmarks', title: 'Bookmarks', on_click: function() {
+ } },
+ { icon_class: 'bookmarks', title: 'Bookmarks', on_click: function() {
// HACK -- use style to determine if panel is hidden and hide/show accordingly.
parent.force_right_panel(($("div#right").css("right") == "0px" ? "hide" : "show"));
- } }),
- new IconButton({icon_class: 'disk--arrow', title: 'Save', on_click: function() {
+ } },
+ { icon_class: 'disk--arrow', title: 'Save', on_click: function() {
// Show saving dialog box
show_modal("Saving...", "progress");
@@ -229,15 +178,11 @@
{ "Close" : hide_modal } );
}
});
- } }),
- new IconButton({icon_class: 'cross-circle', title: 'Close', on_click: function() {
+ } },
+ { icon_class: 'cross-circle', title: 'Close', on_click: function() {
window.location = "${h.url_for( controller='visualization', action='list' )}";
- } })
- ]),
-
- menu = new IconButtonMenuView({
- collection: buttons
- });
+ } }
+ ]);
menu.render();
menu.$el.attr("style", "float: right");
diff -r b129ae3fc0359f08b9f47ae33230249a4ce0975e -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b templates/visualization/circos.mako
--- a/templates/visualization/circos.mako
+++ /dev/null
@@ -1,39 +0,0 @@
-<%inherit file="/base.mako"/>
-
-<%def name="stylesheets()">
- ${parent.stylesheets()}
-</%def>
-
-<%def name="javascripts()">
- ${parent.javascripts()}
-
- ${h.js( "libs/d3", "mvc/visualization" )}
-
- <script type="text/javascript">
- $(function() {
- // -- Viz set up. --
-
- var genome = new Genome(JSON.parse('${ h.to_json_string( genome ) }')),
- dataset = new HistogramDataset(JSON.parse('${ h.to_json_string( dataset_summary ) }')),
- circos = new CircosView({
- width: 600,
- height: 600,
- // Gap is difficult to set because it very dependent on chromosome size and organization.
- total_gap: 2 * Math.PI * 0.5,
- genome: genome,
- dataset: dataset,
- radius_start: 100,
- dataset_arc_height: 50
- });
-
- // -- Render viz. --
-
- circos.render();
- $('body').append(circos.$el);
- });
- </script>
-</%def>
-
-<%def name="body()">
- <h1>Circos plot for '${dataset.name}'</h1>
-</%def>
diff -r b129ae3fc0359f08b9f47ae33230249a4ce0975e -r 9765176c63f7ada3ee30a649ab3ef13396d9c84b templates/visualization/circster.mako
--- /dev/null
+++ b/templates/visualization/circster.mako
@@ -0,0 +1,97 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.active_view="visualization"
+ self.message_box_visible=False
+%>
+</%def>
+
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+</%def>
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+
+ ${h.js( "libs/d3", "mvc/visualization" )}
+
+ <script type="text/javascript">
+ $(function() {
+ // -- Visualization menu and set up.
+ var menu = create_icon_buttons_menu([
+ { icon_class: 'plus-button', title: 'Add tracks', on_click: function() { add_tracks(); } },
+ { icon_class: 'disk--arrow', title: 'Save', on_click: function() {
+ // Show saving dialog box
+ show_modal("Saving...", "progress");
+
+ $.ajax({
+ url: "${h.url_for( action='save' )}",
+ type: "POST",
+ data: {
+ 'id': view.vis_id,
+ 'title': view.name,
+ 'dbkey': view.dbkey,
+ 'type': 'trackster',
+ 'config': JSON.stringify(payload)
+ },
+ dataType: "json",
+ success: function(vis_info) {
+ hide_modal();
+ view.vis_id = vis_info.vis_id;
+ view.has_changes = false;
+
+ // Needed to set URL when first saving a visualization.
+ window.history.pushState({}, "", vis_info.url + window.location.hash);
+ },
+ error: function() {
+ show_modal( "Could Not Save", "Could not save visualization. Please try again later.",
+ { "Close" : hide_modal } );
+ }
+ });
+ } },
+ { icon_class: 'cross-circle', title: 'Close', on_click: function() {
+ window.location = "${h.url_for( controller='visualization', action='list' )}";
+ } }
+ ]);
+
+ menu.render();
+ menu.$el.attr("style", "float: right");
+ $("#center .unified-panel-header-inner").append(menu.$el);
+ // Manual tipsy config because default gravity is S and cannot be changed.
+ $(".menu-button").tipsy( {gravity: 'n'} );
+
+ // -- Viz set up. --
+
+ var genome = new Genome(JSON.parse('${ h.to_json_string( genome ) }')),
+ dataset = new HistogramDataset(JSON.parse('${ h.to_json_string( dataset_summary ) }')),
+ circster = new CircsterView({
+ width: 600,
+ height: 600,
+ // Gap is difficult to set because it very dependent on chromosome size and organization.
+ total_gap: 2 * Math.PI * 0.5,
+ genome: genome,
+ dataset: dataset,
+ radius_start: 100,
+ dataset_arc_height: 50
+ });
+
+ // -- Render viz. --
+
+ circster.render();
+ $('#vis').append(circster.$el);
+ });
+ </script>
+</%def>
+
+<%def name="center_panel()">
+ <div class="unified-panel-header" unselectable="on">
+ <div class="unified-panel-header-inner">
+ <div style="float:left;" id="title"></div>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div id="vis" class="unified-panel-body"></div>
+</%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c5ba3065b82e/
changeset: c5ba3065b82e
user: jgoecks
date: 2012-05-27 21:54:32
summary: Support for saving and validating different visualization types.
affected #: 3 files
diff -r fdc3f20a46d3af6ac89bd02baef3db09eed3f235 -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -305,60 +305,64 @@
decoded_id = trans.security.decode_id( id )
vis = session.query( trans.model.Visualization ).get( decoded_id )
- # Decode the payload
- decoded_payload = config
# Create new VisualizationRevision that will be attached to the viz
vis_rev = trans.model.VisualizationRevision()
vis_rev.visualization = vis
vis_rev.title = vis.title
vis_rev.dbkey = dbkey
+
+ # -- Validate config. --
+
+ if vis.type == 'trackster':
+ def unpack_track( track_json ):
+ """ Unpack a track from its json. """
+ return {
+ "dataset_id": trans.security.decode_id( track_json['dataset_id'] ),
+ "hda_ldda": track_json.get('hda_ldda', 'hda'),
+ "name": track_json['name'],
+ "track_type": track_json['track_type'],
+ "prefs": track_json['prefs'],
+ "mode": track_json['mode'],
+ "filters": track_json['filters'],
+ "tool_state": track_json['tool_state']
+ }
- def unpack_track( track_json ):
- """ Unpack a track from its json. """
- return {
- "dataset_id": trans.security.decode_id( track_json['dataset_id'] ),
- "hda_ldda": track_json.get('hda_ldda', 'hda'),
- "name": track_json['name'],
- "track_type": track_json['track_type'],
- "prefs": track_json['prefs'],
- "mode": track_json['mode'],
- "filters": track_json['filters'],
- "tool_state": track_json['tool_state']
- }
+ def unpack_collection( collection_json ):
+ """ Unpack a collection from its json. """
+ unpacked_drawables = []
+ drawables = collection_json[ 'drawables' ]
+ for drawable_json in drawables:
+ if 'track_type' in drawable_json:
+ drawable = unpack_track( drawable_json )
+ else:
+ drawable = unpack_collection( drawable_json )
+ unpacked_drawables.append( drawable )
+ return {
+ "name": collection_json.get( 'name', '' ),
+ "obj_type": collection_json[ 'obj_type' ],
+ "drawables": unpacked_drawables,
+ "prefs": collection_json.get( 'prefs' , [] ),
+ "filters": collection_json.get( 'filters', None )
+ }
- def unpack_collection( collection_json ):
- """ Unpack a collection from its json. """
- unpacked_drawables = []
- drawables = collection_json[ 'drawables' ]
- for drawable_json in drawables:
- if 'track_type' in drawable_json:
- drawable = unpack_track( drawable_json )
- else:
- drawable = unpack_collection( drawable_json )
- unpacked_drawables.append( drawable )
- return {
- "name": collection_json.get( 'name', '' ),
- "obj_type": collection_json[ 'obj_type' ],
- "drawables": unpacked_drawables,
- "prefs": collection_json.get( 'prefs' , [] ),
- "filters": collection_json.get( 'filters', None )
- }
+ # TODO: unpack and validate bookmarks:
+ def unpack_bookmarks( bookmarks_json ):
+ return bookmarks_json
- # TODO: unpack and validate bookmarks:
- def unpack_bookmarks( bookmarks_json ):
- return bookmarks_json
-
- # Unpack and validate view content.
- view_content = unpack_collection( decoded_payload[ 'view' ] )
- bookmarks = unpack_bookmarks( decoded_payload[ 'bookmarks' ] )
- vis_rev.config = { "view": view_content, "bookmarks": bookmarks }
- # Viewport from payload
- if 'viewport' in decoded_payload:
- chrom = decoded_payload['viewport']['chrom']
- start = decoded_payload['viewport']['start']
- end = decoded_payload['viewport']['end']
- overview = decoded_payload['viewport']['overview']
- vis_rev.config[ "viewport" ] = { 'chrom': chrom, 'start': start, 'end': end, 'overview': overview }
+ # Unpack and validate view content.
+ view_content = unpack_collection( config[ 'view' ] )
+ bookmarks = unpack_bookmarks( config[ 'bookmarks' ] )
+ vis_rev.config = { "view": view_content, "bookmarks": bookmarks }
+ # Viewport from payload
+ if 'viewport' in config:
+ chrom = config['viewport']['chrom']
+ start = config['viewport']['start']
+ end = config['viewport']['end']
+ overview = config['viewport']['overview']
+ vis_rev.config[ "viewport" ] = { 'chrom': chrom, 'start': start, 'end': end, 'overview': overview }
+ elif type == 'circos':
+ # TODO.
+ pass
vis.latest_revision = vis_rev
session.add( vis_rev )
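
The hunk above makes config validation conditional on the visualization type: trackster configs are unpacked and validated, while the circos branch is left as a TODO. A minimal, self-contained sketch of that dispatch pattern follows; the registry, function names, and error messages are illustrative, not Galaxy's API:

    def validate_trackster(config):
        # A trackster config must at least carry 'view' and 'bookmarks'.
        if "view" not in config or "bookmarks" not in config:
            raise ValueError("trackster config needs 'view' and 'bookmarks'")
        return {"view": config["view"], "bookmarks": config["bookmarks"]}

    def validate_circos(config):
        # Placeholder, mirroring the TODO branch in the diff above.
        return config

    VALIDATORS = {"trackster": validate_trackster, "circos": validate_circos}

    def validate_config(vis_type, config):
        if vis_type not in VALIDATORS:
            raise ValueError("unknown visualization type: %s" % vis_type)
        return VALIDATORS[vis_type](config)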
diff -r fdc3f20a46d3af6ac89bd02baef3db09eed3f235 -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -234,6 +234,15 @@
rows.append( [location, name] )
return { 'data': rows }
+ # TODO: this is duplicated from visualization controller; remove it once
+ # routing incompatibilities have been resolved.
+ @web.json
+ def save( self, trans, config, type, id=None, title=None, dbkey=None, annotation=None ):
+ """
+ Save a visualization; if visualization does not have an ID, a new
+ visualization is created. Returns JSON of visualization.
+ """
+ return self.save_visualization( trans, from_json_string( config ), type, id, title, dbkey, annotation )
@web.expose
@web.require_login()
@@ -409,11 +418,7 @@
result = data_provider.get_data( chrom, low, high, int( start_val ), int( max_vals ), **kwargs )
result.update( { 'dataset_type': tracks_dataset_type, 'extra_info': extra_info } )
return result
-
- @web.json
- def save( self, trans, config, type, id=None, title=None, dbkey=None, annotation=None ):
- return self.save_visualization( trans, from_json_string( config ), type, id, title, dbkey, annotation )
-
+
@web.expose
@web.require_login( "see all available libraries" )
def list_libraries( self, trans, **kwargs ):
diff -r fdc3f20a46d3af6ac89bd02baef3db09eed3f235 -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py
+++ b/lib/galaxy/web/controllers/visualization.py
@@ -349,7 +349,7 @@
def create( self, trans, visualization_title="", visualization_slug="", visualization_annotation="", visualization_dbkey="",
visualization_type="" ):
"""
- Create a new visualization
+ Creates a new visualization or returns a form for creating a visualization.
"""
visualization_title_err = visualization_slug_err = visualization_annotation_err = ""
if trans.request.method == "POST":
@@ -381,6 +381,14 @@
.add_text( "visualization_annotation", "Visualization annotation", value=visualization_annotation, error=visualization_annotation_err,
help="A description of the visualization; annotation is shown alongside published visualizations."),
template="visualization/create.mako" )
+
+ @web.json
+ def save( self, trans, config, type, id=None, title=None, dbkey=None, annotation=None ):
+ """
+ Save a visualization; if visualization does not have an ID, a new
+ visualization is created. Returns JSON of visualization.
+ """
+ return self.save_visualization( trans, from_json_string( config ), type, id, title, dbkey, annotation )
@web.expose
@web.require_login( "edit visualizations" )
https://bitbucket.org/galaxy/galaxy-central/changeset/03cb8ee86726/
changeset: 03cb8ee86726
user: jgoecks
date: 2012-05-27 22:09:16
summary: Include 'mixin' in names of many common controller mixins.
affected #: 22 files
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/tools/genome_index/__init__.py
--- a/lib/galaxy/tools/genome_index/__init__.py
+++ b/lib/galaxy/tools/genome_index/__init__.py
@@ -3,7 +3,7 @@
from galaxy.web.framework.helpers import to_unicode
from galaxy.model.item_attrs import UsesAnnotations
from galaxy.util.json import *
-from galaxy.web.base.controller import UsesHistory
+from galaxy.web.base.controller import UsesHistoryMixin
from galaxy.tools.data import ToolDataTableManager
log = logging.getLogger(__name__)
@@ -63,8 +63,6 @@
if gitd:
destination = None
- alldone = True
- indexjobs = gitd.deferred.params[ 'indexjobs' ]
tdtman = ToolDataTableManager()
xmltree = tdtman.load_from_config_file(app.config.tool_data_table_config_path)
for node in xmltree:
@@ -165,14 +163,6 @@
self._check_link( fasta, target )
for line in location:
self._add_line( line[ 'file' ], line[ 'line' ] )
- for indexjob in indexjobs:
- js = sa_session.query( model.Job ).filter_by( id=indexjob ).first()
- if js.state not in [ 'ok', 'done', 'error' ]:
- alldone = False
- if alldone:
- gitd.deferred.state = 'ok'
- sa_session.add( gitd.deferred )
- sa_session.flush()
def _check_link( self, targetfile, symlink ):
target = os.path.relpath( targetfile, os.path.dirname( symlink ) )
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/tools/imp_exp/__init__.py
--- a/lib/galaxy/tools/imp_exp/__init__.py
+++ b/lib/galaxy/tools/imp_exp/__init__.py
@@ -4,7 +4,7 @@
from galaxy.web.framework.helpers import to_unicode
from galaxy.model.item_attrs import UsesAnnotations
from galaxy.util.json import *
-from galaxy.web.base.controller import UsesHistory
+from galaxy.web.base.controller import UsesHistoryMixin
log = logging.getLogger(__name__)
@@ -42,7 +42,7 @@
toolbox.tools_by_id[ history_imp_tool.id ] = history_imp_tool
log.debug( "Loaded history import tool: %s", history_imp_tool.id )
-class JobImportHistoryArchiveWrapper( object, UsesHistory, UsesAnnotations ):
+class JobImportHistoryArchiveWrapper( object, UsesHistoryMixin, UsesAnnotations ):
"""
Class provides support for performing jobs that import a history from
an archive.
@@ -263,7 +263,7 @@
jiha.job.stderr += "Error cleaning up history import job: %s" % e
db_session.flush()
-class JobExportHistoryArchiveWrapper( object, UsesHistory, UsesAnnotations ):
+class JobExportHistoryArchiveWrapper( object, UsesHistoryMixin, UsesAnnotations ):
"""
Class provides support for performing jobs that export a history to an
archive.
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/api/datasets.py
--- a/lib/galaxy/web/api/datasets.py
+++ b/lib/galaxy/web/api/datasets.py
@@ -10,7 +10,7 @@
log = logging.getLogger( __name__ )
-class DatasetsController( BaseAPIController, UsesHistoryDatasetAssociation ):
+class DatasetsController( BaseAPIController, UsesHistoryMixinDatasetAssociationMixin ):
@web.expose_api
def index( self, trans, hda_id, **kwd ):
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/api/histories.py
--- a/lib/galaxy/web/api/histories.py
+++ b/lib/galaxy/web/api/histories.py
@@ -12,7 +12,7 @@
log = logging.getLogger( __name__ )
-class HistoriesController( BaseAPIController, UsesHistory ):
+class HistoriesController( BaseAPIController, UsesHistoryMixin ):
@web.expose_api
def index( self, trans, deleted='False', **kwd ):
@@ -153,7 +153,7 @@
POST /api/histories/deleted/{encoded_quota_id}/undelete
Undeletes a quota
"""
- history = self.get_history( trans, id, check_ownership=True, check_accessible=False, deleted=True )
+ history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False, deleted=True )
history.deleted = False
trans.sa_session.add( history )
trans.sa_session.flush()
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/api/history_contents.py
--- a/lib/galaxy/web/api/history_contents.py
+++ b/lib/galaxy/web/api/history_contents.py
@@ -12,7 +12,7 @@
log = logging.getLogger( __name__ )
-class HistoryContentsController( BaseAPIController, UsesHistoryDatasetAssociation, UsesHistory, UsesLibrary, UsesLibraryItems ):
+class HistoryContentsController( BaseAPIController, UsesHistoryMixinDatasetAssociationMixin, UsesHistoryMixin, UsesLibraryMixin, UsesLibraryMixinItems ):
@web.expose_api
def index( self, trans, history_id, **kwd ):
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/api/library_contents.py
--- a/lib/galaxy/web/api/library_contents.py
+++ b/lib/galaxy/web/api/library_contents.py
@@ -10,7 +10,7 @@
log = logging.getLogger( __name__ )
-class LibraryContentsController( BaseAPIController, UsesLibrary, UsesLibraryItems ):
+class LibraryContentsController( BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems ):
@web.expose_api
def index( self, trans, library_id, **kwd ):
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/api/quotas.py
--- a/lib/galaxy/web/api/quotas.py
+++ b/lib/galaxy/web/api/quotas.py
@@ -2,7 +2,7 @@
API operations on Quota objects.
"""
import logging
-from galaxy.web.base.controller import BaseAPIController, Admin, UsesQuota, url_for
+from galaxy.web.base.controller import BaseAPIController, Admin, UsesQuotaMixin, url_for
from galaxy import web, util
from elementtree.ElementTree import XML
@@ -14,7 +14,7 @@
log = logging.getLogger( __name__ )
-class QuotaAPIController( BaseAPIController, Admin, AdminActions, UsesQuota, QuotaParamParser ):
+class QuotaAPIController( BaseAPIController, Admin, AdminActions, UsesQuotaMixin, QuotaParamParser ):
@web.expose_api
@web.require_admin
def index( self, trans, deleted='False', **kwd ):
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -177,7 +177,11 @@
def not_implemented( self, trans, **kwd ):
raise HTTPNotImplemented()
-class SharableItemSecurity:
+#
+# -- Mixins for working with Galaxy objects. --
+#
+
+class SharableItemSecurityMixin:
""" Mixin for handling security for sharable items. """
def security_check( self, trans, item, check_ownership=False, check_accessible=False ):
""" Security checks for an item: checks if (a) user owns item or (b) item is accessible to user. """
@@ -197,11 +201,7 @@
raise ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' )
return item
-#
-# TODO: need to move UsesHistory, etc. mixins to better location - perhaps lib/galaxy/model/XXX ?
-#
-
-class UsesHistoryDatasetAssociation:
+class UsesHistoryMixinDatasetAssociationMixin:
""" Mixin for controllers that use HistoryDatasetAssociation objects. """
def get_dataset( self, trans, dataset_id, check_ownership=True, check_accessible=False ):
""" Get an HDA object by id. """
@@ -259,14 +259,14 @@
truncated = False
return truncated, dataset_data
-class UsesLibrary:
+class UsesLibraryMixin:
def get_library( self, trans, id, check_ownership=False, check_accessible=True ):
l = self.get_object( trans, id, 'Library' )
if check_accessible and not ( trans.user_is_admin() or trans.app.security_agent.can_access_library( trans.get_current_user_roles(), l ) ):
error( "LibraryFolder is not accessible to the current user" )
return l
-class UsesLibraryItems( SharableItemSecurity ):
+class UsesLibraryMixinItems( SharableItemSecurityMixin ):
def get_library_folder( self, trans, id, check_ownership=False, check_accessible=True ):
return self.get_object( trans, id, 'LibraryFolder', check_ownership=False, check_accessible=check_accessible )
def get_library_dataset_dataset_association( self, trans, id, check_ownership=False, check_accessible=True ):
@@ -274,7 +274,7 @@
def get_library_dataset( self, trans, id, check_ownership=False, check_accessible=True ):
return self.get_object( trans, id, 'LibraryDataset', check_ownership=False, check_accessible=check_accessible )
-class UsesVisualization( SharableItemSecurity ):
+class UsesVisualizationMixin( SharableItemSecurityMixin ):
""" Mixin for controllers that use Visualization objects. """
viz_types = [ "trackster", "circos" ]
@@ -522,7 +522,7 @@
return visualization
-class UsesStoredWorkflow( SharableItemSecurity ):
+class UsesStoredWorkflowMixin( SharableItemSecurityMixin ):
""" Mixin for controllers that use StoredWorkflow objects. """
def get_stored_workflow( self, trans, id, check_ownership=True, check_accessible=False ):
""" Get a StoredWorkflow from the database by id, verifying ownership. """
@@ -560,7 +560,7 @@
# Connections by input name
step.input_connections_by_name = dict( ( conn.input_name, conn ) for conn in step.input_connections )
-class UsesHistory( SharableItemSecurity ):
+class UsesHistoryMixin( SharableItemSecurityMixin ):
""" Mixin for controllers that use History objects. """
def get_history( self, trans, id, check_ownership=True, check_accessible=False, deleted=None ):
"""Get a History from the database by id, verifying ownership."""
@@ -580,7 +580,7 @@
query = query.filter( trans.model.Dataset.purged == False )
return query.all()
-class UsesFormDefinitions:
+class UsesFormDefinitionsMixin:
"""Mixin for controllers that use Galaxy form objects."""
def get_all_forms( self, trans, all_versions=False, filter=None, form_type='All' ):
"""
@@ -1342,7 +1342,7 @@
selected_value=selected_value,
refresh_on_change=True )
-class Sharable:
+class SharableMixin:
""" Mixin for a controller that manages an item that can be shared. """
# -- Implemented methods. --
@@ -1433,7 +1433,7 @@
""" Return item based on id. """
raise "Unimplemented Method"
-class UsesQuota( object ):
+class UsesQuotaMixin( object ):
def get_quota( self, trans, id, check_ownership=False, check_accessible=False, deleted=None ):
return self.get_object( trans, id, 'Quota', check_ownership=False, check_accessible=False, deleted=deleted )
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py
+++ b/lib/galaxy/web/controllers/admin.py
@@ -428,7 +428,7 @@
def build_initial_query( self, trans, **kwd ):
return trans.sa_session.query( self.model_class )
-class AdminGalaxy( BaseUIController, Admin, AdminActions, UsesQuota, QuotaParamParser ):
+class AdminGalaxy( BaseUIController, Admin, AdminActions, UsesQuotaMixin, QuotaParamParser ):
user_list_grid = UserListGrid()
role_list_grid = RoleListGrid()
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -150,7 +150,7 @@
.filter( model.History.deleted==False ) \
.filter( self.model_class.visible==True )
-class DatasetInterface( BaseUIController, UsesAnnotations, UsesHistory, UsesHistoryDatasetAssociation, UsesItemRatings ):
+class DatasetInterface( BaseUIController, UsesAnnotations, UsesHistoryMixin, UsesHistoryMixinDatasetAssociationMixin, UsesItemRatings ):
stored_list_grid = HistoryDatasetAssociationListGrid()
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/external_service.py
--- a/lib/galaxy/web/controllers/external_service.py
+++ b/lib/galaxy/web/controllers/external_service.py
@@ -63,7 +63,7 @@
grids.GridAction( "Create new external service", dict( controller='external_service', action='create_external_service' ) )
]
-class ExternalService( BaseUIController, UsesFormDefinitions ):
+class ExternalService( BaseUIController, UsesFormDefinitionsMixin ):
external_service_grid = ExternalServiceGrid()
@web.expose
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py
+++ b/lib/galaxy/web/controllers/history.py
@@ -190,7 +190,7 @@
# A public history is published, has a slug, and is not deleted.
return query.filter( self.model_class.published == True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False )
-class HistoryController( BaseUIController, Sharable, UsesAnnotations, UsesItemRatings, UsesHistory ):
+class HistoryController( BaseUIController, SharableMixin, UsesAnnotations, UsesItemRatings, UsesHistoryMixin ):
@web.expose
def index( self, trans ):
return ""
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py
+++ b/lib/galaxy/web/controllers/library_common.py
@@ -68,7 +68,7 @@
pass
os.rmdir( tmpd )
-class LibraryCommon( BaseUIController, UsesFormDefinitions ):
+class LibraryCommon( BaseUIController, UsesFormDefinitionsMixin ):
@web.json
def library_item_updates( self, trans, ids=None, states=None ):
# Avoid caching
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/page.py
--- a/lib/galaxy/web/controllers/page.py
+++ b/lib/galaxy/web/controllers/page.py
@@ -272,8 +272,8 @@
# Default behavior:
_BaseHTMLProcessor.unknown_endtag( self, tag )
-class PageController( BaseUIController, Sharable, UsesAnnotations, UsesHistory,
- UsesStoredWorkflow, UsesHistoryDatasetAssociation, UsesVisualization, UsesItemRatings ):
+class PageController( BaseUIController, SharableMixin, UsesAnnotations, UsesHistoryMixin,
+ UsesStoredWorkflowMixin, UsesHistoryMixinDatasetAssociationMixin, UsesVisualizationMixin, UsesItemRatings ):
_page_list = PageListGrid()
_all_published_list = PageAllPublishedGrid()
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/request_type.py
--- a/lib/galaxy/web/controllers/request_type.py
+++ b/lib/galaxy/web/controllers/request_type.py
@@ -72,7 +72,7 @@
grids.GridAction( "Create new request type", dict( controller='request_type', action='create_request_type' ) )
]
-class RequestType( BaseUIController, UsesFormDefinitions ):
+class RequestType( BaseUIController, UsesFormDefinitionsMixin ):
request_type_grid = RequestTypeGrid()
@web.expose
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py
+++ b/lib/galaxy/web/controllers/requests_admin.py
@@ -94,7 +94,7 @@
return query
return query.filter_by( sample_id=trans.security.decode_id( sample_id ) )
-class RequestsAdmin( BaseUIController, UsesFormDefinitions ):
+class RequestsAdmin( BaseUIController, UsesFormDefinitionsMixin ):
request_grid = AdminRequestsGrid()
datatx_grid = DataTransferGrid()
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/requests_common.py
--- a/lib/galaxy/web/controllers/requests_common.py
+++ b/lib/galaxy/web/controllers/requests_common.py
@@ -93,7 +93,7 @@
confirm="Samples cannot be added to this request after it is submitted. Click OK to submit." )
]
-class RequestsCommon( BaseUIController, UsesFormDefinitions ):
+class RequestsCommon( BaseUIController, UsesFormDefinitionsMixin ):
@web.json
def sample_state_updates( self, trans, ids=None, states=None ):
# Avoid caching
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py
+++ b/lib/galaxy/web/controllers/root.py
@@ -11,7 +11,7 @@
log = logging.getLogger( __name__ )
-class RootController( BaseUIController, UsesHistory, UsesAnnotations ):
+class RootController( BaseUIController, UsesHistoryMixin, UsesAnnotations ):
@web.expose
def default(self, trans, target1=None, target2=None, **kwd):
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -163,7 +163,7 @@
def apply_query_filter( self, trans, query, **kwargs ):
return query.filter( self.model_class.user_id == trans.user.id )
-class TracksController( BaseUIController, UsesVisualization, UsesHistoryDatasetAssociation, Sharable ):
+class TracksController( BaseUIController, UsesVisualizationMixin, UsesHistoryMixinDatasetAssociationMixin, SharableMixin ):
"""
Controller for track browser interface. Handles building a new browser from
datasets in the current history, and display of the resulting browser.
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py
+++ b/lib/galaxy/web/controllers/user.py
@@ -38,7 +38,7 @@
def build_initial_query( self, trans, **kwd ):
return trans.sa_session.query( self.model_class ).filter( self.model_class.user_id == trans.user.id )
-class User( BaseUIController, UsesFormDefinitions ):
+class User( BaseUIController, UsesFormDefinitionsMixin ):
user_openid_grid = UserOpenIDGrid()
installed_len_files = None
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py
+++ b/lib/galaxy/web/controllers/visualization.py
@@ -68,8 +68,8 @@
return query.filter( self.model_class.deleted==False ).filter( self.model_class.published==True )
-class VisualizationController( BaseUIController, Sharable, UsesAnnotations,
- UsesHistoryDatasetAssociation, UsesVisualization,
+class VisualizationController( BaseUIController, SharableMixin, UsesAnnotations,
+ UsesHistoryMixinDatasetAssociationMixin, UsesVisualizationMixin,
UsesItemRatings ):
_user_list_grid = VisualizationListGrid()
_published_list_grid = VisualizationAllPublishedGrid()
diff -r c5ba3065b82ee8a9d165c164f1f864526baf18e4 -r 03cb8ee86726813073913be149516ab10e601e55 lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -105,7 +105,7 @@
if self.cur_tag == self.target_tag:
self.tag_content += text
-class WorkflowController( BaseUIController, Sharable, UsesStoredWorkflow, UsesAnnotations, UsesItemRatings ):
+class WorkflowController( BaseUIController, SharableMixin, UsesStoredWorkflowMixin, UsesAnnotations, UsesItemRatings ):
stored_list_grid = StoredWorkflowListGrid()
published_list_grid = StoredWorkflowAllPublishedGrid()
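
The rename in this changeset is mechanical, but it spells out a pattern the codebase relies on: controllers gain shared behaviour by inheriting from several small mixin classes. A generic sketch of that composition, with illustrative class and method names rather than Galaxy's real ones:

    class SharableMixin(object):
        """Behaviour shared by anything published under a slug."""
        def make_slug(self, title):
            return title.lower().replace(" ", "-")

    class UsesHistoryMixin(object):
        """Behaviour shared by controllers that load History objects."""
        def get_history(self, history_id):
            # A real controller would query the database here.
            return {"id": history_id}

    class HistoryController(SharableMixin, UsesHistoryMixin):
        """Concrete controller composed from the mixins above."""
        def publish(self, history_id, title):
            history = self.get_history(history_id)
            history["slug"] = self.make_slug(title)
            return history

    print(HistoryController().publish(42, "RNA Seq Run"))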
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Provide generic support for saving visualizations.
by Bitbucket 27 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/fdc3f20a46d3/
changeset: fdc3f20a46d3
user: jgoecks
date: 2012-05-27 18:34:52
summary: Provide generic support for saving visualizations.
affected #: 3 files
diff -r f4c4ba7be3d17fbf7924e9f8d6ae005960fcc4c0 -r fdc3f20a46d3af6ac89bd02baef3db09eed3f235 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -280,41 +280,91 @@
viz_types = [ "trackster", "circos" ]
len_files = None
-
+
def create_visualization( self, trans, title, slug, type, dbkey, annotation=None, config={} ):
- user = trans.get_user()
+ """ Create visualiation and first revision. """
+ visualization = self._create_visualization( trans, title, type, dbkey, slug, annotation )
- # Error checking.
- title_err = slug_err = ""
- if not title:
- title_err = "visualization name is required"
- elif not slug:
- slug_err = "visualization id is required"
- elif not VALID_SLUG_RE.match( slug ):
- slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
- elif trans.sa_session.query( trans.model.Visualization ).filter_by( user=user, slug=slug, deleted=False ).first():
- slug_err = "visualization id must be unique"
-
- if title_err or slug_err:
- return { 'title_err': title_err, 'slug_err': slug_err }
-
- # Create visualization
- visualization = trans.model.Visualization( user=user, title=title, slug=slug, dbkey=dbkey, type=type )
- if annotation:
- annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
- self.add_item_annotation( trans.sa_session, trans.user, visualization, annotation )
-
- # And the first visualization revision
- revision = trans.model.VisualizationRevision( visualization=visualization, title=title, config={}, dbkey=dbkey )
+ # Create and save first visualization revision
+ revision = trans.model.VisualizationRevision( visualization=visualization, title=title, config=config, dbkey=dbkey )
visualization.latest_revision = revision
-
- # Persist
session = trans.sa_session
- session.add(visualization)
- session.add(revision)
+ session.add( revision )
session.flush()
return visualization
+
+ def save_visualization( self, trans, config, type, id=None, title=None, dbkey=None, slug=None, annotation=None ):
+ session = trans.sa_session
+
+ # Create/get visualization.
+ if not id:
+ # Create new visualization.
+ vis = self._create_visualization( trans, title, type, dbkey, slug, annotation )
+ else:
+ decoded_id = trans.security.decode_id( id )
+ vis = session.query( trans.model.Visualization ).get( decoded_id )
+
+ # Decode the payload
+ decoded_payload = config
+ # Create new VisualizationRevision that will be attached to the viz
+ vis_rev = trans.model.VisualizationRevision()
+ vis_rev.visualization = vis
+ vis_rev.title = vis.title
+ vis_rev.dbkey = dbkey
+
+ def unpack_track( track_json ):
+ """ Unpack a track from its json. """
+ return {
+ "dataset_id": trans.security.decode_id( track_json['dataset_id'] ),
+ "hda_ldda": track_json.get('hda_ldda', 'hda'),
+ "name": track_json['name'],
+ "track_type": track_json['track_type'],
+ "prefs": track_json['prefs'],
+ "mode": track_json['mode'],
+ "filters": track_json['filters'],
+ "tool_state": track_json['tool_state']
+ }
+
+ def unpack_collection( collection_json ):
+ """ Unpack a collection from its json. """
+ unpacked_drawables = []
+ drawables = collection_json[ 'drawables' ]
+ for drawable_json in drawables:
+ if 'track_type' in drawable_json:
+ drawable = unpack_track( drawable_json )
+ else:
+ drawable = unpack_collection( drawable_json )
+ unpacked_drawables.append( drawable )
+ return {
+ "name": collection_json.get( 'name', '' ),
+ "obj_type": collection_json[ 'obj_type' ],
+ "drawables": unpacked_drawables,
+ "prefs": collection_json.get( 'prefs' , [] ),
+ "filters": collection_json.get( 'filters', None )
+ }
+
+ # TODO: unpack and validate bookmarks:
+ def unpack_bookmarks( bookmarks_json ):
+ return bookmarks_json
+
+ # Unpack and validate view content.
+ view_content = unpack_collection( decoded_payload[ 'view' ] )
+ bookmarks = unpack_bookmarks( decoded_payload[ 'bookmarks' ] )
+ vis_rev.config = { "view": view_content, "bookmarks": bookmarks }
+ # Viewport from payload
+ if 'viewport' in decoded_payload:
+ chrom = decoded_payload['viewport']['chrom']
+ start = decoded_payload['viewport']['start']
+ end = decoded_payload['viewport']['end']
+ overview = decoded_payload['viewport']['overview']
+ vis_rev.config[ "viewport" ] = { 'chrom': chrom, 'start': start, 'end': end, 'overview': overview }
+
+ vis.latest_revision = vis_rev
+ session.add( vis_rev )
+ session.flush()
+ encoded_id = trans.security.encode_id( vis.id )
+ return { "vis_id": encoded_id, "url": url_for( action='browser', id=encoded_id ) }
def _get_dbkeys( self, trans ):
""" Returns all valid dbkeys that a user can use in a visualization. """
@@ -432,6 +482,41 @@
config['viewport'] = latest_revision.config['viewport']
return config
+
+ # -- Helper functions --
+
+ def _create_visualization( self, trans, title, type, dbkey, slug=None, annotation=None ):
+ """ Create visualization but not first revision. Returns Visualization object. """
+ user = trans.get_user()
+
+ # Error checking.
+ title_err = slug_err = ""
+ if not title:
+ title_err = "visualization name is required"
+ elif slug and not VALID_SLUG_RE.match( slug ):
+ slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
+ elif slug and trans.sa_session.query( trans.model.Visualization ).filter_by( user=user, slug=slug, deleted=False ).first():
+ slug_err = "visualization identifier must be unique"
+
+ if title_err or slug_err:
+ return { 'title_err': title_err, 'slug_err': slug_err }
+
+
+ # Create visualization
+ visualization = trans.model.Visualization( user=user, title=title, dbkey=dbkey, type=type )
+ if slug:
+ visualization.slug = slug
+ else:
+ self.create_item_slug( trans.sa_session, visualization )
+ if annotation:
+ annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
+ self.add_item_annotation( trans.sa_session, trans.user, visualization, annotation )
+
+ session = trans.sa_session
+ session.add( visualization )
+ session.flush()
+
+ return visualization
class UsesStoredWorkflow( SharableItemSecurity ):
""" Mixin for controllers that use StoredWorkflow objects. """
@@ -1255,7 +1340,9 @@
class Sharable:
""" Mixin for a controller that manages an item that can be shared. """
- # Implemented methods.
+
+ # -- Implemented methods. --
+
@web.expose
@web.require_login( "share Galaxy items" )
def set_public_username( self, trans, id, username, **kwargs ):
@@ -1268,42 +1355,50 @@
trans.sa_session.flush
return self.sharing( trans, id, **kwargs )
- # Abstract methods.
+ # -- Abstract methods. --
+
@web.expose
@web.require_login( "modify Galaxy items" )
def set_slug_async( self, trans, id, new_slug ):
""" Set item slug asynchronously. """
raise "Unimplemented Method"
+
@web.expose
@web.require_login( "share Galaxy items" )
def sharing( self, trans, id, **kwargs ):
""" Handle item sharing. """
raise "Unimplemented Method"
+
@web.expose
@web.require_login( "share Galaxy items" )
def share( self, trans, id=None, email="", **kwd ):
""" Handle sharing an item with a particular user. """
raise "Unimplemented Method"
+
@web.expose
def display_by_username_and_slug( self, trans, username, slug ):
""" Display item by username and slug. """
raise "Unimplemented Method"
- @web.expose
+
@web.json
@web.require_login( "get item name and link" )
def get_name_and_link_async( self, trans, id=None ):
""" Returns item's name and link. """
raise "Unimplemented Method"
+
@web.expose
@web.require_login("get item content asynchronously")
def get_item_content_async( self, trans, id ):
""" Returns item content in HTML format. """
raise "Unimplemented Method"
- # Helper methods.
+
+ # -- Helper methods. --
+
def _make_item_accessible( self, sa_session, item ):
""" Makes item accessible--viewable and importable--and sets item's slug. Does not flush/commit changes, however. Item must have name, user, importable, and slug attributes. """
item.importable = True
self.create_item_slug( sa_session, item )
+
def create_item_slug( self, sa_session, item ):
""" Create item slug. Slug is unique among user's importable items for item's class. Returns true if item's slug was set; false otherwise. """
if item.slug is None or item.slug == "":
@@ -1323,12 +1418,13 @@
slug = slug_base
count = 1
while sa_session.query( item.__class__ ).filter_by( user=item.user, slug=slug, importable=True ).count() != 0:
- # Slug taken; choose a new slug based on count. This approach can handle numerous histories with the same name gracefully.
+ # Slug taken; choose a new slug based on count. This approach can handle numerous items with the same name gracefully.
slug = '%s-%i' % ( slug_base, count )
count += 1
item.slug = slug
return True
return False
+
def get_item( self, trans, id ):
""" Return item based on id. """
raise "Unimplemented Method"
diff -r f4c4ba7be3d17fbf7924e9f8d6ae005960fcc4c0 -r fdc3f20a46d3af6ac89bd02baef3db09eed3f235 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -163,7 +163,7 @@
def apply_query_filter( self, trans, query, **kwargs ):
return query.filter( self.model_class.user_id == trans.user.id )
-class TracksController( BaseUIController, UsesVisualization, UsesHistoryDatasetAssociation ):
+class TracksController( BaseUIController, UsesVisualization, UsesHistoryDatasetAssociation, Sharable ):
"""
Controller for track browser interface. Handles building a new browser from
datasets in the current history, and display of the resulting browser.
@@ -411,83 +411,8 @@
return result
@web.json
- def save( self, trans, **kwargs ):
- session = trans.sa_session
- vis_id = "undefined"
- if 'vis_id' in kwargs:
- vis_id = kwargs['vis_id'].strip('"')
- dbkey = kwargs['dbkey']
- # Lookup or create Visualization object
- if vis_id == "undefined": # new vis
- vis = model.Visualization()
- vis.user = trans.user
- vis.title = kwargs['title']
- vis.type = "trackster"
- vis.dbkey = dbkey
- session.add( vis )
- else:
- decoded_id = trans.security.decode_id( vis_id )
- vis = session.query( model.Visualization ).get( decoded_id )
- # Decode the payload
- decoded_payload = simplejson.loads( kwargs['payload'] )
- # Create new VisualizationRevision that will be attached to the viz
- vis_rev = model.VisualizationRevision()
- vis_rev.visualization = vis
- vis_rev.title = vis.title
- vis_rev.dbkey = dbkey
-
- def unpack_track( track_json ):
- """ Unpack a track from its json. """
- return {
- "dataset_id": trans.security.decode_id( track_json['dataset_id'] ),
- "hda_ldda": track_json.get('hda_ldda', "hda"),
- "name": track_json['name'],
- "track_type": track_json['track_type'],
- "prefs": track_json['prefs'],
- "mode": track_json['mode'],
- "filters": track_json['filters'],
- "tool_state": track_json['tool_state']
- }
-
- def unpack_collection( collection_json ):
- """ Unpack a collection from its json. """
- unpacked_drawables = []
- drawables = collection_json[ 'drawables' ]
- for drawable_json in drawables:
- if 'track_type' in drawable_json:
- drawable = unpack_track( drawable_json )
- else:
- drawable = unpack_collection( drawable_json )
- unpacked_drawables.append( drawable )
- return {
- "name": collection_json.get( 'name', '' ),
- "obj_type": collection_json[ 'obj_type' ],
- "drawables": unpacked_drawables,
- "prefs": collection_json.get( 'prefs' , [] ),
- "filters": collection_json.get( 'filters', None )
- }
-
- # TODO: unpack and validate bookmarks:
- def unpack_bookmarks( bookmarks_json ):
- return bookmarks_json
-
- # Unpack and validate view content.
- view_content = unpack_collection( decoded_payload[ 'view' ] )
- bookmarks = unpack_bookmarks( decoded_payload[ 'bookmarks' ] )
- vis_rev.config = { "view": view_content, "bookmarks": bookmarks }
- # Viewport from payload
- if 'viewport' in decoded_payload:
- chrom = decoded_payload['viewport']['chrom']
- start = decoded_payload['viewport']['start']
- end = decoded_payload['viewport']['end']
- overview = decoded_payload['viewport']['overview']
- vis_rev.config[ "viewport" ] = { 'chrom': chrom, 'start': start, 'end': end, 'overview': overview }
-
- vis.latest_revision = vis_rev
- session.add( vis_rev )
- session.flush()
- encoded_id = trans.security.encode_id(vis.id)
- return { "vis_id": encoded_id, "url": url_for( action='browser', id=encoded_id ) }
+ def save( self, trans, config, type, id=None, title=None, dbkey=None, annotation=None ):
+ return self.save_visualization( trans, from_json_string( config ), type, id, title, dbkey, annotation )
@web.expose
@web.require_login( "see all available libraries" )
diff -r f4c4ba7be3d17fbf7924e9f8d6ae005960fcc4c0 -r fdc3f20a46d3af6ac89bd02baef3db09eed3f235 templates/tracks/browser.mako
--- a/templates/tracks/browser.mako
+++ b/templates/tracks/browser.mako
@@ -209,10 +209,11 @@
url: "${h.url_for( action='save' )}",
type: "POST",
data: {
- 'vis_id': view.vis_id,
+ 'id': view.vis_id,
'title': view.name,
'dbkey': view.dbkey,
- 'payload': JSON.stringify(payload)
+ 'type': 'trackster',
+ 'config': JSON.stringify(payload)
},
dataType: "json",
success: function(vis_info) {
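
The unpack_track/unpack_collection helpers removed from tracks.py (and now living in save_visualization) walk a nested drawable tree recursively. A standalone toy run of the same recursion, trimmed to a few fields and with trans.security.decode_id replaced by int(); the sample payload is invented:

    def unpack_track(track_json, decode_id=int):
        # decode_id stands in for trans.security.decode_id.
        return {
            "dataset_id": decode_id(track_json["dataset_id"]),
            "hda_ldda": track_json.get("hda_ldda", "hda"),
            "name": track_json["name"],
        }

    def unpack_collection(collection_json, decode_id=int):
        unpacked = []
        for drawable in collection_json["drawables"]:
            if "track_type" in drawable:
                unpacked.append(unpack_track(drawable, decode_id))
            else:
                unpacked.append(unpack_collection(drawable, decode_id))
        return {
            "name": collection_json.get("name", ""),
            "obj_type": collection_json["obj_type"],
            "drawables": unpacked,
        }

    sample = {
        "obj_type": "View",
        "name": "top level",
        "drawables": [
            {"obj_type": "DrawableGroup", "name": "group", "drawables": [
                {"track_type": "LineTrack", "dataset_id": "7", "name": "coverage"},
            ]},
        ],
    }
    print(unpack_collection(sample))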
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Provide a modified check_galaxy.py that is suitable for use with nagios.
by Bitbucket 26 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f4c4ba7be3d1/
changeset: f4c4ba7be3d1
user: natefoo
date: 2012-05-26 21:00:36
summary: Provide a modified check_galaxy.py that is suitable for use with nagios.
affected #: 3 files
diff -r 62bdb265d3007d4f761a1defa82a44d888c30bfd -r f4c4ba7be3d17fbf7924e9f8d6ae005960fcc4c0 contrib/nagios/README
--- /dev/null
+++ b/contrib/nagios/README
@@ -0,0 +1,1 @@
+Nagios checks for Galaxy. check_galaxy is used to call check_galaxy.py.
diff -r 62bdb265d3007d4f761a1defa82a44d888c30bfd -r f4c4ba7be3d17fbf7924e9f8d6ae005960fcc4c0 contrib/nagios/check_galaxy
--- /dev/null
+++ b/contrib/nagios/check_galaxy
@@ -0,0 +1,40 @@
+#!/bin/sh
+
+if [ -z "$3" ]; then
+ echo "usage: check_galaxy <server><username><password>"
+ exit 3
+fi
+
+here=`dirname $0`
+var="$HOME/.check_galaxy/$1"
+
+touch $var/iterations
+iterations=`cat $var/iterations`
+if [ -z "$iterations" ]; then
+ iterations=0
+fi
+
+new_history=''
+if [ $iterations -gt 96 ]; then
+ new_history='-n'
+ echo 0 > $var/iterations
+else
+ echo `expr $iterations + 1` > $var/iterations
+fi
+
+date >> $var/log
+status=`python $here/check_galaxy.py $new_history $1 $2 $3 2>&1 | tee -a $var/log | tail -n 1`
+
+echo "$status"
+
+case "$status" in
+ "Exception: Tool never finished")
+ exit 1
+ ;;
+ "OK")
+ exit 0
+ ;;
+ *)
+ exit 2
+ ;;
+esac
diff -r 62bdb265d3007d4f761a1defa82a44d888c30bfd -r f4c4ba7be3d17fbf7924e9f8d6ae005960fcc4c0 contrib/nagios/check_galaxy.py
--- /dev/null
+++ b/contrib/nagios/check_galaxy.py
@@ -0,0 +1,393 @@
+#!/usr/bin/env python
+"""
+check_galaxy can be run by hand, although it is meant to run from cron
+via the check_galaxy.sh script in Galaxy's cron/ directory.
+"""
+
+import socket, sys, os, time, tempfile, filecmp, htmllib, formatter, getopt
+from user import home
+
+import warnings
+with warnings.catch_warnings():
+ warnings.simplefilter('ignore')
+ import twill
+ import twill.commands as tc
+
+# options
+if os.environ.has_key( "DEBUG" ):
+ debug = os.environ["DEBUG"]
+else:
+ debug = False
+
+test_data_dir = os.path.join( os.path.dirname( __file__ ), 'check_galaxy_data' )
+# what tools to run - not so pretty
+tools = {
+ "Extract+genomic+DNA+1" :
+ [
+ {
+ "inputs" :
+ (
+ {
+ "file_path" : os.path.join( test_data_dir, "1.bed" ),
+ "dbkey" : "hg17",
+ },
+
+ )
+ },
+ { "check_file" : os.path.join( test_data_dir, "extract_genomic_dna_out1.fasta" ) },
+ {
+ "tool_run_options" :
+ {
+ "input" : "1.bed",
+ "interpret_features" : "yes",
+ "index_source" : "cached",
+ "out_format" : "fasta"
+ }
+ }
+ ]
+}
+
+# handle arg(s)
+def usage():
+ print "usage: check_galaxy.py <server><username><password>"
+ sys.exit(1)
+
+try:
+ opts, args = getopt.getopt( sys.argv[1:], 'n' )
+except getopt.GetoptError, e:
+ print str(e)
+ usage()
+if len( args ) < 1:
+ usage()
+server = args[0]
+username = args[1]
+password = args[2]
+
+if server.endswith(".g2.bx.psu.edu"):
+ if debug:
+ print "Checking a PSU Galaxy server, using maint file"
+ maint = "/errordocument/502/%s/maint" % args[0].split('.', 1)[0]
+else:
+ maint = None
+
+new_history = False
+for o, a in opts:
+ if o == "-n":
+ if debug:
+ print "Specified -n, will create a new history"
+ new_history = True
+ else:
+ usage()
+
+# state information
+var_dir = os.path.join( home, ".check_galaxy", server )
+if not os.access( var_dir, os.F_OK ):
+ os.makedirs( var_dir, 0700 )
+
+# default timeout for twill browser is never
+socket.setdefaulttimeout(300)
+
+# user-agent
+tc.agent("Mozilla/5.0 (compatible; check_galaxy/0.1)")
+tc.config('use_tidy', 0)
+
+class Browser:
+
+ def __init__(self):
+ self.server = server
+ self.maint = maint
+ self.tool = None
+ self.tool_opts = None
+ self.id = None
+ self.status = None
+ self.check_file = None
+ self.hid = None
+ self.cookie_jar = os.path.join( var_dir, "cookie_jar" )
+ dprint("cookie jar path: %s" % self.cookie_jar)
+ if not os.access(self.cookie_jar, os.R_OK):
+ dprint("no cookie jar at above path, creating")
+ tc.save_cookies(self.cookie_jar)
+ tc.load_cookies(self.cookie_jar)
+
+ def get(self, path):
+ tc.go("http://%s%s" % (self.server, path))
+ tc.code(200)
+
+ def reset(self):
+ self.tool = None
+ self.tool_opts = None
+ self.id = None
+ self.status = None
+ self.check_file = None
+ self.delete_datasets()
+ self.get("/root/history")
+ p = didParser()
+ p.feed(tc.browser.get_html())
+ if len(p.dids) > 0:
+ print "Remaining datasets ids:", " ".join( p.dids )
+ raise Exception, "History still contains datasets after attempting to delete them"
+ if new_history:
+ self.get("/history/delete_current")
+ tc.save_cookies(self.cookie_jar)
+
+ def check_redir(self, url):
+ try:
+ tc.get_browser()._browser.set_handle_redirect(False)
+ tc.go(url)
+ tc.code(302)
+ tc.get_browser()._browser.set_handle_redirect(True)
+ dprint( "%s is returning redirect (302)" % url )
+ return(True)
+ except twill.errors.TwillAssertionError, e:
+ tc.get_browser()._browser.set_handle_redirect(True)
+ dprint( "%s is not returning redirect (302): %s" % (url, e) )
+ code = tc.browser.get_code()
+ if code == 502:
+ is_maint = self.check_maint()
+ if is_maint:
+ dprint( "Galaxy is down, but a maint file was found, so not sending alert" )
+ sys.exit(0)
+ else:
+ print "Galaxy is down (code 502)"
+ sys.exit(1)
+ return(False)
+
+ # checks for a maint file
+ def check_maint(self):
+ if self.maint is None:
+ #dprint( "Warning: unable to check maint file for %s" % self.server )
+ return(False)
+ try:
+ self.get(self.maint)
+ return(True)
+ except twill.errors.TwillAssertionError, e:
+ return(False)
+
+ def login(self, user, pw):
+ self.get("/user/login")
+ tc.fv("1", "email", user)
+ tc.fv("1", "password", pw)
+ tc.submit("Login")
+ tc.code(200)
+ if len(tc.get_browser().get_all_forms()) > 0:
+ # uh ohs, fail
+ p = userParser()
+ p.feed(tc.browser.get_html())
+ if p.no_user:
+ dprint("user does not exist, will try creating")
+ self.create_user(user, pw)
+ elif p.bad_pw:
+ raise Exception, "Password is incorrect"
+ else:
+ raise Exception, "Unknown error logging in"
+ tc.save_cookies(self.cookie_jar)
+
+ def create_user(self, user, pw):
+ self.get("/user/create")
+ tc.fv("1", "email", user)
+ tc.fv("1", "password", pw)
+ tc.fv("1", "confirm", pw)
+ tc.submit("Submit")
+ tc.code(200)
+ if len(tc.get_browser().get_all_forms()) > 0:
+ p = userParser()
+ p.feed(tc.browser.get_html())
+ if p.already_exists:
+ raise Exception, 'The user you were trying to create already exists'
+
+ def upload(self, input):
+ self.get("/tool_runner/index?tool_id=upload1")
+ tc.fv("1","file_type", "bed")
+ tc.fv("1","dbkey", input.get('dbkey', '?'))
+ tc.formfile("1","file_data", input['file_path'])
+ tc.submit("runtool_btn")
+ tc.code(200)
+
+ def runtool(self):
+ self.get("/tool_runner/index?tool_id=%s" % self.tool)
+ for k, v in self.tool_opts.items():
+ tc.fv("1", k, v)
+ tc.submit("runtool_btn")
+ tc.code(200)
+
+ def wait(self):
+ sleep_amount = 1
+ count = 0
+ maxiter = 16
+ while count < maxiter:
+ count += 1
+ self.get("/root/history")
+ page = tc.browser.get_html()
+ if page.find( '<!-- running: do not change this comment, used by TwillTestCase.wait -->' ) > -1:
+ time.sleep( sleep_amount )
+ sleep_amount += 1
+ else:
+ break
+ if count == maxiter:
+ raise Exception, "Tool never finished"
+
+ def check_status(self):
+ self.get("/root/history")
+ p = historyParser()
+ p.feed(tc.browser.get_html())
+ if p.status != "ok":
+ self.get("/datasets/%s/stderr" % p.id)
+ print tc.browser.get_html()
+ raise Exception, "HDA %s NOT OK: %s" % (p.id, p.status)
+ self.id = p.id
+ self.status = p.status
+ #return((p.id, p.status))
+
+ def diff(self):
+ self.get("/datasets/%s/display?to_ext=%s" % (self.id, self.tool_opts.get('out_format', 'fasta')))
+ data = tc.browser.get_html()
+ tmp = tempfile.mkstemp()
+ dprint("tmp file: %s" % tmp[1])
+ tmpfh = os.fdopen(tmp[0], 'w')
+ tmpfh.write(data)
+ tmpfh.close()
+ if filecmp.cmp(tmp[1], self.check_file):
+ dprint("Tool output is as expected")
+ else:
+ if not debug:
+ os.remove(tmp[1])
+ raise Exception, "Tool output differs from expected"
+ if not debug:
+ os.remove(tmp[1])
+
+ def delete_datasets(self):
+ self.get("/root/history")
+ p = didParser()
+ p.feed(tc.browser.get_html())
+ dids = p.dids
+ for did in dids:
+ self.get("/datasets/%s/delete" % did)
+
+ def check_if_logged_in(self):
+ self.get("/user?cntrller=user")
+ p = loggedinParser()
+ p.feed(tc.browser.get_html())
+ return p.logged_in
+
+class userParser(htmllib.HTMLParser):
+ def __init__(self):
+ htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+ self.in_span = False
+ self.in_div = False
+ self.no_user = False
+ self.bad_pw = False
+ self.already_exists = False
+ def start_span(self, attrs):
+ self.in_span = True
+ def start_div(self, attrs):
+ self.in_div = True
+ def end_span(self):
+ self.in_span = False
+ def end_div(self):
+ self.in_div = False
+ def handle_data(self, data):
+ if self.in_span or self.in_div:
+ if data == "No such user (please note that login is case sensitive)":
+ self.no_user = True
+ elif data == "Invalid password":
+ self.bad_pw = True
+ elif data == "User with that email already exists":
+ self.already_exists = True
+
+class historyParser(htmllib.HTMLParser):
+ def __init__(self):
+ htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+ self.status = None
+ self.id = None
+ def start_div(self, attrs):
+ # find the top history item
+ for i in attrs:
+ if i[0] == "class" and i[1].startswith("historyItemWrapper historyItem historyItem-"):
+ self.status = i[1].rsplit("historyItemWrapper historyItem historyItem-", 1)[1]
+ dprint("status: %s" % self.status)
+ if i[0] == "id" and i[1].startswith("historyItem-"):
+ self.id = i[1].rsplit("historyItem-", 1)[1]
+ dprint("id: %s" % self.id)
+ if self.status is not None:
+ self.reset()
+
+class didParser(htmllib.HTMLParser):
+ def __init__(self):
+ htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+ self.dids = []
+ def start_div(self, attrs):
+ for i in attrs:
+ if i[0] == "id" and i[1].startswith("historyItemContainer-"):
+ self.dids.append( i[1].rsplit("historyItemContainer-", 1)[1] )
+ dprint("got a dataset id: %s" % self.dids[-1])
+
+class loggedinParser(htmllib.HTMLParser):
+ def __init__(self):
+ htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+ self.in_p = False
+ self.logged_in = False
+ def start_p(self, attrs):
+ self.in_p = True
+ def end_p(self):
+ self.in_p = False
+ def handle_data(self, data):
+ if self.in_p:
+ if data == "You are currently not logged in.":
+ self.logged_in = False
+ elif data.startswith( "You are currently logged in as " ):
+ self.logged_in = True
+
+def dprint(str):
+ if debug:
+ print str
+
+# do stuff here
+if __name__ == "__main__":
+
+ dprint("checking %s" % server)
+
+ b = Browser()
+
+ # login (or not)
+ if b.check_if_logged_in():
+ dprint("we are already logged in (via cookies), hooray!")
+ else:
+ dprint("not logged in... logging in")
+ b.login(username, password)
+
+ for tool, params in tools.iteritems():
+
+ check_file = ""
+
+ # make sure history and state is clean
+ b.reset()
+ b.tool = tool
+
+ # get all the tool run conditions
+ for dict in params:
+ for k, v in dict.items():
+ if k == 'inputs':
+ for input in v:
+ b.upload(input)
+ b.wait()
+ elif k == 'check_file':
+ b.check_file = v
+ elif k == 'tool_run_options':
+ b.tool_opts = v
+ else:
+ raise Exception, "Unknown key in tools dict: %s" % k
+
+ b.runtool()
+ b.wait()
+ b.check_status()
+ b.diff()
+ b.delete_datasets()
+
+ # by this point, everything else has succeeded. there should be no maint.
+ is_maint = b.check_maint()
+ if is_maint:
+ print "Galaxy is up and fully functional, but a maint file is in place."
+ sys.exit(1)
+
+ print "OK"
+ sys.exit(0)
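
check_galaxy.py only prints a status line and exits; the translation into Nagios states is done by the case statement in the check_galaxy wrapper above (OK -> 0, "Exception: Tool never finished" -> 1, anything else -> 2, bad usage -> 3). The same mapping expressed in Python, purely as a reference; the function name and stdin usage are illustrative:

    import sys

    OK, WARNING, CRITICAL, UNKNOWN = 0, 1, 2, 3   # standard Nagios exit codes

    def nagios_code(status_line):
        # Mirrors the wrapper's case statement above.
        if status_line == "OK":
            return OK
        if status_line == "Exception: Tool never finished":
            return WARNING
        return CRITICAL

    if __name__ == "__main__":
        # e.g. pipe the last line of check_galaxy.py output into this script.
        sys.exit(nagios_code(sys.stdin.readline().strip()))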
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Infrastructure for managing visualizations: (a) support for creating multiple types of visualizations; (b) a placeholder visualization API controller; (c) abstract visualization creation so that it can be reused in multiple controllers.
by Bitbucket 25 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/62bdb265d300/
changeset: 62bdb265d300
user: jgoecks
date: 2012-05-26 00:23:43
summary: Infrastructure for managing visualizations: (a) support for creating multiple types of visualizations; (b) a placeholder visualization API controller; (c) abstract visualization creation so that it can be reused in multiple controllers.
affected #: 6 files
diff -r 4288ffb1bd16d8f014dc06759c8073919e7347c8 -r 62bdb265d3007d4f761a1defa82a44d888c30bfd lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2445,12 +2445,13 @@
self.user = None
class Visualization( object ):
- def __init__( self, user=None, type=None, title=None, dbkey=None, latest_revision=None ):
+ def __init__( self, user=None, type=None, title=None, dbkey=None, slug=None, latest_revision=None ):
self.id = None
self.user = user
self.type = type
self.title = title
self.dbkey = dbkey
+ self.slug = slug
self.latest_revision = latest_revision
self.revisions = []
if self.latest_revision:
diff -r 4288ffb1bd16d8f014dc06759c8073919e7347c8 -r 62bdb265d3007d4f761a1defa82a44d888c30bfd lib/galaxy/visualization/__init__.py
--- a/lib/galaxy/visualization/__init__.py
+++ b/lib/galaxy/visualization/__init__.py
@@ -1,3 +1,3 @@
"""
Package for Galaxy visualization plugins.
-"""
\ No newline at end of file
+"""
diff -r 4288ffb1bd16d8f014dc06759c8073919e7347c8 -r 62bdb265d3007d4f761a1defa82a44d888c30bfd lib/galaxy/web/api/visualizations.py
--- /dev/null
+++ b/lib/galaxy/web/api/visualizations.py
@@ -0,0 +1,28 @@
+from galaxy import web
+from galaxy.web.base.controller import BaseController, BaseAPIController
+
+class VisualizationsController( BaseAPIController ):
+ """
+ RESTful controller for interactions with visualizations.
+ """
+
+ @web.expose_api
+ def index( self, trans, **kwds ):
+ """
+ GET /api/visualizations:
+ """
+ pass
+
+ @web.json
+ def show( self, trans, id, **kwd ):
+ """
+ GET /api/visualizations/{viz_id}
+ """
+ pass
+
+ @web.expose_api
+ def create( self, trans, payload, **kwd ):
+ """
+ POST /api/visualizations
+ """
+ pass
\ No newline at end of file
diff -r 4288ffb1bd16d8f014dc06759c8073919e7347c8 -r 62bdb265d3007d4f761a1defa82a44d888c30bfd lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -7,6 +7,7 @@
from galaxy import config, tools, web, util
from galaxy.util import inflector
from galaxy.util.hash_util import *
+from galaxy.util.sanitize_html import sanitize_html
from galaxy.web import error, form, url_for
from galaxy.model.orm import *
from galaxy.workflow.modules import *
@@ -275,8 +276,45 @@
class UsesVisualization( SharableItemSecurity ):
""" Mixin for controllers that use Visualization objects. """
+
+ viz_types = [ "trackster", "circos" ]
len_files = None
+
+ def create_visualization( self, trans, title, slug, type, dbkey, annotation=None, config={} ):
+ user = trans.get_user()
+
+ # Error checking.
+ title_err = slug_err = ""
+ if not title:
+ title_err = "visualization name is required"
+ elif not slug:
+ slug_err = "visualization id is required"
+ elif not VALID_SLUG_RE.match( slug ):
+ slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
+ elif trans.sa_session.query( trans.model.Visualization ).filter_by( user=user, slug=slug, deleted=False ).first():
+ slug_err = "visualization id must be unique"
+
+ if title_err or slug_err:
+ return { 'title_err': title_err, 'slug_err': slug_err }
+
+ # Create visualization
+ visualization = trans.model.Visualization( user=user, title=title, slug=slug, dbkey=dbkey, type=type )
+ if annotation:
+ annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
+ self.add_item_annotation( trans.sa_session, trans.user, visualization, annotation )
+
+ # And the first visualization revision
+ revision = trans.model.VisualizationRevision( visualization=visualization, title=title, config={}, dbkey=dbkey )
+ visualization.latest_revision = revision
+
+ # Persist
+ session = trans.sa_session
+ session.add(visualization)
+ session.add(revision)
+ session.flush()
+
+ return visualization
def _get_dbkeys( self, trans ):
""" Returns all valid dbkeys that a user can use in a visualization. """
diff -r 4288ffb1bd16d8f014dc06759c8073919e7347c8 -r 62bdb265d3007d4f761a1defa82a44d888c30bfd lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -132,6 +132,7 @@
webapp.api_mapper.resource_with_deleted( 'quota', 'quotas', path_prefix='/api' )
webapp.api_mapper.resource( 'tool', 'tools', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' )
+ webapp.api_mapper.resource( 'visualization', 'visualizations', path_prefix='/api' )
webapp.api_mapper.resource( 'workflow', 'workflows', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'history', 'histories', path_prefix='/api' )
#webapp.api_mapper.connect( 'run_workflow', '/api/workflow/{workflow_id}/library/{library_id}', controller='workflows', action='run', workflow_id=None, library_id=None, conditions=dict(method=["GET"]) )
diff -r 4288ffb1bd16d8f014dc06759c8073919e7347c8 -r 62bdb265d3007d4f761a1defa82a44d888c30bfd lib/galaxy/web/controllers/visualization.py
--- a/lib/galaxy/web/controllers/visualization.py
+++ b/lib/galaxy/web/controllers/visualization.py
@@ -4,7 +4,6 @@
from galaxy.web.framework.helpers import time_ago, grids, iff
from galaxy.util.sanitize_html import sanitize_html
-
class VisualizationListGrid( grids.Grid ):
# Grid definition
title = "Saved Visualizations"
@@ -14,6 +13,7 @@
columns = [
grids.TextColumn( "Title", key="title", attach_popup=True,
link=( lambda item: dict( controller="tracks", action="browser", id=item.id ) ) ),
+ grids.TextColumn( "Type", key="type" ),
grids.TextColumn( "Dbkey", key="dbkey" ),
grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.VisualizationTagAssociation, filterable="advanced", grid_name="VisualizationListGrid" ),
grids.SharingStatusColumn( "Sharing", key="sharing", filterable="advanced", sortable=False ),
@@ -346,51 +346,31 @@
@web.expose
@web.require_login( "create visualizations" )
- def create( self, trans, visualization_title="", visualization_slug="", visualization_annotation="", visualization_dbkey="" ):
+ def create( self, trans, visualization_title="", visualization_slug="", visualization_annotation="", visualization_dbkey="",
+ visualization_type="" ):
"""
Create a new visualization
"""
- user = trans.get_user()
visualization_title_err = visualization_slug_err = visualization_annotation_err = ""
if trans.request.method == "POST":
- if not visualization_title:
- visualization_title_err = "visualization name is required"
- elif not visualization_slug:
- visualization_slug_err = "visualization id is required"
- elif not VALID_SLUG_RE.match( visualization_slug ):
- visualization_slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
- elif trans.sa_session.query( model.Visualization ).filter_by( user=user, slug=visualization_slug, deleted=False ).first():
- visualization_slug_err = "visualization id must be unique"
+ rval = self.create_visualization( trans, title=visualization_title,
+ slug=visualization_slug,
+ annotation=visualization_annotation,
+ dbkey=visualization_dbkey,
+ type=visualization_type )
+ if isinstance( rval, dict ):
+ # Found error creating viz.
+ visualization_title_err = rval[ 'title_err' ]
+ visualization_slug_err = rval[ 'slug_err' ]
else:
- # Create the new stored visualization
- visualization = model.Visualization()
- visualization.title = visualization_title
- visualization.slug = visualization_slug
- visualization.dbkey = visualization_dbkey
- visualization.type = 'trackster' # HACK: set visualization type to trackster since it's the only viz
- visualization_annotation = sanitize_html( visualization_annotation, 'utf-8', 'text/html' )
- self.add_item_annotation( trans.sa_session, trans.get_user(), visualization, visualization_annotation )
- visualization.user = user
-
- # And the first (empty) visualization revision
- visualization_revision = model.VisualizationRevision()
- visualization_revision.title = visualization_title
- visualization_revision.config = {}
- visualization_revision.dbkey = visualization_dbkey
- visualization_revision.visualization = visualization
- visualization.latest_revision = visualization_revision
-
- # Persist
- session = trans.sa_session
- session.add(visualization)
- session.add(visualization_revision)
- session.flush()
-
+ # Successfully created viz.
return trans.response.send_redirect( web.url_for( action='list' ) )
-
+
+ viz_type_options = [ ( t, t ) for t in self.viz_types ]
return trans.show_form(
web.FormBuilder( web.url_for(), "Create new visualization", submit_text="Submit" )
.add_text( "visualization_title", "Visualization title", value=visualization_title, error=visualization_title_err )
+ .add_select( "visualization_type", "Type", options=viz_type_options, error=None )
.add_text( "visualization_slug", "Visualization identifier", value=visualization_slug, error=visualization_slug_err,
help="""A unique identifier that will be used for
public links to this visualization. A default is generated
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
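The refactor above moves the title/slug validation out of the controller and into create_visualization(), which signals failure by returning a dict of field errors rather than a Visualization object. A minimal standalone approximation of that contract follows; VALID_SLUG_RE and the uniqueness check are assumptions reconstructed from the removed controller code and its error messages.

import re

# Assumed pattern, inferred from the error message in the removed code:
# "only lowercase letters, numbers, and the '-' character".
VALID_SLUG_RE = re.compile( r'^[a-z0-9\-]+$' )

def validate_title_and_slug( title, slug, existing_slugs=() ):
    """Return a dict of field errors; both values are empty when the inputs pass."""
    title_err = slug_err = ''
    if not title:
        title_err = 'visualization name is required'
    elif not slug:
        slug_err = 'visualization id is required'
    elif not VALID_SLUG_RE.match( slug ):
        slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
    elif slug in existing_slugs:
        # Stand-in for the database query against the user's existing visualizations.
        slug_err = 'visualization id must be unique'
    return { 'title_err': title_err, 'slug_err': slug_err }

# In the real helper a dict is returned only when one of these errors is set;
# otherwise the persisted Visualization object itself is returned.
print( validate_title_and_slug( 'My tracks', 'my-tracks' ) )
print( validate_title_and_slug( 'My tracks', 'My Tracks!' ) )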
commit/galaxy-central: jgoecks: Partial implementation of Circos visualization using D3.
by Bitbucket 25 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4288ffb1bd16/
changeset: 4288ffb1bd16
user: jgoecks
date: 2012-05-25 21:13:25
summary: Partial implementation of Circos visualization using D3.
affected #: 7 files
Diff too large to display.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: guru: Suppressed R package-loading messages which were previously being written into stderr
by Bitbucket 25 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8cbbb813f286/
changeset: 8cbbb813f286
user: guru
date: 2012-05-25 16:54:03
summary: Suppressed R package-loading messages which were previously being written into stderr
affected #: 1 file
diff -r 8335baa6881520cabfd3eb41986d7df203dc3183 -r 8cbbb813f2865de22ff671c980816cde5e225be5 tools/regVariation/logistic_regression_vif.py
--- a/tools/regVariation/logistic_regression_vif.py
+++ b/tools/regVariation/logistic_regression_vif.py
@@ -5,16 +5,10 @@
from rpy import *
import numpy
-#export PYTHONPATH=~/galaxy/lib/
-
def stop_err(msg):
sys.stderr.write(msg)
sys.exit()
-#infile = 'logreg_inp.tab'
-#y_col=3
-#x_cols=[1,2,3]
-#outfile='logreg_out.txt'
-#python logistic_regression_vif.py logreg_inp.tab 4 1,2,3 logreg_out2.tabular # running test
+
infile = sys.argv[1]
y_col = int(sys.argv[2])-1
x_cols = sys.argv[3].split(',')
@@ -84,17 +78,11 @@
set_default_mode(NO_CONVERSION)
try:
linear_model = r.glm(r("y ~ x"), data = r.na_exclude(dat),family="binomial")
- #r('library(car)')
- #r.assign('dat',dat)
- #r.assign('ncols',len(x_cols))
- #r.vif(r('glm(dat$y ~ ., data = na.exclude(data.frame(as.matrix(dat$x,ncol=ncols))->datx),family="binomial")')).as_py()
-
except RException, rex:
stop_err("Error performing logistic regression on the input data.\nEither the response column or one of the predictor columns contain only non-numeric or invalid values.")
if len(x_cols)>1:
try:
-
- r('library(car)')
+ r('suppressPackageStartupMessages(library(car))')
r.assign('dat',dat)
r.assign('ncols',len(x_cols))
vif=r.vif(r('glm(dat$y ~ ., data = na.exclude(data.frame(as.matrix(dat$x,ncol=ncols))->datx),family="binomial")'))
@@ -163,9 +151,6 @@
rsq= r.round(float((null_deviance-residual_deviance)/null_deviance), digits=5)
null_deviance= r.round(float(null_deviance), digits=5)
residual_deviance= r.round(float(residual_deviance), digits=5)
-
- #rsq = r.round(float(rsq), digits=5)
-
except:
pass
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
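The one-line change above matters because, at this point in Galaxy's history, a tool run that writes anything to stderr is generally treated as failed, so even harmless package-loading banners can sink an otherwise successful job. Below is a small, Galaxy-independent illustration of that heuristic; the child command is a stand-in for the R script, not the tool itself.

import subprocess
import sys

# Stand-in child process that writes a harmless banner to stderr, much like an
# unsuppressed library() call in R would.
child = [ sys.executable, '-c', 'import sys; sys.stderr.write("Loading required package: car\\n")' ]

proc = subprocess.Popen( child, stdout=subprocess.PIPE, stderr=subprocess.PIPE )
out, err = proc.communicate()

if err:
    # The "any stderr output means failure" heuristic that the commit works around.
    print( 'stderr was not empty; a stderr-based check would mark this run as failed' )
else:
    print( 'stderr was clean' )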
commit/galaxy-central: fubar: Reverse order of job manager and instance in warning
by Bitbucket 25 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8335baa68815/
changeset: 8335baa68815
user: fubar
date: 2012-05-25 07:48:49
summary: Reverse order of job manager and instance in warning
affected #: 1 file
diff -r 3f1150fee33c6d7270549e0760fd84b7c35fdb50 -r 8335baa6881520cabfd3eb41986d7df203dc3183 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -2380,7 +2380,7 @@
msg = None
status = None
if self.app.config.job_manager != self.app.config.server_name:
- return trans.show_error_message( 'This Galaxy instance (%s) is not the job manager (%s). If using multiple servers, please directly access the job manager instance to manage jobs.' % ( self.app.config.job_manager,self.app.config.server_name) )
+ return trans.show_error_message( 'This Galaxy instance (%s) is not the job manager (%s). If using multiple servers, please directly access the job manager instance to manage jobs.' % (self.app.config.server_name, self.app.config.job_manager) )
job_ids = util.listify( stop )
if job_ids and stop_msg in [ None, '' ]:
msg = 'Please enter an error message to display to the user describing why the job was terminated'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: fubar: Fix for bogus double negative compare - allows job management from admin screen when only one job manager running
by Bitbucket 25 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3f1150fee33c/
changeset: 3f1150fee33c
user: fubar
date: 2012-05-25 07:40:44
summary: Fix for bogus double negative compare - allows job management from admin screen when only one job manager running
affected #: 1 file
diff -r 3fa05d052e820d21f1a6e5ae206d90edcc1a8782 -r 3f1150fee33c6d7270549e0760fd84b7c35fdb50 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -2379,8 +2379,8 @@
deleted = []
msg = None
status = None
- if not self.app.config.job_manager != self.app.config.server_name:
- return trans.show_error_message( 'This Galaxy instance is not the job manager. If using multiple servers, please directly access the job manager instance to manage jobs.' )
+ if self.app.config.job_manager != self.app.config.server_name:
+ return trans.show_error_message( 'This Galaxy instance (%s) is not the job manager (%s). If using multiple servers, please directly access the job manager instance to manage jobs.' % ( self.app.config.job_manager,self.app.config.server_name) )
job_ids = util.listify( stop )
if job_ids and stop_msg in [ None, '' ]:
msg = 'Please enter an error message to display to the user describing why the job was terminated'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
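The bug fixed above is easiest to see with concrete values. A standalone illustration of the old and new guards follows; the configuration names are taken from the diff and the helper function is hypothetical.

def blocked( job_manager, server_name, buggy=False ):
    """Return True if the admin jobs page would refuse to manage jobs."""
    if buggy:
        return not job_manager != server_name    # old double-negative guard
    return job_manager != server_name            # corrected comparison

# A single-process instance that is its own job manager: the old guard wrongly
# refused it, which is exactly the case this commit fixes.
print( blocked( 'main', 'main', buggy=True ) )    # True  (refused before the fix)
print( blocked( 'main', 'main', buggy=False ) )   # False (allowed after the fix)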
commit/galaxy-central: greg: Fix a typo in the community common controller.
by Bitbucket 24 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3fa05d052e82/
changeset: 3fa05d052e82
user: greg
date: 2012-05-24 22:47:25
summary: Fix a typo in the community common controller.
affected #: 1 file
diff -r 676546e7038ae833e524bbc4b4b26c81c71954bd -r 3fa05d052e820d21f1a6e5ae206d90edcc1a8782 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -774,7 +774,7 @@
break
if in_ctx:
tmp_tool_config = get_named_tmpfile_from_ctx( ctx, ctx_file, dir=work_dir )
- element_tree = util.parse_xml( tmp_config )
+ element_tree = util.parse_xml( tmp_tool_config )
element_tree_root = element_tree.getroot()
# Look for code files required by the tool config.
tmp_code_files = []
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Enhance setting metadata on tool shed repositories that include tools that require entries in the tool_data_table_conf.xml file.
by Bitbucket 24 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/676546e7038a/
changeset: 676546e7038a
user: greg
date: 2012-05-24 22:36:32
summary: Enhance setting metadata on tool shed repositories that include tools that require entries in the tool_data_table_conf.xml file.
affected #: 5 files
diff -r 95c05fcbbceb69e9100d8689694c22cc13d16b23 -r 676546e7038ae833e524bbc4b4b26c81c71954bd lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
@@ -8,6 +8,7 @@
from galaxy import eggs
import pkg_resources
+pkg_resources.require('ssh' )
pkg_resources.require( 'Fabric' )
from fabric.api import env, lcd, local
diff -r 95c05fcbbceb69e9100d8689694c22cc13d16b23 -r 676546e7038ae833e524bbc4b4b26c81c71954bd lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,4 +1,5 @@
import sys, os, tempfile, shutil, logging, string, urllib2
+import galaxy.tools.data
from datetime import date, datetime, timedelta
from time import strftime, gmtime
from galaxy import util
@@ -528,10 +529,11 @@
exported_workflow_dict = from_json_string( workflow_text )
if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
- # This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
- if tool_dependencies_config:
- metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
+ if 'tools' in metadata_dict:
+ # This step must be done after metadata for tools has been defined.
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
+ if tool_dependencies_config:
+ metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
return metadata_dict
def generate_tool_guid( repository_clone_url, tool ):
"""
@@ -1026,6 +1028,8 @@
# Reload the tool into the local list of repository_tools_tups.
repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( app )
return repository_tools_tups
def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups ):
"""
@@ -1398,6 +1402,9 @@
if uninstall:
# Write the current in-memory version of the integrated_tool_panel.xml file to disk.
trans.app.toolbox.write_integrated_tool_panel_config_file()
+def reset_tool_data_tables( app ):
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ app.tool_data_tables = galaxy.tools.data.ToolDataTableManager( app.config.tool_data_table_config_path )
def strip_path( fpath ):
file_path, file_name = os.path.split( fpath )
return file_name
diff -r 95c05fcbbceb69e9100d8689694c22cc13d16b23 -r 676546e7038ae833e524bbc4b4b26c81c71954bd lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -7,7 +7,7 @@
from galaxy.util.hash_util import *
from galaxy.util.shed_util import copy_sample_file, generate_datatypes_metadata, generate_tool_dependency_metadata, generate_tool_metadata
from galaxy.util.shed_util import generate_workflow_metadata, get_changectx_for_changeset, get_config, get_configured_ui, handle_sample_tool_data_table_conf_file
-from galaxy.util.shed_util import make_tmp_directory, NOT_TOOL_CONFIGS, strip_path, to_html_escaped, to_html_str, update_repository
+from galaxy.util.shed_util import make_tmp_directory, NOT_TOOL_CONFIGS, reset_tool_data_tables, strip_path, to_html_escaped, to_html_str, update_repository
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
from galaxy.model.orm import *
@@ -245,6 +245,8 @@
correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
invalid_files.append( ( xml_file_in_ctx, correction_msg ) )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
return sample_files_copied, can_set_metadata, invalid_files
def clean_repository_metadata( trans, id, changeset_revisions ):
# Delete all repository_metadata reecords associated with the repository that have a changeset_revision that is not in changeset_revisions.
@@ -437,16 +439,20 @@
work_dir = make_tmp_directory()
datatypes_config = get_config( 'datatypes_conf.xml', repo, repo_dir, ctx, work_dir )
if datatypes_config:
- metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
+ metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
sample_files = get_sample_files( repo, repo_dir, dir=work_dir )
+ # Handle the tool_data_table_conf.xml.sample file if it is included in the repository.
+ if 'tool_data_table_conf.xml.sample' in sample_files:
+ tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
+ error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
if sample_files:
trans.app.config.tool_data_path = work_dir
for filename in ctx:
# Find all tool configs.
ctx_file_name = strip_path( filename )
if ctx_file_name not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ):
- valid, tool = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir )
- if valid and tool is not None:
+ is_tool_config, valid, tool, error_message = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir )
+ if is_tool_config and valid and tool is not None:
sample_files_copied, can_set_metadata, invalid_files = check_tool_input_params( trans,
repo,
repo_dir,
@@ -484,9 +490,11 @@
os.unlink( os.path.join( original_tool_data_path, copied_file ) )
except:
pass
- elif tool is not None:
- # We have a tool config but it is invalid.
- invalid_files.append( ( ctx_file_name, 'Problems loading tool.' ) )
+ elif is_tool_config:
+ if not error_message:
+ error_message = 'Unknown problems loading tool.'
+ # We have a tool config but it is invalid or the tool does not properly load.
+ invalid_files.append( ( ctx_file_name, error_message ) )
invalid_tool_configs.append( ctx_file_name )
# Find all exported workflows.
elif filename.endswith( '.ga' ):
@@ -498,15 +506,18 @@
metadata_dict = generate_workflow_metadata( '', exported_workflow_dict, metadata_dict )
except Exception, e:
invalid_files.append( ( ctx_file_name, str( e ) ) )
- # Find tool_dependencies.xml if it exists. This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config( 'tool_dependencies.xml', repo, repo_dir, ctx, work_dir )
- if tool_dependencies_config:
- metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
+ if 'tools' in metadata_dict:
+ # Find tool_dependencies.xml if it exists. This step must be done after metadata for tools has been defined.
+ tool_dependencies_config = get_config( 'tool_dependencies.xml', repo, repo_dir, ctx, work_dir )
+ if tool_dependencies_config:
+ metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
if invalid_tool_configs:
metadata_dict [ 'invalid_tools' ] = invalid_tool_configs
if sample_files:
# Don't forget to reset the value of trans.app.config.tool_data_path!
trans.app.config.tool_data_path = original_tool_data_path
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
try:
shutil.rmtree( work_dir )
except:
@@ -737,9 +748,14 @@
repository = get_repository( trans, repository_id )
repo_files_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_files_dir )
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
tool = None
message = ''
work_dir = make_tmp_directory()
+ # Load entries into the tool_data_tables if the tool requires them.
+ tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
+ if tool_data_table_config:
+ error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
if changeset_revision == repository.tip:
try:
copied_tool_config = copy_file_from_disk( tool_config, repo_files_dir, work_dir )
@@ -750,7 +766,6 @@
else:
# Get the tool config file name from the hgweb url, something like: /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml
old_tool_config_file_name = tool_config.split( '/' )[ -1 ]
- ctx = get_changectx_for_changeset( repo, changeset_revision )
in_ctx = False
for ctx_file in ctx.files():
ctx_file_name = strip_path( ctx_file )
@@ -784,14 +799,18 @@
pass
else:
tool = None
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
try:
shutil.rmtree( work_dir )
except:
pass
return tool, message
def load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, dir ):
+ is_tool_config = False
tool = None
valid = False
+ error_message = ''
tmp_config = get_named_tmpfile_from_ctx( ctx, filename, dir=dir )
if not ( check_binary( tmp_config ) or check_image( tmp_config ) or check_gzip( tmp_config )[ 0 ]
or check_bz2( tmp_config )[ 0 ] or check_zip( tmp_config ) ):
@@ -799,11 +818,15 @@
# Make sure we're looking at a tool config and not a display application config or something else.
element_tree = util.parse_xml( tmp_config )
element_tree_root = element_tree.getroot()
- is_tool = element_tree_root.tag == 'tool'
+ is_tool_config = element_tree_root.tag == 'tool'
except Exception, e:
log.debug( "Error parsing %s, exception: %s" % ( tmp_config, str( e ) ) )
- is_tool = False
- if is_tool:
+ is_tool_config = False
+ if is_tool_config:
+ # Load entries into the tool_data_tables if the tool requires them.
+ tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', dir )
+ if tool_data_table_config:
+ error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
# Look for code files required by the tool config. The directory to which dir refers should be removed by the caller.
for code_elem in element_tree_root.findall( 'code' ):
code_file_name = code_elem.get( 'file' )
@@ -814,9 +837,17 @@
try:
tool = load_tool( trans, tmp_config )
valid = True
- except:
+ except KeyError, e:
valid = False
- return valid, tool
+ error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file. Upload a file ' % str( e )
+ error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct '
+ error_message += 'this error. '
+ except Exception, e:
+ valid = False
+ error_message = str( e )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
+ return is_tool_config, valid, tool, error_message
def new_tool_metadata_required( trans, id, metadata_dict ):
"""
Compare the last saved metadata for each tool in the repository with the new metadata
@@ -1049,13 +1080,11 @@
error_message, status = reset_all_metadata_on_repository( trans, id, **kwd )
if error_message:
# If there is an error, display it.
- message += '%s<br/>%s ' % ( message, error_message )
- status = 'error'
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
id=id,
- message=message,
- status=status ) )
+ message=error_message,
+ status='error' ) )
def update_for_browsing( trans, repository, current_working_dir, commit_message='' ):
# This method id deprecated, but we'll keep it around for a while in case we need it. The problem is that hg purge
# is not supported by the mercurial API.
diff -r 95c05fcbbceb69e9100d8689694c22cc13d16b23 -r 676546e7038ae833e524bbc4b4b26c81c71954bd lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -950,7 +950,12 @@
for filename in ctx:
# Find all tool configs in this repository changeset_revision.
if filename not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ):
- valid, tool = load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, work_dir )
+ is_tool_config, valid, tool, error_message = load_tool_from_tmp_directory( trans,
+ repo,
+ repo_dir,
+ ctx,
+ filename,
+ work_dir )
if valid and tool is not None:
tool_guids.append( generate_tool_guid( trans, repository, tool ) )
tool_guids.sort()
diff -r 95c05fcbbceb69e9100d8689694c22cc13d16b23 -r 676546e7038ae833e524bbc4b4b26c81c71954bd lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -3,7 +3,7 @@
from galaxy.model.orm import *
from galaxy.datatypes.checkers import *
from common import *
-from galaxy.util.shed_util import get_configured_ui, handle_sample_tool_data_table_conf_file
+from galaxy.util.shed_util import get_configured_ui, reset_tool_data_tables, handle_sample_tool_data_table_conf_file
from galaxy import eggs
eggs.require('mercurial')
@@ -168,6 +168,8 @@
status=status ) )
else:
status = 'error'
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
return trans.fill_template( '/webapps/community/repository/upload.mako',
repository=repository,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
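The recurring pattern in this changeset is to load a repository's tool_data_table_conf.xml.sample into the app's in-memory tool data tables just long enough to load and validate its tools, and then call reset_tool_data_tables() so temporary entries do not leak into later operations. Below is a Galaxy-independent sketch of that load-then-reset pattern; the class and names are illustrative stand-ins, not the ToolDataTableManager API.

class InMemoryTables( object ):
    """Illustrative stand-in for an in-memory tool data table registry."""
    def __init__( self ):
        self.tables = {}
    def load_sample( self, sample_entries ):
        # In Galaxy this step would parse a tool_data_table_conf.xml.sample file.
        self.tables.update( sample_entries )

class App( object ):
    pass

def reset_tables( app ):
    # Mirrors the intent of reset_tool_data_tables(): start again from the
    # empty baseline configuration, discarding temporary entries.
    app.tool_data_tables = InMemoryTables()

app = App()
reset_tables( app )
app.tool_data_tables.load_sample( { 'bowtie_indexes': [] } )   # temporary entries for validation
print( sorted( app.tool_data_tables.tables ) )                 # ['bowtie_indexes']
reset_tables( app )                                            # always reset afterward
print( sorted( app.tool_data_tables.tables ) )                 # []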
commit/galaxy-central: jgoecks: Full and proper sorting for GTF datasets: sort by, in order, transcript_id, chrom, and start.
by Bitbucket 24 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/95c05fcbbceb/
changeset: 95c05fcbbceb
user: jgoecks
date: 2012-05-24 19:26:48
summary: Full and proper sorting for GTF datasets: sort by, in order, transcript_id, chrom, and start.
affected #: 2 files
diff -r a0d6e382f2e198e46613705ad8a3962d0bc58fca -r 95c05fcbbceb69e9100d8689694c22cc13d16b23 lib/galaxy/datatypes/util/gff_util.py
--- a/lib/galaxy/datatypes/util/gff_util.py
+++ b/lib/galaxy/datatypes/util/gff_util.py
@@ -1,9 +1,12 @@
"""
Provides utilities for working with GFF files.
"""
+
+import copy
import pkg_resources; pkg_resources.require( "bx-python" )
from bx.intervals.io import *
from bx.tabular.io import Header, Comment
+from galaxy.util.odict import odict
class GFFInterval( GenomicInterval ):
"""
@@ -48,7 +51,8 @@
def __init__( self, reader, chrom_col=0, feature_col=2, start_col=3, end_col=4, \
strand_col=6, score_col=5, default_strand='.', fix_strand=False, intervals=[], \
raw_size=0 ):
- GFFInterval.__init__( self, reader, intervals[0].fields, chrom_col, feature_col, \
+ # Use copy so that first interval and feature do not share fields.
+ GFFInterval.__init__( self, reader, copy.deepcopy( intervals[0].fields ), chrom_col, feature_col, \
start_col, end_col, strand_col, score_col, default_strand, \
fix_strand=fix_strand )
self.intervals = intervals
@@ -356,4 +360,46 @@
for name, value in attrs.items():
attrs_strs.append( format_string % ( name, value ) )
return " ; ".join( attrs_strs )
-
\ No newline at end of file
+
+def read_unordered_gtf( iterator ):
+ """
+ Returns GTF features found in an iterator. GTF lines need not be ordered
+ or clustered for reader to work. Reader returns GFFFeature objects sorted
+ by transcript_id, chrom, and start position.
+ """
+
+ # Aggregate intervals by transcript_id.
+ feature_intervals = odict()
+ for count, line in enumerate( iterator ):
+ line_attrs = parse_gff_attributes( line.split('\t')[8] )
+ transcript_id = line_attrs[ 'transcript_id' ]
+ if transcript_id in feature_intervals:
+ feature = feature_intervals[ transcript_id ]
+ else:
+ feature = []
+ feature_intervals[ transcript_id ] = feature
+ feature.append( GFFInterval( None, line.split( '\t' ) ) )
+
+ # Create features.
+ chroms_features = {}
+ for count, intervals in enumerate( feature_intervals.values() ):
+ # Sort intervals by start position.
+ intervals.sort( lambda a,b: cmp( a.start, b.start ) )
+ feature = GFFFeature( None, intervals=intervals )
+ if feature.chrom not in chroms_features:
+ chroms_features[ feature.chrom ] = []
+ chroms_features[ feature.chrom ].append( feature )
+
+ # Sort features by chrom, start position.
+ chroms_features_sorted = []
+ for chrom_features in chroms_features.values():
+ chroms_features_sorted.append( chrom_features )
+ chroms_features_sorted.sort( lambda a,b: cmp( a[0].chrom, b[0].chrom ) )
+ for features in chroms_features_sorted:
+ features.sort( lambda a,b: cmp( a.start, b.start ) )
+
+ # Yield.
+ for chrom_features in chroms_features_sorted:
+ for feature in chrom_features:
+ yield feature
+
\ No newline at end of file
diff -r a0d6e382f2e198e46613705ad8a3962d0bc58fca -r 95c05fcbbceb69e9100d8689694c22cc13d16b23 tools/filters/gff/sort_gtf.py
--- /dev/null
+++ b/tools/filters/gff/sort_gtf.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+import sys
+from galaxy import eggs
+from galaxy.datatypes.util.gff_util import read_unordered_gtf
+
+# Older py compatibility
+try:
+ set()
+except:
+ from sets import Set as set
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+#
+# Process inputs.
+#
+
+in_fname = sys.argv[1]
+out_fname = sys.argv[2]
+
+out = open( out_fname, 'w' )
+for feature in read_unordered_gtf( open( in_fname, 'r' ) ):
+ # Print feature.
+ for interval in feature.intervals:
+ out.write( "\t".join(interval.fields) )
+
+# TODO: print status information: how many lines processed and features found.
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
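The read_unordered_gtf() reader above groups lines by transcript_id, sorts the intervals inside each transcript by start, and then orders the resulting features by chromosome and start. The same strategy can be shown with plain tuples, as a toy illustration rather than Galaxy code.

from collections import OrderedDict

# Toy GTF-like records: (chrom, start, attribute string carrying transcript_id).
lines = [
    ( 'chr2', 300, 'gene_id "g2"; transcript_id "t2";' ),
    ( 'chr1', 150, 'gene_id "g1"; transcript_id "t1";' ),
    ( 'chr1', 100, 'gene_id "g1"; transcript_id "t1";' ),
]

# Group intervals by transcript_id, preserving first-seen order.
by_transcript = OrderedDict()
for chrom, start, attrs in lines:
    transcript_id = attrs.split( 'transcript_id "' )[1].split( '"' )[0]
    by_transcript.setdefault( transcript_id, [] ).append( ( chrom, start ) )

# Sort intervals within each transcript by start, then sort the features by
# (chrom, start of first interval), mirroring the reader above.
features = []
for transcript_id, intervals in by_transcript.items():
    intervals.sort( key=lambda interval: interval[1] )
    features.append( ( transcript_id, intervals ) )
features.sort( key=lambda feature: ( feature[1][0][0], feature[1][0][1] ) )

for transcript_id, intervals in features:
    print( transcript_id, intervals )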
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4bb4d6d97649/
changeset: 4bb4d6d97649
branch: add_weblogo_requirement
user: natefoo
date: 2012-05-24 17:13:01
summary: Closed branch add_weblogo_requirement
affected #: 0 files
https://bitbucket.org/galaxy/galaxy-central/changeset/8e00c7e646b3/
changeset: 8e00c7e646b3
user: natefoo
date: 2012-05-24 17:13:32
summary: Merged add_weblogo_requirement.
affected #: 0 files
https://bitbucket.org/galaxy/galaxy-central/changeset/aad87a713970/
changeset: aad87a713970
branch: add_clustalw2_requirement
user: natefoo
date: 2012-05-24 17:14:02
summary: Closed branch add_clustalw2_requirement
affected #: 0 files
https://bitbucket.org/galaxy/galaxy-central/changeset/a0d6e382f2e1/
changeset: a0d6e382f2e1
user: natefoo
date: 2012-05-24 17:14:17
summary: Merged add_clustalw2_requirement.
affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: natefoo: Add ssh egg and fix a requirement for the Tool Shed.
by Bitbucket 24 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a45a4d1023af/
changeset: a45a4d1023af
user: natefoo
date: 2012-05-24 16:05:55
summary: Add ssh egg and fix a requirement for the Tool Shed.
affected #: 2 files
diff -r fdb66f9df9de8ad9e0ed34f5301f13f06674fa78 -r a45a4d1023afcceee66372790fa7586f19e12b2e eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -54,6 +54,7 @@
Routes = 1.12.3
SQLAlchemy = 0.5.6
sqlalchemy_migrate = 0.5.4
+ssh = 1.7.14
SVGFig = 1.1.6
Tempita = 0.1
twill = 0.9
diff -r fdb66f9df9de8ad9e0ed34f5301f13f06674fa78 -r a45a4d1023afcceee66372790fa7586f19e12b2e lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
@@ -8,7 +8,7 @@
from galaxy import eggs
import pkg_resources
-pkg_resources.require( 'fabric' )
+pkg_resources.require( 'Fabric' )
from fabric.api import env, lcd, local
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dannon: History API undelete bug pointed out by Ed Kirton. Resolves BB Issue #763.
by Bitbucket 24 May '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/fdb66f9df9de/
changeset: fdb66f9df9de
user: dannon
date: 2012-05-24 14:18:49
summary: History API undelete bug pointed out by Ed Kirton. Resolves BB Issue #763.
affected #: 1 file
diff -r e5a406a8345b8933035ec6530643d9078aa74d72 -r fdb66f9df9de8ad9e0ed34f5301f13f06674fa78 lib/galaxy/web/api/histories.py
--- a/lib/galaxy/web/api/histories.py
+++ b/lib/galaxy/web/api/histories.py
@@ -153,7 +153,7 @@
POST /api/histories/deleted/{encoded_quota_id}/undelete
Undeletes a quota
"""
- history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False, deleted=True )
+ history = self.get_history( trans, id, check_ownership=True, check_accessible=False, deleted=True )
history.deleted = False
trans.sa_session.add( history )
trans.sa_session.flush()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
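The old body above referred to history_id even though the route supplies the parameter as id, so calling the undelete endpoint failed at runtime; the fix uses the actual parameter name. A stripped-down illustration follows; the class and helper are hypothetical, and only the history_id -> id substitution comes from the diff.

class FakeHistoriesAPI( object ):
    def undelete( self, trans, id, **kwd ):
        # Before the fix this line used `history_id`, a name that does not
        # exist in this scope, so the call raised NameError.
        return self.get_history( trans, id, deleted=True )

    def get_history( self, trans, id, deleted=False ):
        return 'history %s (deleted=%s)' % ( id, deleted )

print( FakeHistoriesAPI().undelete( None, 'abc123' ) )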