galaxy-commits
Threads by month
- ----- 2026 -----
- April
- March
- February
- January
- ----- 2025 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6a8584218f61/
Changeset: 6a8584218f61
Branch: next-stable
User: greg
Date: 2013-06-03 18:05:24
Summary: Fix for handling tool panel section selection when installing tool shed repositories.
Affected #: 5 files
diff -r 3df028d4cab449f4ee2b5db771ec545d88e3440e -r 6a8584218f61c5cf7aa129677a7548c610cd0cca lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -388,25 +388,9 @@
tool_shed_url = installation_dict[ 'tool_shed_url' ]
# Handle contained tools.
if includes_tools_for_display_in_tool_panel and ( new_tool_panel_section or tool_panel_section ):
- if new_tool_panel_section:
- section_id = new_tool_panel_section.lower().replace( ' ', '_' )
- tool_panel_section_key = 'section_%s' % str( section_id )
- if tool_panel_section_key in trans.app.toolbox.tool_panel:
- # Appending a tool to an existing section in trans.app.toolbox.tool_panel
- log.debug( "Appending to tool panel section: %s" % new_tool_panel_section )
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- # Appending a new section to trans.app.toolbox.tool_panel
- log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
- elem = XmlET.Element( 'section' )
- elem.attrib[ 'name' ] = new_tool_panel_section
- elem.attrib[ 'id' ] = section_id
- elem.attrib[ 'version' ] = ''
- tool_section = tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
- else:
- tool_panel_section_key = 'section_%s' % tool_panel_section
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
+ tool_panel_section_key, tool_section = tool_util.handle_tool_panel_section( trans,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
else:
tool_panel_section_key = None
tool_section = None
diff -r 3df028d4cab449f4ee2b5db771ec545d88e3440e -r 6a8584218f61c5cf7aa129677a7548c610cd0cca lib/tool_shed/util/datatype_util.py
--- a/lib/tool_shed/util/datatype_util.py
+++ b/lib/tool_shed/util/datatype_util.py
@@ -34,7 +34,7 @@
relative_path_to_datatype_file_name = None
datatype_files = datatypes_config_root.find( 'datatype_files' )
datatype_class_modules = []
- if datatype_files:
+ if datatype_files is not None:
# The <datatype_files> tag set contains any number of <datatype_file> tags.
# <datatype_files>
# <datatype_file name="gmap.py"/>
@@ -74,14 +74,14 @@
os.write( fd, '<?xml version="1.0"?>\n' )
os.write( fd, '<datatypes>\n' )
os.write( fd, '%s' % xml_util.xml_to_string( registration ) )
- if sniffers:
+ if sniffers is not None:
os.write( fd, '%s' % xml_util.xml_to_string( sniffers ) )
os.write( fd, '</datatypes>\n' )
os.close( fd )
os.chmod( proprietary_datatypes_config, 0644 )
# Load proprietary datatypes
app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=proprietary_datatypes_config, deactivate=deactivate, override=override )
- if datatype_files:
+ if datatype_files is not None:
try:
os.unlink( proprietary_datatypes_config )
except:
diff -r 3df028d4cab449f4ee2b5db771ec545d88e3440e -r 6a8584218f61c5cf7aa129677a7548c610cd0cca lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -435,14 +435,14 @@
root = tree.getroot()
repository_datatype_code_files = []
datatype_files = root.find( 'datatype_files' )
- if datatype_files:
+ if datatype_files is not None:
for elem in datatype_files.findall( 'datatype_file' ):
name = elem.get( 'name', None )
repository_datatype_code_files.append( name )
metadata_dict[ 'datatype_files' ] = repository_datatype_code_files
datatypes = []
registration = root.find( 'registration' )
- if registration:
+ if registration is not None:
for elem in registration.findall( 'datatype' ):
converters = []
display_app_containers = []
diff -r 3df028d4cab449f4ee2b5db771ec545d88e3440e -r 6a8584218f61c5cf7aa129677a7548c610cd0cca lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -203,22 +203,15 @@
else:
# We're installing a new tool shed repository that does not yet have a database record. This repository is a repository dependency
# of a different repository being installed.
- if new_tool_panel_section:
- section_id = new_tool_panel_section.lower().replace( ' ', '_' )
- tool_panel_section_key = 'section_%s' % str( section_id )
- elif tool_panel_section:
- tool_panel_section_key = 'section_%s' % tool_panel_section
- else:
- tool_panel_section_key = None
+ tool_panel_section_key, tool_section = tool_util.handle_tool_panel_section( trans,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
+
else:
# We're installing a new tool shed repository that does not yet have a database record.
- if new_tool_panel_section:
- section_id = new_tool_panel_section.lower().replace( ' ', '_' )
- tool_panel_section_key = 'section_%s' % str( section_id )
- elif tool_panel_section:
- tool_panel_section_key = 'section_%s' % tool_panel_section
- else:
- tool_panel_section_key = None
+ tool_panel_section_key, tool_section = tool_util.handle_tool_panel_section( trans,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
tool_shed_repository = suc.create_or_update_tool_shed_repository( app=trans.app,
name=name,
description=description,
@@ -239,7 +232,7 @@
tool_panel_section_keys.append( tool_panel_section_key )
filtered_repo_info_dicts.append( repo_info_dict )
# Build repository dependency relationships even if the user chose to not install repository dependencies.
- build_repository_dependency_relationships( trans, all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
+ build_repository_dependency_relationships( trans, all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts, message
def generate_message_for_invalid_repository_dependencies( metadata_dict ):
diff -r 3df028d4cab449f4ee2b5db771ec545d88e3440e -r 6a8584218f61c5cf7aa129677a7548c610cd0cca lib/tool_shed/util/tool_util.py
--- a/lib/tool_shed/util/tool_util.py
+++ b/lib/tool_shed/util/tool_util.py
@@ -458,6 +458,31 @@
fh.close()
return sample_files, deleted_sample_files
+def get_or_create_tool_section( trans, tool_panel_section_id, new_tool_panel_section=None ):
+ tool_panel_section_key = 'section_%s' % str( tool_panel_section_id )
+ if tool_panel_section_key in trans.app.toolbox.tool_panel:
+ # Appending a tool to an existing section in trans.app.toolbox.tool_panel
+ tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
+ log.debug( "Appending to tool panel section: %s" % str( tool_section.name ) )
+ else:
+ # Appending a new section to trans.app.toolbox.tool_panel
+ try:
+ new_tool_panel_section_name = new_tool_panel_section.name
+ except:
+ new_tool_panel_section_name = new_tool_panel_section
+ if new_tool_panel_section_name:
+ elem = XmlET.Element( 'section' )
+ elem.attrib[ 'name' ] = new_tool_panel_section_name
+ elem.attrib[ 'id' ] = tool_panel_section_id
+ elem.attrib[ 'version' ] = ''
+ tool_section = galaxy.tools.ToolSection( elem )
+ trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
+ log.debug( "Loading new tool panel section: %s" % str( tool_section.name ) )
+ else:
+ log.debug( "Unable to create new tool pane section using received new_tool_panel_section: %s" % str( new_tool_panel_section ))
+ return None, None
+ return tool_panel_section_key, tool_section
+
def get_tool_path_install_dir( partial_install_dir, shed_tool_conf_dict, tool_dict, config_elems ):
for elem in config_elems:
if elem.tag == 'tool':
@@ -614,8 +639,8 @@
if 'tools' in metadata:
# This forces everything to be loaded into the same section (or no section) in the tool panel.
if no_changes_checked:
- # Make sure the no_changes checkbox overrides the new_tool_panel_section if the user checked the checkbox and
- # entered something into the field.
+ # Make sure the no_changes check box overrides the new_tool_panel_section if the user checked the check box and entered something
+ # into the field.
new_tool_panel_section = None
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
@@ -630,42 +655,29 @@
original_section_id = tool_section_dict[ 'id' ]
original_section_name = tool_section_dict[ 'name' ]
if original_section_id:
- tool_panel_section_key = 'section_%s' % str( original_section_id )
- if tool_panel_section_key in trans.app.toolbox.tool_panel:
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- # The section in which the tool was originally loaded used to be in the tool panel, but no longer is.
- elem = XmlET.Element( 'section' )
- elem.attrib[ 'name' ] = original_section_name
- elem.attrib[ 'id' ] = original_section_id
- elem.attrib[ 'version' ] = ''
- tool_section = galaxy.tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
+ tool_panel_section_key, tool_section = get_or_create_tool_section( trans,
+ tool_panel_section_id=original_section_id,
+ new_tool_panel_section=new_tool_panel_section )
else:
# The user elected to change the tool panel section to contain the tools.
- if new_tool_panel_section:
- section_id = new_tool_panel_section.lower().replace( ' ', '_' )
- tool_panel_section_key = 'section_%s' % str( section_id )
- if tool_panel_section_key in trans.app.toolbox.tool_panel:
- # Appending a tool to an existing section in trans.app.toolbox.tool_panel
- log.debug( "Appending to tool panel section: %s" % new_tool_panel_section )
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- # Appending a new section to trans.app.toolbox.tool_panel
- log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
- elem = XmlET.Element( 'section' )
- elem.attrib[ 'name' ] = new_tool_panel_section
- elem.attrib[ 'id' ] = section_id
- elem.attrib[ 'version' ] = ''
- tool_section = galaxy.tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
- elif tool_panel_section:
- tool_panel_section_key = 'section_%s' % tool_panel_section
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- tool_section = None
+ tool_panel_section_key, tool_section = handle_tool_panel_section( trans,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
return tool_section, new_tool_panel_section, tool_panel_section_key
+def handle_tool_panel_section( trans, tool_panel_section=None, new_tool_panel_section=None ):
+ if new_tool_panel_section:
+ section_id = new_tool_panel_section.lower().replace( ' ', '_' )
+ tool_panel_section_key, tool_section = get_or_create_tool_section( trans,
+ tool_panel_section_id=section_id,
+ new_tool_panel_section=new_tool_panel_section )
+ elif tool_panel_section:
+ tool_panel_section_key = 'section_%s' % str( tool_panel_section )
+ tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
+ else:
+ return None, None
+ return tool_panel_section_key, tool_section
+
def handle_tool_versions( app, tool_version_dicts, tool_shed_repository ):
"""
Using the list of tool_version_dicts retrieved from the tool shed (one per changeset revison up to the currently installed changeset revision),
https://bitbucket.org/galaxy/galaxy-central/commits/83213accd759/
Changeset: 83213accd759
User: greg
Date: 2013-06-03 18:05:49
Summary: Merged from next-stable
Affected #: 5 files
diff -r 1380f4546418736a0645267868de615ac6d1b0e1 -r 83213accd759e752538c025275557d3dfc5d1433 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -388,25 +388,9 @@
tool_shed_url = installation_dict[ 'tool_shed_url' ]
# Handle contained tools.
if includes_tools_for_display_in_tool_panel and ( new_tool_panel_section or tool_panel_section ):
- if new_tool_panel_section:
- section_id = new_tool_panel_section.lower().replace( ' ', '_' )
- tool_panel_section_key = 'section_%s' % str( section_id )
- if tool_panel_section_key in trans.app.toolbox.tool_panel:
- # Appending a tool to an existing section in trans.app.toolbox.tool_panel
- log.debug( "Appending to tool panel section: %s" % new_tool_panel_section )
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- # Appending a new section to trans.app.toolbox.tool_panel
- log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
- elem = XmlET.Element( 'section' )
- elem.attrib[ 'name' ] = new_tool_panel_section
- elem.attrib[ 'id' ] = section_id
- elem.attrib[ 'version' ] = ''
- tool_section = tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
- else:
- tool_panel_section_key = 'section_%s' % tool_panel_section
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
+ tool_panel_section_key, tool_section = tool_util.handle_tool_panel_section( trans,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
else:
tool_panel_section_key = None
tool_section = None
diff -r 1380f4546418736a0645267868de615ac6d1b0e1 -r 83213accd759e752538c025275557d3dfc5d1433 lib/tool_shed/util/datatype_util.py
--- a/lib/tool_shed/util/datatype_util.py
+++ b/lib/tool_shed/util/datatype_util.py
@@ -34,7 +34,7 @@
relative_path_to_datatype_file_name = None
datatype_files = datatypes_config_root.find( 'datatype_files' )
datatype_class_modules = []
- if datatype_files:
+ if datatype_files is not None:
# The <datatype_files> tag set contains any number of <datatype_file> tags.
# <datatype_files>
# <datatype_file name="gmap.py"/>
@@ -74,14 +74,14 @@
os.write( fd, '<?xml version="1.0"?>\n' )
os.write( fd, '<datatypes>\n' )
os.write( fd, '%s' % xml_util.xml_to_string( registration ) )
- if sniffers:
+ if sniffers is not None:
os.write( fd, '%s' % xml_util.xml_to_string( sniffers ) )
os.write( fd, '</datatypes>\n' )
os.close( fd )
os.chmod( proprietary_datatypes_config, 0644 )
# Load proprietary datatypes
app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=proprietary_datatypes_config, deactivate=deactivate, override=override )
- if datatype_files:
+ if datatype_files is not None:
try:
os.unlink( proprietary_datatypes_config )
except:
diff -r 1380f4546418736a0645267868de615ac6d1b0e1 -r 83213accd759e752538c025275557d3dfc5d1433 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -435,14 +435,14 @@
root = tree.getroot()
repository_datatype_code_files = []
datatype_files = root.find( 'datatype_files' )
- if datatype_files:
+ if datatype_files is not None:
for elem in datatype_files.findall( 'datatype_file' ):
name = elem.get( 'name', None )
repository_datatype_code_files.append( name )
metadata_dict[ 'datatype_files' ] = repository_datatype_code_files
datatypes = []
registration = root.find( 'registration' )
- if registration:
+ if registration is not None:
for elem in registration.findall( 'datatype' ):
converters = []
display_app_containers = []
diff -r 1380f4546418736a0645267868de615ac6d1b0e1 -r 83213accd759e752538c025275557d3dfc5d1433 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -203,22 +203,15 @@
else:
# We're installing a new tool shed repository that does not yet have a database record. This repository is a repository dependency
# of a different repository being installed.
- if new_tool_panel_section:
- section_id = new_tool_panel_section.lower().replace( ' ', '_' )
- tool_panel_section_key = 'section_%s' % str( section_id )
- elif tool_panel_section:
- tool_panel_section_key = 'section_%s' % tool_panel_section
- else:
- tool_panel_section_key = None
+ tool_panel_section_key, tool_section = tool_util.handle_tool_panel_section( trans,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
+
else:
# We're installing a new tool shed repository that does not yet have a database record.
- if new_tool_panel_section:
- section_id = new_tool_panel_section.lower().replace( ' ', '_' )
- tool_panel_section_key = 'section_%s' % str( section_id )
- elif tool_panel_section:
- tool_panel_section_key = 'section_%s' % tool_panel_section
- else:
- tool_panel_section_key = None
+ tool_panel_section_key, tool_section = tool_util.handle_tool_panel_section( trans,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
tool_shed_repository = suc.create_or_update_tool_shed_repository( app=trans.app,
name=name,
description=description,
@@ -239,7 +232,7 @@
tool_panel_section_keys.append( tool_panel_section_key )
filtered_repo_info_dicts.append( repo_info_dict )
# Build repository dependency relationships even if the user chose to not install repository dependencies.
- build_repository_dependency_relationships( trans, all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
+ build_repository_dependency_relationships( trans, all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts, message
def generate_message_for_invalid_repository_dependencies( metadata_dict ):
diff -r 1380f4546418736a0645267868de615ac6d1b0e1 -r 83213accd759e752538c025275557d3dfc5d1433 lib/tool_shed/util/tool_util.py
--- a/lib/tool_shed/util/tool_util.py
+++ b/lib/tool_shed/util/tool_util.py
@@ -458,6 +458,31 @@
fh.close()
return sample_files, deleted_sample_files
+def get_or_create_tool_section( trans, tool_panel_section_id, new_tool_panel_section=None ):
+ tool_panel_section_key = 'section_%s' % str( tool_panel_section_id )
+ if tool_panel_section_key in trans.app.toolbox.tool_panel:
+ # Appending a tool to an existing section in trans.app.toolbox.tool_panel
+ tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
+ log.debug( "Appending to tool panel section: %s" % str( tool_section.name ) )
+ else:
+ # Appending a new section to trans.app.toolbox.tool_panel
+ try:
+ new_tool_panel_section_name = new_tool_panel_section.name
+ except:
+ new_tool_panel_section_name = new_tool_panel_section
+ if new_tool_panel_section_name:
+ elem = XmlET.Element( 'section' )
+ elem.attrib[ 'name' ] = new_tool_panel_section_name
+ elem.attrib[ 'id' ] = tool_panel_section_id
+ elem.attrib[ 'version' ] = ''
+ tool_section = galaxy.tools.ToolSection( elem )
+ trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
+ log.debug( "Loading new tool panel section: %s" % str( tool_section.name ) )
+ else:
+ log.debug( "Unable to create new tool pane section using received new_tool_panel_section: %s" % str( new_tool_panel_section ))
+ return None, None
+ return tool_panel_section_key, tool_section
+
def get_tool_path_install_dir( partial_install_dir, shed_tool_conf_dict, tool_dict, config_elems ):
for elem in config_elems:
if elem.tag == 'tool':
@@ -614,8 +639,8 @@
if 'tools' in metadata:
# This forces everything to be loaded into the same section (or no section) in the tool panel.
if no_changes_checked:
- # Make sure the no_changes checkbox overrides the new_tool_panel_section if the user checked the checkbox and
- # entered something into the field.
+ # Make sure the no_changes check box overrides the new_tool_panel_section if the user checked the check box and entered something
+ # into the field.
new_tool_panel_section = None
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
@@ -630,42 +655,29 @@
original_section_id = tool_section_dict[ 'id' ]
original_section_name = tool_section_dict[ 'name' ]
if original_section_id:
- tool_panel_section_key = 'section_%s' % str( original_section_id )
- if tool_panel_section_key in trans.app.toolbox.tool_panel:
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- # The section in which the tool was originally loaded used to be in the tool panel, but no longer is.
- elem = XmlET.Element( 'section' )
- elem.attrib[ 'name' ] = original_section_name
- elem.attrib[ 'id' ] = original_section_id
- elem.attrib[ 'version' ] = ''
- tool_section = galaxy.tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
+ tool_panel_section_key, tool_section = get_or_create_tool_section( trans,
+ tool_panel_section_id=original_section_id,
+ new_tool_panel_section=new_tool_panel_section )
else:
# The user elected to change the tool panel section to contain the tools.
- if new_tool_panel_section:
- section_id = new_tool_panel_section.lower().replace( ' ', '_' )
- tool_panel_section_key = 'section_%s' % str( section_id )
- if tool_panel_section_key in trans.app.toolbox.tool_panel:
- # Appending a tool to an existing section in trans.app.toolbox.tool_panel
- log.debug( "Appending to tool panel section: %s" % new_tool_panel_section )
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- # Appending a new section to trans.app.toolbox.tool_panel
- log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
- elem = XmlET.Element( 'section' )
- elem.attrib[ 'name' ] = new_tool_panel_section
- elem.attrib[ 'id' ] = section_id
- elem.attrib[ 'version' ] = ''
- tool_section = galaxy.tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
- elif tool_panel_section:
- tool_panel_section_key = 'section_%s' % tool_panel_section
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- tool_section = None
+ tool_panel_section_key, tool_section = handle_tool_panel_section( trans,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
return tool_section, new_tool_panel_section, tool_panel_section_key
+def handle_tool_panel_section( trans, tool_panel_section=None, new_tool_panel_section=None ):
+ if new_tool_panel_section:
+ section_id = new_tool_panel_section.lower().replace( ' ', '_' )
+ tool_panel_section_key, tool_section = get_or_create_tool_section( trans,
+ tool_panel_section_id=section_id,
+ new_tool_panel_section=new_tool_panel_section )
+ elif tool_panel_section:
+ tool_panel_section_key = 'section_%s' % str( tool_panel_section )
+ tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
+ else:
+ return None, None
+ return tool_panel_section_key, tool_section
+
def handle_tool_versions( app, tool_version_dicts, tool_shed_repository ):
"""
Using the list of tool_version_dicts retrieved from the tool shed (one per changeset revison up to the currently installed changeset revision),
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3df028d4cab4/
Changeset: 3df028d4cab4
Branch: next-stable
User: jgoecks
Date: 2013-06-03 15:43:42
Summary: Remove documentation for Tophat -F option because it is no longer supported.
Affected #: 2 files
diff -r 6dbe92e40de6d352b4d54ec31c89e83fafcdd532 -r 3df028d4cab449f4ee2b5db771ec545d88e3440e tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -350,7 +350,7 @@
<test><!-- Tophat commands:
bowtie2-build -f test-data/tophat_in1.fasta tophat_in1
- tophat2 -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
+ tophat2 -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -400,7 +400,7 @@
<!-- Test base-space paired-end reads with user-supplied reference fasta and full parameters --><test><!-- TopHat commands:
- tophat2 -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search +report_discordant_pairs tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
+ tophat2 -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search +report_discordant_pairs tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -506,9 +506,6 @@
-m/--splice-mismatches INT The maximum number of mismatches that may appear in the "anchor" region of a spliced alignment. The default is 0.
-i/--min-intron-length INT The minimum intron length. TopHat will ignore donor/acceptor pairs closer than this many bases apart. The default is 70.
-I/--max-intron-length INT The maximum intron length. When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read. The default is 500000.
- -F/--min-isoform-fraction 0.0-1.0 TopHat filters out junctions supported by too few alignments. Suppose a junction spanning two exons, is supported by S reads. Let the average depth of coverage of
- exon A be D, and assume that it is higher than B. If S / D is less than the minimum isoform fraction, the junction is not reported. A value of zero disables the
- filter. The default is 0.15.
-g/--max-multihits INT Instructs TopHat to allow up to this many alignments to the reference for a given read, and suppresses all alignments for reads with more than this many
alignments. The default is 40.
-G/--GTF [GTF 2.2 file] Supply TopHat with a list of gene model annotations. TopHat will use the exon records in this file to build a set of known splice junctions for each gene, and will attempt to align reads to these junctions even if they would not normally be covered by the initial mapping.
diff -r 6dbe92e40de6d352b4d54ec31c89e83fafcdd532 -r 3df028d4cab449f4ee2b5db771ec545d88e3440e tools/ngs_rna/tophat_wrapper.xml
--- a/tools/ngs_rna/tophat_wrapper.xml
+++ b/tools/ngs_rna/tophat_wrapper.xml
@@ -403,7 +403,7 @@
<test><!-- Tophat commands:
bowtie-build -f test-data/tophat_in1.fasta tophat_in1
- tophat -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +closure-search +min-closure-exon 50 +min-closure-intron 50 +max-closure-intro 5000 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
+ tophat -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +closure-search +min-closure-exon 50 +min-closure-intron 50 +max-closure-intro 5000 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -445,7 +445,7 @@
<!-- Test base-space paired-end reads with user-supplied reference fasta and full parameters --><test><!-- TopHat commands:
- tophat -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
+ tophat -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -538,9 +538,6 @@
-m/--splice-mismatches INT The maximum number of mismatches that may appear in the "anchor" region of a spliced alignment. The default is 0.
-i/--min-intron-length INT The minimum intron length. TopHat will ignore donor/acceptor pairs closer than this many bases apart. The default is 70.
-I/--max-intron-length INT The maximum intron length. When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read. The default is 500000.
- -F/--min-isoform-fraction 0.0-1.0 TopHat filters out junctions supported by too few alignments. Suppose a junction spanning two exons, is supported by S reads. Let the average depth of coverage of
- exon A be D, and assume that it is higher than B. If S / D is less than the minimum isoform fraction, the junction is not reported. A value of zero disables the
- filter. The default is 0.15.
-g/--max-multihits INT Instructs TopHat to allow up to this many alignments to the reference for a given read, and suppresses all alignments for reads with more than this many
alignments. The default is 40.
-G/--GTF [GTF 2.2 file] Supply TopHat with a list of gene model annotations. TopHat will use the exon records in this file to build a set of known splice junctions for each gene, and will attempt to align reads to these junctions even if they would not normally be covered by the initial mapping.
https://bitbucket.org/galaxy/galaxy-central/commits/1380f4546418/
Changeset: 1380f4546418
User: jgoecks
Date: 2013-06-03 15:44:02
Summary: Automated merge of next-stable
Affected #: 2 files
diff -r 716f29e4c77d2246a49c92ea205734f768369fbb -r 1380f4546418736a0645267868de615ac6d1b0e1 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -350,7 +350,7 @@
<test><!-- Tophat commands:
bowtie2-build -f test-data/tophat_in1.fasta tophat_in1
- tophat2 -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
+ tophat2 -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -400,7 +400,7 @@
<!-- Test base-space paired-end reads with user-supplied reference fasta and full parameters --><test><!-- TopHat commands:
- tophat2 -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search +report_discordant_pairs tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
+ tophat2 -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search +report_discordant_pairs tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -506,9 +506,6 @@
-m/--splice-mismatches INT The maximum number of mismatches that may appear in the "anchor" region of a spliced alignment. The default is 0.
-i/--min-intron-length INT The minimum intron length. TopHat will ignore donor/acceptor pairs closer than this many bases apart. The default is 70.
-I/--max-intron-length INT The maximum intron length. When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read. The default is 500000.
- -F/--min-isoform-fraction 0.0-1.0 TopHat filters out junctions supported by too few alignments. Suppose a junction spanning two exons, is supported by S reads. Let the average depth of coverage of
- exon A be D, and assume that it is higher than B. If S / D is less than the minimum isoform fraction, the junction is not reported. A value of zero disables the
- filter. The default is 0.15.
-g/--max-multihits INT Instructs TopHat to allow up to this many alignments to the reference for a given read, and suppresses all alignments for reads with more than this many
alignments. The default is 40.
-G/--GTF [GTF 2.2 file] Supply TopHat with a list of gene model annotations. TopHat will use the exon records in this file to build a set of known splice junctions for each gene, and will attempt to align reads to these junctions even if they would not normally be covered by the initial mapping.
diff -r 716f29e4c77d2246a49c92ea205734f768369fbb -r 1380f4546418736a0645267868de615ac6d1b0e1 tools/ngs_rna/tophat_wrapper.xml
--- a/tools/ngs_rna/tophat_wrapper.xml
+++ b/tools/ngs_rna/tophat_wrapper.xml
@@ -403,7 +403,7 @@
<test><!-- Tophat commands:
bowtie-build -f test-data/tophat_in1.fasta tophat_in1
- tophat -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +closure-search +min-closure-exon 50 +min-closure-intron 50 +max-closure-intro 5000 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
+ tophat -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +closure-search +min-closure-exon 50 +min-closure-intron 50 +max-closure-intro 5000 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -445,7 +445,7 @@
<!-- Test base-space paired-end reads with user-supplied reference fasta and full parameters --><test><!-- TopHat commands:
- tophat -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
+ tophat -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -538,9 +538,6 @@
-m/--splice-mismatches INT The maximum number of mismatches that may appear in the "anchor" region of a spliced alignment. The default is 0.
-i/--min-intron-length INT The minimum intron length. TopHat will ignore donor/acceptor pairs closer than this many bases apart. The default is 70.
-I/--max-intron-length INT The maximum intron length. When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read. The default is 500000.
- -F/--min-isoform-fraction 0.0-1.0 TopHat filters out junctions supported by too few alignments. Suppose a junction spanning two exons, is supported by S reads. Let the average depth of coverage of
- exon A be D, and assume that it is higher than B. If S / D is less than the minimum isoform fraction, the junction is not reported. A value of zero disables the
- filter. The default is 0.15.
-g/--max-multihits INT Instructs TopHat to allow up to this many alignments to the reference for a given read, and suppresses all alignments for reads with more than this many
alignments. The default is 40.
-G/--GTF [GTF 2.2 file] Supply TopHat with a list of gene model annotations. TopHat will use the exon records in this file to build a set of known splice junctions for each gene, and will attempt to align reads to these junctions even if they would not normally be covered by the initial mapping.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6dbe92e40de6/
Changeset: 6dbe92e40de6
Branch: next-stable
User: greg
Date: 2013-06-01 23:22:27
Summary: Fix for resetting all metadata on writable repositories in the tool shed when the user is not an admin.
Affected #: 1 file
diff -r 016b0b22e826625ce39621aeebccdee4d3f6a959 -r 6dbe92e40de6d352b4d54ec31c89e83fafcdd532 templates/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako
--- a/templates/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako
+++ b/templates/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako
@@ -19,7 +19,15 @@
<div class="toolForm"><div class="toolFormTitle">Reset all metadata on each selected repository</div>
- <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories_in_tool_shed' )}" method="post" >
+ <%
+ if trans.user_is_admin():
+ controller = 'admin'
+ action = 'reset_metadata_on_selected_repositories_in_tool_shed'
+ else:
+ controller = 'repository'
+ action = 'reset_metadata_on_my_writable_repositories_in_tool_shed'
+ %>
+ <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller=controller, action=action )}" method="post" ><div class="form-row">
Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses.
</div>
https://bitbucket.org/galaxy/galaxy-central/commits/716f29e4c77d/
Changeset: 716f29e4c77d
User: greg
Date: 2013-06-01 23:23:12
Summary: Merged from next-stable
Affected #: 1 file
diff -r f1bbaa14ddd16181c59d89a3f66d49a6e360c327 -r 716f29e4c77d2246a49c92ea205734f768369fbb templates/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako
--- a/templates/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako
+++ b/templates/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako
@@ -19,7 +19,15 @@
<div class="toolForm"><div class="toolFormTitle">Reset all metadata on each selected repository</div>
- <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories_in_tool_shed' )}" method="post" >
+ <%
+ if trans.user_is_admin():
+ controller = 'admin'
+ action = 'reset_metadata_on_selected_repositories_in_tool_shed'
+ else:
+ controller = 'repository'
+ action = 'reset_metadata_on_my_writable_repositories_in_tool_shed'
+ %>
+ <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller=controller, action=action )}" method="post" ><div class="form-row">
Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses.
</div>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/016b0b22e826/
Changeset: 016b0b22e826
Branch: next-stable
User: greg
Date: 2013-06-01 20:29:31
Summary: Strip trailing / from tool shed url when using its value to search the Galaxy database.
Affected #: 1 file
diff -r c3fe9ba931c20975711dad6e410c9a4d0601c1fc -r 016b0b22e826625ce39621aeebccdee4d3f6a959 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -685,6 +685,8 @@
def get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ):
"""Return a tool shed repository database record that is defined by either the current changeset revision or the installed_changeset_revision."""
# This method is used only in Galaxy, not the tool shed.
+ if tool_shed.endswith( '/' ):
+ tool_shed = tool_shed.rstrip( '/' )
repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app=app,
tool_shed=tool_shed,
name=name,
https://bitbucket.org/galaxy/galaxy-central/commits/f1bbaa14ddd1/
Changeset: f1bbaa14ddd1
User: greg
Date: 2013-06-01 20:30:16
Summary: Merged from next-stable
Affected #: 1 file
diff -r 7c9738db4d0004490329a87e15091a1be02628b6 -r f1bbaa14ddd16181c59d89a3f66d49a6e360c327 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -685,6 +685,8 @@
def get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ):
"""Return a tool shed repository database record that is defined by either the current changeset revision or the installed_changeset_revision."""
# This method is used only in Galaxy, not the tool shed.
+ if tool_shed.endswith( '/' ):
+ tool_shed = tool_shed.rstrip( '/' )
repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app=app,
tool_shed=tool_shed,
name=name,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c3fe9ba931c2/
Changeset: c3fe9ba931c2
Branch: next-stable
User: Dave Bouvier
Date: 2013-05-31 22:19:21
Summary: Fix for populating environment variables referenced with the $ENV[] syntax when installing repositories with complex repository dependencies.
Affected #: 1 file
diff -r f36d687104d6c2b0b7e482181a5bf0a24740feb1 -r c3fe9ba931c20975711dad6e410c9a4d0601c1fc lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -108,7 +108,11 @@
text = cu.tool_shed_get( app, tool_shed_url, url )
return text
-def handle_set_environment_entry_for_package( app, install_dir, tool_shed_repository, package_name, package_version, elem ):
+def handle_set_environment_entry_for_package( app, install_dir, tool_shed_repository, package_name, package_version, elem, required_repository ):
+ """
+ Populate a list of actions for creating an env.sh file for a dependent repository. The received elem is the <package> tag set associated
+ with the tool-dependencies.xml file for one of the received tool_shed_repository's repository dependency.
+ """
action_dict = {}
actions = []
for package_elem in elem:
@@ -124,8 +128,8 @@
# Get the installation method version from a tag like: <install version="1.0">
package_install_version = package_elem.get( 'version', '1.0' )
if package_install_version == '1.0':
- # Since the required tool dependency is installed for a repository dependency, all we need to do
- # is inspect the <actions> tag set to find the <action type="set_environment"> tag.
+ # Since the required tool dependency is installed for a repository dependency, we first need to inspect the <actions> tag set to find
+ # the <action type="set_environment"> tag.
for actions_elem in package_elem:
for action_elem in actions_elem:
action_type = action_elem.get( 'type', 'shell_command' )
@@ -141,7 +145,7 @@
if env_var_dict:
env_var_dicts.append( env_var_dict )
elif action_type == 'setup_virtualenv':
- # Add the virtualenv's site-pacakges to PYTHONPATH and bin to PATH. This is a bit hackish.
+ # Add the virtualenv's site-packages to PYTHONPATH and bin to PATH. This is a bit hackish.
site_packages_command = "%s -c 'import os, sys; print os.path.join(sys.prefix, \"lib\", \"python\" + sys.version[:3], \"site-packages\")'" % os.path.join( install_dir, "venv", "bin", "python" )
output = fabric_util.handle_command( app, tool_dependency, install_dir, site_packages_command, return_output=True )
if output.return_code:
@@ -152,7 +156,37 @@
env_var_dicts.append( dict( name="PYTHONPATH", action="prepend_to", value=output.stdout ) )
env_var_dicts.append( dict( name="PATH", action="prepend_to", value=os.path.join( install_dir, 'venv', 'bin' ) ) )
if env_var_dicts:
- action_dict[ 'environment_variable' ] = env_var_dicts
+ if required_repository.status in [ app.model.ToolShedRepository.installation_status.INSTALLED,
+ app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ # Handle the case where we have an installed required repository due to the prior_installation_required = True
+ # setting in the received tool_shed_repository's tool_dependencies.xml file and the required repository's
+ # tool_dependencies.xml file may include the use of the $ENV[] variable inheritance feature. To handle this,
+ # we will replace the current "value" entries in each env_var_dict with the actual path taken from the env.sh
+ # file generated for the installed required repository. Each env_var_dict currently looks something like this:
+ # {'action': 'append_to', 'name': 'LD_LIBRARY_PATH', 'value': '$BOOST_ROOT_DIR/lib/'}
+ # We'll read the contents of the received required_repository's env.sh file and replace the 'value' entry of each env_var_dict
+ # with the associated value in the env.sh file.
+ new_env_var_dicts = []
+ env_sh_file_dir = get_tool_dependency_install_dir( app=app,
+ repository_name=required_repository.name,
+ repository_owner=required_repository.owner,
+ repository_changeset_revision=required_repository.installed_changeset_revision,
+ tool_dependency_type='package',
+ tool_dependency_name=package_name,
+ tool_dependency_version=package_version )
+ env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
+ for i, line in enumerate( open( env_sh_file_path, 'r' ) ):
+ env_var_dict = env_var_dicts[ i ]
+ action = env_var_dict.get( 'action', None )
+ name = env_var_dict.get( 'name', None )
+ value = env_var_dict.get( 'value', None )
+ if action and name and value:
+ new_value = parse_env_shell_entry( action, name, value, line )
+ env_var_dict[ 'value' ] = new_value
+ new_env_var_dicts.append( env_var_dict )
+ action_dict[ 'environment_variable' ] = new_env_var_dicts
+ else:
+ action_dict[ 'environment_variable' ] = env_var_dicts
actions.append( ( 'set_environment', action_dict ) )
return tool_dependency, actions
else:
@@ -253,6 +287,7 @@
dependent_install_dir=dependent_install_dir,
required_install_dir=required_repository_package_install_dir,
tool_shed_repository=tool_shed_repository,
+ required_repository=required_repository,
package_name=package_name,
package_version=package_version,
tool_dependencies_config=config_to_use )
@@ -324,13 +359,11 @@
sa_session = app.model.context.current
def evaluate_template( text ):
- """ Substitute variables defined in XML blocks obtained loaded from
- dependencies file. """
+ """ Substitute variables defined in XML blocks obtained loaded from dependencies file. """
+ # # Added for compatibility with CloudBioLinux.
# TODO: Add tool_version substitution for compat with CloudBioLinux.
- substitutions = {
- "INSTALL_DIR": install_dir,
- "system_install": install_dir, # Added for compat with CloudBioLinux
- }
+ substitutions = { "INSTALL_DIR" : install_dir,
+ "system_install" : install_dir }
return Template( text ).safe_substitute( substitutions )
if not os.path.exists( install_dir ):
@@ -431,15 +464,7 @@
else:
log.debug( "Unsupported action type '%s'. Not proceeding." % str( action_type ) )
raise Exception( "Unsupported action type '%s' in tool dependency definition." % str( action_type ) )
- action_tuple = ( action_type, action_dict )
- # If we're setting environment variables, it's redundant to set the same variable to the same value more than once,
- # so we only append it to the actions list if it isn't already there.
- if action_type in [ 'set_environment', 'set_environment_for_install' ]:
- if action_tuple not in actions:
- actions.append( action_tuple )
- # However, there may be cases where other action types should be executed several times with the same parameters.
- else:
- actions.append( action_tuple )
+ actions.append( ( action_type, action_dict ) )
if actions:
actions_dict[ 'actions' ] = actions
if proprietary_fabfile_path:
@@ -463,7 +488,24 @@
else:
return [ item ]
-def populate_actions_dict( app, dependent_install_dir, required_install_dir, tool_shed_repository, package_name, package_version, tool_dependencies_config ):
+def parse_env_shell_entry( action, name, value, line ):
+ new_value = value
+ var_name = '$%s' % name
+ tmp_value = line.split( '=' )[ 1 ]
+ if action == 'prepend_to':
+ # PATH=/test/package_rdkit_2012_12/62ebd7bb637a/rdkit/bin:$PATH; export PATH
+ new_value = tmp_value.split( ':%s' % var_name )[ 0 ]
+ elif action == 'set_to':
+ # RDBASE=test/package_rdkit_2012_12/62ebd7bb637a/rdkit; export RDBASE
+ new_value = tmp_value.split( ';' )[ 0 ]
+ elif action == 'append_to':
+ # LD_LIBRARY_PATH=$LD_LIBRARY_PATH:test/package_rdkit_2012_12/62ebd7bb637a/rdkit/lib/; export LD_LIBRARY_PATH
+ new_value = tmp_value.split( ':' )[ 1 ]
+ new_value = new_value.split( ';' )[ 0 ]
+ return new_value
+
+
+def populate_actions_dict( app, dependent_install_dir, required_install_dir, tool_shed_repository, required_repository, package_name, package_version, tool_dependencies_config ):
"""
Populate an actions dictionary that can be sent to fabric_util.install_and_build_package. This method handles the scenario where a tool_dependencies.xml
file defines a complex repository dependency. In this case, the tool dependency package will be installed in a separate repository and the tool dependency
@@ -495,7 +537,8 @@
tool_shed_repository=tool_shed_repository,
package_name=package_name,
package_version=package_version,
- elem=required_td_elem )
+ elem=required_td_elem,
+ required_repository=required_repository )
if actions:
actions_dict[ 'actions' ] = actions
break
@@ -629,4 +672,4 @@
parts = []
for arg in args:
parts.append( arg.strip( '/' ) )
- return '/'.join( parts )
+ return '/'.join( parts )
\ No newline at end of file
https://bitbucket.org/galaxy/galaxy-central/commits/7c9738db4d00/
Changeset: 7c9738db4d00
User: Dave Bouvier
Date: 2013-05-31 22:19:54
Summary: Merge in next-stable.
Affected #: 1 file
diff -r c37ea459a80b15795f85359536be883ca21fb942 -r 7c9738db4d0004490329a87e15091a1be02628b6 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -108,7 +108,11 @@
text = cu.tool_shed_get( app, tool_shed_url, url )
return text
-def handle_set_environment_entry_for_package( app, install_dir, tool_shed_repository, package_name, package_version, elem ):
+def handle_set_environment_entry_for_package( app, install_dir, tool_shed_repository, package_name, package_version, elem, required_repository ):
+ """
+ Populate a list of actions for creating an env.sh file for a dependent repository. The received elem is the <package> tag set associated
+ with the tool-dependencies.xml file for one of the received tool_shed_repository's repository dependency.
+ """
action_dict = {}
actions = []
for package_elem in elem:
@@ -124,8 +128,8 @@
# Get the installation method version from a tag like: <install version="1.0">
package_install_version = package_elem.get( 'version', '1.0' )
if package_install_version == '1.0':
- # Since the required tool dependency is installed for a repository dependency, all we need to do
- # is inspect the <actions> tag set to find the <action type="set_environment"> tag.
+ # Since the required tool dependency is installed for a repository dependency, we first need to inspect the <actions> tag set to find
+ # the <action type="set_environment"> tag.
for actions_elem in package_elem:
for action_elem in actions_elem:
action_type = action_elem.get( 'type', 'shell_command' )
@@ -141,7 +145,7 @@
if env_var_dict:
env_var_dicts.append( env_var_dict )
elif action_type == 'setup_virtualenv':
- # Add the virtualenv's site-pacakges to PYTHONPATH and bin to PATH. This is a bit hackish.
+ # Add the virtualenv's site-packages to PYTHONPATH and bin to PATH. This is a bit hackish.
site_packages_command = "%s -c 'import os, sys; print os.path.join(sys.prefix, \"lib\", \"python\" + sys.version[:3], \"site-packages\")'" % os.path.join( install_dir, "venv", "bin", "python" )
output = fabric_util.handle_command( app, tool_dependency, install_dir, site_packages_command, return_output=True )
if output.return_code:
@@ -152,7 +156,37 @@
env_var_dicts.append( dict( name="PYTHONPATH", action="prepend_to", value=output.stdout ) )
env_var_dicts.append( dict( name="PATH", action="prepend_to", value=os.path.join( install_dir, 'venv', 'bin' ) ) )
if env_var_dicts:
- action_dict[ 'environment_variable' ] = env_var_dicts
+ if required_repository.status in [ app.model.ToolShedRepository.installation_status.INSTALLED,
+ app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ # Handle the case where we have an installed required repository due to the prior_installation_required = True
+ # setting in the received tool_shed_repository's tool_dependencies.xml file and the required repository's
+ # tool_dependencies.xml file may include the use of the $ENV[] variable inheritance feature. To handle this,
+ # we will replace the current "value" entries in each env_var_dict with the actual path taken from the env.sh
+ # file generated for the installed required repository. Each env_var_dict currently looks something like this:
+ # {'action': 'append_to', 'name': 'LD_LIBRARY_PATH', 'value': '$BOOST_ROOT_DIR/lib/'}
+ # We'll read the contents of the received required_repository's env.sh file and replace the 'value' entry of each env_var_dict
+ # with the associated value in the env.sh file.
+ new_env_var_dicts = []
+ env_sh_file_dir = get_tool_dependency_install_dir( app=app,
+ repository_name=required_repository.name,
+ repository_owner=required_repository.owner,
+ repository_changeset_revision=required_repository.installed_changeset_revision,
+ tool_dependency_type='package',
+ tool_dependency_name=package_name,
+ tool_dependency_version=package_version )
+ env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
+ for i, line in enumerate( open( env_sh_file_path, 'r' ) ):
+ env_var_dict = env_var_dicts[ i ]
+ action = env_var_dict.get( 'action', None )
+ name = env_var_dict.get( 'name', None )
+ value = env_var_dict.get( 'value', None )
+ if action and name and value:
+ new_value = parse_env_shell_entry( action, name, value, line )
+ env_var_dict[ 'value' ] = new_value
+ new_env_var_dicts.append( env_var_dict )
+ action_dict[ 'environment_variable' ] = new_env_var_dicts
+ else:
+ action_dict[ 'environment_variable' ] = env_var_dicts
actions.append( ( 'set_environment', action_dict ) )
return tool_dependency, actions
else:
@@ -253,6 +287,7 @@
dependent_install_dir=dependent_install_dir,
required_install_dir=required_repository_package_install_dir,
tool_shed_repository=tool_shed_repository,
+ required_repository=required_repository,
package_name=package_name,
package_version=package_version,
tool_dependencies_config=config_to_use )
@@ -324,13 +359,11 @@
sa_session = app.model.context.current
def evaluate_template( text ):
- """ Substitute variables defined in XML blocks obtained loaded from
- dependencies file. """
+ """ Substitute variables defined in XML blocks obtained loaded from dependencies file. """
+ # # Added for compatibility with CloudBioLinux.
# TODO: Add tool_version substitution for compat with CloudBioLinux.
- substitutions = {
- "INSTALL_DIR": install_dir,
- "system_install": install_dir, # Added for compat with CloudBioLinux
- }
+ substitutions = { "INSTALL_DIR" : install_dir,
+ "system_install" : install_dir }
return Template( text ).safe_substitute( substitutions )
if not os.path.exists( install_dir ):
@@ -431,15 +464,7 @@
else:
log.debug( "Unsupported action type '%s'. Not proceeding." % str( action_type ) )
raise Exception( "Unsupported action type '%s' in tool dependency definition." % str( action_type ) )
- action_tuple = ( action_type, action_dict )
- # If we're setting environment variables, it's redundant to set the same variable to the same value more than once,
- # so we only append it to the actions list if it isn't already there.
- if action_type in [ 'set_environment', 'set_environment_for_install' ]:
- if action_tuple not in actions:
- actions.append( action_tuple )
- # However, there may be cases where other action types should be executed several times with the same parameters.
- else:
- actions.append( action_tuple )
+ actions.append( ( action_type, action_dict ) )
if actions:
actions_dict[ 'actions' ] = actions
if proprietary_fabfile_path:
@@ -463,7 +488,24 @@
else:
return [ item ]
-def populate_actions_dict( app, dependent_install_dir, required_install_dir, tool_shed_repository, package_name, package_version, tool_dependencies_config ):
+def parse_env_shell_entry( action, name, value, line ):
+ new_value = value
+ var_name = '$%s' % name
+ tmp_value = line.split( '=' )[ 1 ]
+ if action == 'prepend_to':
+ # PATH=/test/package_rdkit_2012_12/62ebd7bb637a/rdkit/bin:$PATH; export PATH
+ new_value = tmp_value.split( ':%s' % var_name )[ 0 ]
+ elif action == 'set_to':
+ # RDBASE=test/package_rdkit_2012_12/62ebd7bb637a/rdkit; export RDBASE
+ new_value = tmp_value.split( ';' )[ 0 ]
+ elif action == 'append_to':
+ # LD_LIBRARY_PATH=$LD_LIBRARY_PATH:test/package_rdkit_2012_12/62ebd7bb637a/rdkit/lib/; export LD_LIBRARY_PATH
+ new_value = tmp_value.split( ':' )[ 1 ]
+ new_value = new_value.split( ';' )[ 0 ]
+ return new_value
+
+
+def populate_actions_dict( app, dependent_install_dir, required_install_dir, tool_shed_repository, required_repository, package_name, package_version, tool_dependencies_config ):
"""
Populate an actions dictionary that can be sent to fabric_util.install_and_build_package. This method handles the scenario where a tool_dependencies.xml
file defines a complex repository dependency. In this case, the tool dependency package will be installed in a separate repository and the tool dependency
@@ -495,7 +537,8 @@
tool_shed_repository=tool_shed_repository,
package_name=package_name,
package_version=package_version,
- elem=required_td_elem )
+ elem=required_td_elem,
+ required_repository=required_repository )
if actions:
actions_dict[ 'actions' ] = actions
break
@@ -629,4 +672,4 @@
parts = []
for arg in args:
parts.append( arg.strip( '/' ) )
- return '/'.join( parts )
+ return '/'.join( parts )
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f36d687104d6/
Changeset: f36d687104d6
Branch: next-stable
User: Dave Bouvier
Date: 2013-05-31 18:53:56
Summary: Fix for redundant environment settings in env.sh when set_environment actions are distributed across multiple action sections.
Affected #: 1 file
diff -r 835eb008fbabed538d7b10081e05680757646edd -r f36d687104d6c2b0b7e482181a5bf0a24740feb1 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -431,7 +431,15 @@
else:
log.debug( "Unsupported action type '%s'. Not proceeding." % str( action_type ) )
raise Exception( "Unsupported action type '%s' in tool dependency definition." % str( action_type ) )
- actions.append( ( action_type, action_dict ) )
+ action_tuple = ( action_type, action_dict )
+ # If we're setting environment variables, it's redundant to set the same variable to the same value more than once,
+ # so we only append it to the actions list if it isn't already there.
+ if action_type in [ 'set_environment', 'set_environment_for_install' ]:
+ if action_tuple not in actions:
+ actions.append( action_tuple )
+ # However, there may be cases where other action types should be executed several times with the same parameters.
+ else:
+ actions.append( action_tuple )
if actions:
actions_dict[ 'actions' ] = actions
if proprietary_fabfile_path:
https://bitbucket.org/galaxy/galaxy-central/commits/c37ea459a80b/
Changeset: c37ea459a80b
User: Dave Bouvier
Date: 2013-05-31 18:54:26
Summary: Merge in next-stable.
Affected #: 1 file
diff -r 2e324d02a9633e9b7e0ee5d6535e76afef57d934 -r c37ea459a80b15795f85359536be883ca21fb942 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -431,7 +431,15 @@
else:
log.debug( "Unsupported action type '%s'. Not proceeding." % str( action_type ) )
raise Exception( "Unsupported action type '%s' in tool dependency definition." % str( action_type ) )
- actions.append( ( action_type, action_dict ) )
+ action_tuple = ( action_type, action_dict )
+ # If we're setting environment variables, it's redundant to set the same variable to the same value more than once,
+ # so we only append it to the actions list if it isn't already there.
+ if action_type in [ 'set_environment', 'set_environment_for_install' ]:
+ if action_tuple not in actions:
+ actions.append( action_tuple )
+ # However, there may be cases where other action types should be executed several times with the same parameters.
+ else:
+ actions.append( action_tuple )
if actions:
actions_dict[ 'actions' ] = actions
if proprietary_fabfile_path:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dannon: Add bucket_default userData (param-based only) to cloudlaunch.
by commits-noreply@bitbucket.org 31 May '13
by commits-noreply@bitbucket.org 31 May '13
31 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2e324d02a963/
Changeset: 2e324d02a963
User: dannon
Date: 2013-05-31 17:55:46
Summary: Add bucket_default userData (param-based only) to cloudlaunch.
Affected #: 2 files
diff -r b27fc53fd92f7c306530fd878177c403dd3b1882 -r 2e324d02a9633e9b7e0ee5d6535e76afef57d934 lib/galaxy/webapps/galaxy/controllers/cloudlaunch.py
--- a/lib/galaxy/webapps/galaxy/controllers/cloudlaunch.py
+++ b/lib/galaxy/webapps/galaxy/controllers/cloudlaunch.py
@@ -35,8 +35,12 @@
BaseUIController.__init__(self, app)
@web.expose
- def index(self, trans, share_string=None, ami=None):
- return trans.fill_template("cloud/index.mako", default_keypair = DEFAULT_KEYPAIR, share_string=share_string, ami=ami)
+ def index(self, trans, share_string=None, ami=None, bucket_default = None):
+ return trans.fill_template("cloud/index.mako",
+ default_keypair = DEFAULT_KEYPAIR,
+ share_string=share_string,
+ ami=ami,
+ bucket_default=bucket_default)
@web.expose
def get_account_info(self, trans, key_id, secret, **kwargs):
@@ -92,7 +96,7 @@
return to_json_string(account_info)
@web.expose
- def launch_instance(self, trans, cluster_name, password, key_id, secret, instance_type, share_string, keypair, ami=DEFAULT_AMI, zone=None, **kwargs):
+ def launch_instance(self, trans, cluster_name, password, key_id, secret, instance_type, share_string, keypair, ami=DEFAULT_AMI, zone=None, bucket_default=None, **kwargs):
ec2_error = None
try:
# Create security group & key pair used when starting an instance
@@ -113,6 +117,8 @@
user_provided_data['password'] = password
if share_string:
user_provided_data['share_string'] = share_string
+ if bucket_default:
+ user_provided_data['bucket_default'] = bucket_default
rs = run_instance(ec2_conn=ec2_conn,
image_id = ami,
diff -r b27fc53fd92f7c306530fd878177c403dd3b1882 -r 2e324d02a9633e9b7e0ee5d6535e76afef57d934 templates/webapps/galaxy/cloud/index.mako
--- a/templates/webapps/galaxy/cloud/index.mako
+++ b/templates/webapps/galaxy/cloud/index.mako
@@ -250,6 +250,10 @@
<input type='hidden' name='ami' value='${ami}'/>
%endif
+ %if bucket_default:
+ <input type='hidden' name='bucket_default' value='${bucket_default}'/>
+ %endif
+
<div class="form-row"><label for="id_instance_type">Instance Type</label><select name="instance_type" id="id_instance_type">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: james_taylor: workflow: fix border color for selected workflow nodes and slight change in node position on selection/unselection
by commits-noreply@bitbucket.org 31 May '13
by commits-noreply@bitbucket.org 31 May '13
31 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b27fc53fd92f/
Changeset: b27fc53fd92f
User: james_taylor
Date: 2013-05-31 17:01:01
Summary: workflow: fix border color for selected workflow nodes and slight change in node position on selection/unselection
Affected #: 2 files
diff -r 7b4ce21a008f1140cdcd042ad4a286f362de0641 -r b27fc53fd92f7c306530fd878177c403dd3b1882 static/style/base.less
--- a/static/style/base.less
+++ b/static/style/base.less
@@ -652,6 +652,10 @@
.toolForm.toolFormInCanvas {
border: solid @form_border 1px;
+ &.toolForm-active {
+ border: solid @blue 3px;
+ margin: 4px;
+ }
.toolFormTitle {
font-size: @baseFontSize;
line-height: @baseLineHeight;
diff -r 7b4ce21a008f1140cdcd042ad4a286f362de0641 -r b27fc53fd92f7c306530fd878177c403dd3b1882 static/style/blue/base.css
--- a/static/style/blue/base.css
+++ b/static/style/blue/base.css
@@ -969,7 +969,8 @@
div.toolFormDisabled{border-color:#999999;}
div.toolHelp{margin-top:15px;padding:5px;}
div.toolHelpBody{width:100%;}
-.toolForm.toolFormInCanvas{border:solid #d8b365 1px;}.toolForm.toolFormInCanvas .toolFormTitle{font-size:12px;line-height:16px;}
+.toolForm.toolFormInCanvas{border:solid #d8b365 1px;}.toolForm.toolFormInCanvas.toolForm-active{border:solid blue 3px;margin:4px;}
+.toolForm.toolFormInCanvas .toolFormTitle{font-size:12px;line-height:16px;}
div.form-title,div.toolFormTitle{font-size:15px;line-height:24px;padding:5px 10px;background:#ebd9b2;border-bottom:solid #d8b365 1px;}
div.form-body{padding:5px 0;}
div.form-row{padding:5px 10px;}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/835eb008fbab/
Changeset: 835eb008fbab
Branch: next-stable
User: jgoecks
Date: 2013-05-30 23:38:57
Summary: During workflow export, update workflow modules before visiting inputs to avoid errors resulting from parameter changes.
Affected #: 1 file
diff -r fa04c813f5c9ddfaae1ed3e9896591128a07780d -r 835eb008fbabed538d7b10081e05680757646edd lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -1756,6 +1756,9 @@
def callback( input, value, prefixed_name, prefixed_label ):
if isinstance( input, DataToolParameter ):
data_input_names[ prefixed_name ] = True
+
+ # FIXME: this updates modules silently right now; messages from updates should be provided.
+ module.check_and_update_state()
visit_input_values( module.tool.inputs, module.state.inputs, callback )
# Filter
# FIXME: this removes connection without displaying a message currently!
https://bitbucket.org/galaxy/galaxy-central/commits/7b4ce21a008f/
Changeset: 7b4ce21a008f
User: jgoecks
Date: 2013-05-30 23:39:22
Summary: Automated merge with next-stable
Affected #: 1 file
diff -r 460bdfc91d32cf6692ff3dc1837b2e3d981dd1a2 -r 7b4ce21a008f1140cdcd042ad4a286f362de0641 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -1756,6 +1756,9 @@
def callback( input, value, prefixed_name, prefixed_label ):
if isinstance( input, DataToolParameter ):
data_input_names[ prefixed_name ] = True
+
+ # FIXME: this updates modules silently right now; messages from updates should be provided.
+ module.check_and_update_state()
visit_input_values( module.tool.inputs, module.state.inputs, callback )
# Filter
# FIXME: this removes connection without displaying a message currently!
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/fa04c813f5c9/
Changeset: fa04c813f5c9
Branch: next-stable
User: jgoecks
Date: 2013-05-30 23:17:23
Summary: Enhance unicodify to correctly handle non-string input.
Affected #: 1 file
diff -r 69d502188765dacd378a5ac75b49d2b0a0b33032 -r fa04c813f5c9ddfaae1ed3e9896591128a07780d lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -480,10 +480,11 @@
"""
Returns a unicode string or None
"""
+
if isinstance( value, unicode ):
return value
try:
- return unicode( value, encoding, error )
+ return unicode( str( value ), encoding, error )
except:
return default
https://bitbucket.org/galaxy/galaxy-central/commits/460bdfc91d32/
Changeset: 460bdfc91d32
User: jgoecks
Date: 2013-05-30 23:17:48
Summary: Automated merge with next-stable
Affected #: 1 file
diff -r babedb4715aaa3afeeebfe162f9e089625b1533a -r 460bdfc91d32cf6692ff3dc1837b2e3d981dd1a2 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -480,10 +480,11 @@
"""
Returns a unicode string or None
"""
+
if isinstance( value, unicode ):
return value
try:
- return unicode( value, encoding, error )
+ return unicode( str( value ), encoding, error )
except:
return default
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0