galaxy-commits
December 2011
- 1 participant
- 92 discussions
commit/galaxy-central: greg: Enhance the datatypes registry to: (a) enable use of config files that do not include a "sniffers" tag set section, (b) not load a datatype if the registry already includes a datatype with that extension.
by Bitbucket 16 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f37d9a0a1afc/
changeset: f37d9a0a1afc
user: greg
date: 2011-12-16 17:19:29
summary: Enhance the datatypes registry to: (a) enable use of config files that do not include a "sniffers" tag set section, (b) not load a datatype if the registry already includes a datatype with that extension.
affected #: 1 file
diff -r ef4a1377fcc62bad6feb11df126339c639b7e56a -r f37d9a0a1afc72945e1283195739fc31418dc934 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -62,7 +62,10 @@
mimetype = elem.get( 'mimetype', None )
display_in_upload = elem.get( 'display_in_upload', False )
make_subclass = galaxy.util.string_as_bool( elem.get( 'subclass', False ) )
- if extension and ( dtype or type_extension ):
+ if extension and extension in self.datatypes_by_extension:
+ self.log.debug( "Ignoring datatype with extension '%s' from '%s' because the registry already includes a datatype with that extension." \
+ % ( extension, config ) )
+ elif extension and ( dtype or type_extension ):
if dtype:
fields = dtype.split( ':' )
datatype_module = fields[0]
@@ -142,23 +145,24 @@
d_type1.add_display_application( display_app )
# Load datatype sniffers from the config
sniffers = root.find( 'sniffers' )
- for elem in sniffers.findall( 'sniffer' ):
- dtype = elem.get( 'type', None )
- if dtype:
- try:
- fields = dtype.split( ":" )
- datatype_module = fields[0]
- datatype_class = fields[1]
- module = __import__( datatype_module )
- for comp in datatype_module.split('.')[1:]:
- module = getattr(module, comp)
- aclass = getattr( module, datatype_class )()
- self.sniff_order.append( aclass )
- self.log.debug( 'Loaded sniffer for datatype: %s' % dtype )
- except Exception, exc:
- self.log.warning( 'Error appending datatype %s to sniff_order, problem: %s' % ( dtype, str( exc ) ) )
- #default values
- if len(self.datatypes_by_extension) < 1:
+ if sniffers:
+ for elem in sniffers.findall( 'sniffer' ):
+ dtype = elem.get( 'type', None )
+ if dtype:
+ try:
+ fields = dtype.split( ":" )
+ datatype_module = fields[0]
+ datatype_class = fields[1]
+ module = __import__( datatype_module )
+ for comp in datatype_module.split('.')[1:]:
+ module = getattr(module, comp)
+ aclass = getattr( module, datatype_class )()
+ self.sniff_order.append( aclass )
+ self.log.debug( 'Loaded sniffer for datatype: %s' % dtype )
+ except Exception, exc:
+ self.log.warning( 'Error appending datatype %s to sniff_order, problem: %s' % ( dtype, str( exc ) ) )
+ # Default values.
+ if not self.datatypes_by_extension:
self.datatypes_by_extension = {
'ab1' : binary.Ab1(),
'axt' : sequence.Axt(),
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
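
The enhancement above does two things: it lets a datatypes config omit the "sniffers" tag set, and it skips any datatype whose extension is already registered instead of overwriting it. A minimal standalone sketch of that guard pattern in the same spirit (function and variable names here are illustrative, not the actual Galaxy registry API):

import logging
import xml.etree.ElementTree as ET

log = logging.getLogger(__name__)

def load_datatypes(config, datatypes_by_extension):
    # Register datatypes from a config file, skipping extensions that are already present.
    root = ET.parse(config).getroot()
    for elem in root.iter('datatype'):
        extension = elem.get('extension', None)
        if extension and extension in datatypes_by_extension:
            log.debug("Ignoring datatype with extension '%s' from '%s': already registered.", extension, config)
        elif extension:
            datatypes_by_extension[extension] = elem.get('type')
    # The 'sniffers' section is optional; only walk it when the config defines one.
    sniffers = root.find('sniffers')
    if sniffers is not None:
        for sniffer_elem in sniffers.findall('sniffer'):
            log.debug("Would load sniffer for datatype: %s", sniffer_elem.get('type'))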
commit/galaxy-central: jgoecks: Custom builds: (a) reinstitute support for len files/len entries and (b) improve UI so that it is easy to specify build definition and help is accurate.
by Bitbucket 15 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ef4a1377fcc6/
changeset: ef4a1377fcc6
user: jgoecks
date: 2011-12-15 22:21:39
summary: Custom builds: (a) reinstitute support for len files/len entries and (b) improve UI so that it is easy to specify build definition and help is accurate.
affected #: 2 files
diff -r fdcdc0115a123aa40bc46e69df769f171837534e -r ef4a1377fcc62bad6feb11df126339c639b7e56a lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py
+++ b/lib/galaxy/web/controllers/user.py
@@ -1198,14 +1198,57 @@
# Add new custom build.
name = kwds.get('name', '')
key = kwds.get('key', '')
- dataset_id = kwds.get('dataset_id', '')
- if not name or not key or not dataset_id:
+
+ # Look for build's chrom info in len_file and len_text.
+ len_file = kwds.get( 'len_file', None )
+ if getattr( len_file, "file", None ): # Check if it's a FieldStorage object
+ len_text = len_file.file.read()
+ else:
+ len_text = kwds.get( 'len_text', None )
+
+ if not len_text:
+ # Using FASTA from history.
+ dataset_id = kwds.get('dataset_id', '')
+
+ if not name or not key or not ( len_text or dataset_id ):
message = "You must specify values for all the fields."
elif key in dbkeys:
message = "There is already a custom build with that key. Delete it first if you want to replace it."
else:
- dataset_id = trans.security.decode_id( dataset_id )
- dbkeys[key] = { "name": name, "fasta": dataset_id }
+ # Have everything needed; create new build.
+ build_dict = { "name": name }
+ if len_text:
+ # Create new len file
+ new_len = trans.app.model.HistoryDatasetAssociation( extension="len", create_dataset=True, sa_session=trans.sa_session )
+ trans.sa_session.add( new_len )
+ new_len.name = name
+ new_len.visible = False
+ new_len.state = trans.app.model.Job.states.OK
+ new_len.info = "custom build .len file"
+ trans.sa_session.flush()
+ counter = 0
+ f = open(new_len.file_name, "w")
+ # LEN files have format:
+ # <chrom_name><tab><chrom_length>
+ for line in len_text.split("\n"):
+ lst = line.strip().rsplit(None, 1) # Splits at the last whitespace in the line
+ if not lst or len(lst) < 2:
+ lines_skipped += 1
+ continue
+ chrom, length = lst[0], lst[1]
+ try:
+ length = int(length)
+ except ValueError:
+ lines_skipped += 1
+ continue
+ counter += 1
+ f.write("%s\t%s\n" % (chrom, length))
+ f.close()
+ build_dict.update( { "len": new_len.id, "count": counter } )
+ else:
+ dataset_id = trans.security.decode_id( dataset_id )
+ build_dict[ "fasta" ] = dataset_id
+ dbkeys[key] = build_dict
# Save builds.
# TODO: use database table to save builds.
user.preferences['dbkeys'] = to_json_string(dbkeys)
diff -r fdcdc0115a123aa40bc46e69df769f171837534e -r ef4a1377fcc62bad6feb11df126339c639b7e56a templates/user/dbkeys.mako
--- a/templates/user/dbkeys.mako
+++ b/templates/user/dbkeys.mako
@@ -40,6 +40,15 @@
padding: 10px;
}
%endif
+ div.def_tab {
+ float: left;
+ padding: 0.2em 0.5em;
+ background-color: white;
+ }
+ div.def_tab.active {
+ background-color: #CCF;
+ border: solid 1px #66A;
+ }
</style></%def>
@@ -60,6 +69,43 @@
$("#show_installed_builds").click(function() {
$("#installed_builds").show();
});
+
+ // Set up behavior for build definition tab controls.
+ $("div.def_tab > a").each(function() {
+ $(this).click(function() {
+ var tab_id = $(this).attr("id");
+
+ // Hide all build inputs, help.
+ $("div.build_definition").children(":input").hide();
+ $(".infomessagesmall > div").hide();
+
+ // Show input item, help corresponding to tab id.
+ $("#" + tab_id + "_input").show();
+ $("." + tab_id + "_help").show();
+
+ // Update tabs.
+ $("div.def_tab").removeClass("active");
+ $(this).parent().addClass("active");
+ });
+ });
+
+ ## If there are fasta HDAs available, show fasta tab; otherwise show len file tab.
+ // Set starting tab.
+ % if fasta_hdas.first():
+ $("#fasta").click();
+ % else:
+ $("#len_file").click();
+ % endif
+
+ // Before submit, remove inputs not associated with the active tab.
+ $("#submit").click(function() {
+ var id = $(".active > a").attr("id");
+ $("div.build_definition").children(":input").each(function() {
+ if ( $(this).attr("id") !== (id + "_input") ) {
+ $(this).remove();
+ }
+ });
+ });
});
</script>
@@ -70,7 +116,6 @@
</%def><%def name="body()">
-
% if message:
<div class="errormessagelarge">${message}</div>
% elif lines_skipped > 0:
@@ -123,50 +168,82 @@
<hr /><h3>Add a Custom Build</h3><form action="dbkeys" method="post" enctype="multipart/form-data">
+ ## Custom build via fasta in history.
<div class="toolForm" style="float: left;"><div class="toolFormTitle">New Build</div><div class="toolFormBody"><div class="form-row">
- <label for="name">Build Name (eg: Mouse):</label>
+ <label for="name">Name (eg: Hamster):</label><input type="text" id="name" name="name" /></div><div class="form-row">
- <label for="key">Build Key (eg: mm9):</label>
+ <label for="key">Key (eg: hamster_v1):</label><input type="text" id="key" name="key" /></div>
- <div class="form-row">
- <label for="len_file">Build Genome:</label>
- <select name="dataset_id">
+ <div class="form-row build_definition">
+ <label>Definition:</label>
+ <div class="def_tab">
+ <a id="fasta" href="javascript:void(0)">FASTA</a>
+ </div>
+ <div class="def_tab">
+ <a id="len_file" href="javascript:void(0)">Len File</a>
+ </div>
+ <div class="def_tab">
+ <a id="len_entry" href="javascript:void(0)">Len Entry</a>
+ </div>
+ <div style="clear: both; padding-bottom: 0.5em"></div>
+ <select id="fasta_input" name="dataset_id">
%for dataset in fasta_hdas:
<option value="${trans.security.encode_id( dataset.id )}">${dataset.hid}: ${dataset.name}</option>
%endfor
</select>
- </div>
-
- <div class="form-row"><input type="submit" name="add" value="Submit"/></div>
+ <input type="file" id="len_file_input" name="len_file" /></input>
+ <textarea id="len_entry_input" name="len_text" cols="30" rows="8"></textarea>
+ </div>
+ <div class="form-row"><input id="submit" type="submit" name="add" value="Submit"/></div></div></div></form><div class="infomessagesmall" style="float: left; margin-left: 10px; width: 40%;">
- <h3>Length Format</h3>
- <p>
- The length format is two-column, separated by whitespace, of the form:
- <pre>chrom/contig length of chrom/contig</pre>
- </p>
- <p>
- For example, the first few entries of <em>mm9.len</em> are as follows:
- <pre>
-chr1 197195432
-chr2 181748087
-chr3 159599783
-chr4 155630120
-chr5 152537259
- </pre>
- </p>
+ <div class="fasta_help">
+ <h3>FASTA format</h3>
+ <p>
+ This is a multi-fasta file from your current history that provides the genome
+ sequences for each chromosome/contig in your build.
+ </p>
+
+ <p>
+ Here is a snippet from an example multi-fasta file:
+ <pre>
+ >chr1
+ ATTATATATAAGACCACAGAGAGAATATTTTGCCCGG...
+ >chr2
+ GGCGGCCGCGGCGATATAGAACTACTCATTATATATA...
+ ...
+ </pre>
+ </p>
+ </div>
+ <div class="len_file_help len_entry_help">
+ <h3>Length Format</h3>
+ <p>
+ The length format is two-column, separated by whitespace, of the form:
+ <pre>chrom/contig length of chrom/contig</pre>
+ </p>
+ <p>
+ For example, the first few entries of <em>mm9.len</em> are as follows:
+ <pre>
+ chr1 197195432
+ chr2 181748087
+ chr3 159599783
+ chr4 155630120
+ chr5 152537259
+ </pre>
+ </p>
- <p>Trackster uses this information to populate the select box for chrom/contig, and
- to set the maximum basepair of the track browser. You may either upload a .len file
- of this format, or directly enter the information into the box.</p>
-
+ <p>Trackster uses this information to populate the select box for chrom/contig, and
+ to set the maximum basepair of the track browser. You may either upload a .len file
+ of this format (Len File option), or directly enter the information into the box
+ (Len Entry option).</p>
+ </div></div></%def>
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
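
The custom-build change above accepts chromosome lengths either as an uploaded .len file or as pasted text, in the two-column format described in the help section of the template. A small self-contained sketch of that parsing step (the helper name is hypothetical; the committed code writes the entries into a new "len" dataset instead of returning them):

def parse_len_text(len_text):
    # Each line is '<chrom/contig><whitespace><length>'; malformed lines are counted and skipped.
    entries, lines_skipped = [], 0
    for line in len_text.split("\n"):
        fields = line.strip().rsplit(None, 1)  # split at the last run of whitespace
        if len(fields) < 2:
            lines_skipped += 1
            continue
        chrom, length = fields
        try:
            length = int(length)
        except ValueError:
            lines_skipped += 1
            continue
        entries.append((chrom, length))
    return entries, lines_skipped

# Example using the first entries of mm9.len shown in the help text:
entries, skipped = parse_len_text("chr1\t197195432\nchr2\t181748087\n")
# entries == [('chr1', 197195432), ('chr2', 181748087)]; skipped counts the trailing blank line.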
commit/galaxy-central: greg: Reset repository metadata for installed tool shed repositories when a new update to the repository contents has been pulled from the tool shed.
by Bitbucket 15 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/fdcdc0115a12/
changeset: fdcdc0115a12
user: greg
date: 2011-12-15 16:49:40
summary: Reset repository metadata for installed tool shed repositories when a new update to the repository contents has been pulled from the tool shed.
affected #: 3 files
diff -r 13ba6909faae12b0a7efcdc76e0797c5e7903929 -r fdcdc0115a123aa40bc46e69df769f171837534e lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -73,18 +73,19 @@
if returncode == 0:
returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
if returncode == 0:
- metadata_dict = load_repository_contents( self.app,
- name,
- description,
- self.repository_owner,
- changeset_revision,
- repository_clone_url,
- self.install_tool_config,
- self.tool_path,
- tool_section,
- relative_install_dir,
- current_working_dir,
- tmp_name )
+ metadata_dict = load_repository_contents( app=self.app,
+ name=name,
+ description=description,
+ owner=self.repository_owner,
+ changeset_revision=changeset_revision,
+ tool_path=self.tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tmp_name=tmp_name,
+ tool_section=tool_section,
+ shed_tool_conf=self.install_tool_config,
+ new_install=True )
# Add a new record to the tool_id_guid_map table for each
# tool in the repository if one doesn't already exist.
if 'tools' in metadata_dict:
diff -r 13ba6909faae12b0a7efcdc76e0797c5e7903929 -r fdcdc0115a123aa40bc46e69df769f171837534e lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -2,6 +2,7 @@
from datetime import date, datetime, timedelta
from time import strftime
from galaxy import util
+from galaxy.util.json import *
from galaxy.tools import ToolSection
from galaxy.tools.search import ToolBoxSearch
from galaxy.model.orm import *
@@ -77,7 +78,7 @@
os.chdir( current_working_dir )
tmp_stderr.close()
return returncode, tmp_name
-def create_or_undelete_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict, owner='' ):
+def create_or_update_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict, owner='' ):
# This method is used by the InstallManager, which does not have access to trans.
sa_session = app.model.context.current
tmp_url = clean_repository_clone_url( repository_clone_url )
@@ -85,16 +86,13 @@
if not owner:
owner = get_repository_owner( tmp_url )
includes_datatypes = 'datatypes_config' in metadata_dict
- flush_needed = False
tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision )
if tool_shed_repository:
- if tool_shed_repository.deleted:
- tool_shed_repository.description = description
- tool_shed_repository.changeset_revision = changeset_revision
- tool_shed_repository.metadata = metadata_dict
- tool_shed_repository.includes_datatypes = includes_datatypes
- tool_shed_repository.deleted = False
- flush_needed = True
+ tool_shed_repository.description = description
+ tool_shed_repository.changeset_revision = changeset_revision
+ tool_shed_repository.metadata = metadata_dict
+ tool_shed_repository.includes_datatypes = includes_datatypes
+ tool_shed_repository.deleted = False
else:
tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed,
name=name,
@@ -104,10 +102,8 @@
changeset_revision=changeset_revision,
metadata=metadata_dict,
includes_datatypes=includes_datatypes )
- flush_needed = True
- if flush_needed:
- sa_session.add( tool_shed_repository )
- sa_session.flush()
+ sa_session.add( tool_shed_repository )
+ sa_session.flush()
def generate_datatypes_metadata( datatypes_config, metadata_dict ):
"""
Update the received metadata_dict with changes that have been applied
@@ -471,12 +467,17 @@
except Exception, e:
log.debug( "Exception importing datatypes code file included in installed repository: %s" % str( e ) )
app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=datatypes_config, imported_module=imported_module )
-def load_repository_contents( app, name, description, owner, changeset_revision, repository_clone_url, shed_tool_conf,
- tool_path, tool_section, relative_install_dir, current_working_dir, tmp_name ):
+def load_repository_contents( app, name, description, owner, changeset_revision, tool_path, repository_clone_url, relative_install_dir,
+ current_working_dir, tmp_name, tool_section=None, shed_tool_conf=None, new_install=True ):
# This method is used by the InstallManager, which does not have access to trans.
# Generate the metadata for the installed tool shed repository. It is imperative that
# the installed repository is updated to the desired changeset_revision before metadata
- # is set because the process for setting metadata uses the repository files on disk.
+ # is set because the process for setting metadata uses the repository files on disk. This
+ # method is called when new tools have been installed (in which case values should be received
+ # for tool_section and shed_tool_conf, and new_install should be left at it's default value)
+ # and when updates have been pulled to previously installed repositories (in which case the
+ # default value None is set for tool_section and shed_tool_conf, and the value of new_install
+ # is passed as False).
metadata_dict = generate_metadata( app.toolbox, relative_install_dir, repository_clone_url )
if 'datatypes_config' in metadata_dict:
datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
@@ -497,42 +498,38 @@
repository_tools_tups = handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups )
# Handle tools that use fabric scripts to install dependencies.
handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups )
- # Generate a new entry for the tool config.
- elem_list = generate_tool_panel_elem_list( name,
- repository_clone_url,
- changeset_revision,
- repository_tools_tups,
- tool_section=tool_section,
- owner=owner )
- if tool_section:
- for section_elem in elem_list:
- # Load the section into the tool panel.
- app.toolbox.load_section_tag_set( section_elem, app.toolbox.tool_panel, tool_path )
- else:
- # Load the tools into the tool panel outside of any sections.
- for tool_elem in elem_list:
- guid = tool_elem.get( 'guid' )
- app.toolbox.load_tool_tag_set( tool_elem, app.toolbox.tool_panel, tool_path=tool_path, guid=guid )
+ if new_install:
+ # Generate a new entry for the tool config.
+ elem_list = generate_tool_panel_elem_list( name,
+ repository_clone_url,
+ changeset_revision,
+ repository_tools_tups,
+ tool_section=tool_section,
+ owner=owner )
+ if tool_section:
+ for section_elem in elem_list:
+ # Load the section into the tool panel.
+ app.toolbox.load_section_tag_set( section_elem, app.toolbox.tool_panel, tool_path )
+ else:
+ # Load the tools into the tool panel outside of any sections.
+ for tool_elem in elem_list:
+ guid = tool_elem.get( 'guid' )
+ app.toolbox.load_tool_tag_set( tool_elem, app.toolbox.tool_panel, tool_path=tool_path, guid=guid )
+ for elem_entry in elem_list:
+ # Append the new entry (either section or list of tools) to the shed_tool_config file.
+ add_shed_tool_conf_entry( app, shed_tool_conf, elem_entry )
+ if app.toolbox_search.enabled:
+ # If search support for tools is enabled, index the new installed tools.
+ app.toolbox_search = ToolBoxSearch( app.toolbox )
# Remove the temporary file
try:
os.unlink( tmp_name )
except:
pass
- for elem_entry in elem_list:
- # Append the new entry (either section or list of tools) to the shed_tool_config file.
- add_shed_tool_conf_entry( app, shed_tool_conf, elem_entry )
- if app.toolbox_search.enabled:
- # If search support for tools is enabled, index the new installed tools.
- app.toolbox_search = ToolBoxSearch( app.toolbox )
# Add a new record to the tool_shed_repository table if one doesn't
# already exist. If one exists but is marked deleted, undelete it.
- log.debug( "Adding new row to tool_shed_repository table for repository '%s'" % name )
- create_or_undelete_tool_shed_repository( app,
- name,
- description,
- changeset_revision,
- repository_clone_url,
- metadata_dict )
+ log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name )
+ create_or_update_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict )
return metadata_dict
def pretty_print_xml( elem, level=0 ):
pad = ' '
diff -r 13ba6909faae12b0a7efcdc76e0797c5e7903929 -r fdcdc0115a123aa40bc46e69df769f171837534e lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -237,18 +237,19 @@
returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
if returncode == 0:
owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
- metadata_dict = load_repository_contents( trans.app,
- name,
- description,
- owner,
- changeset_revision,
- repository_clone_url,
- shed_tool_conf,
- tool_path,
- tool_section,
- relative_install_dir,
- current_working_dir,
- tmp_name )
+ metadata_dict = load_repository_contents( app=trans.app,
+ name=name,
+ description=description,
+ owner=owner,
+ changeset_revision=changeset_revision,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tmp_name=tmp_name,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf,
+ new_install=True )
installed_repository_names.append( name )
else:
tmp_stderr = open( tmp_name, 'rb' )
@@ -302,7 +303,7 @@
status = params.get( 'status', 'done' )
repository = get_repository( trans, kwd[ 'id' ] )
description = util.restore_text( params.get( 'description', repository.description ) )
- relative_install_dir = self.__get_relative_install_dir( trans, repository )
+ tool_path, relative_install_dir = self.__get_tool_path_and_relative_install_dir( trans, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
if params.get( 'edit_repository_button', False ):
if description != repository.description:
@@ -351,13 +352,28 @@
message = "The cloned tool shed repository named '%s' is current (there are no updates available)." % name
else:
current_working_dir = os.getcwd()
- relative_install_dir = self.__get_relative_install_dir( trans, repository )
+ tool_path, relative_install_dir = self.__get_tool_path_and_relative_install_dir( trans, repository )
if relative_install_dir:
repo_files_dir = os.path.join( relative_install_dir, name )
returncode, tmp_name = pull_repository( current_working_dir, repo_files_dir, name )
if returncode == 0:
returncode, tmp_name = update_repository( current_working_dir, repo_files_dir, latest_changeset_revision )
if returncode == 0:
+ # Update the repository metadata.
+ repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
+ metadata_dict = load_repository_contents( app=trans.app,
+ name=name,
+ description=repository.description,
+ owner=owner,
+ changeset_revision=changeset_revision,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tmp_name=tmp_name,
+ tool_section=None,
+ shed_tool_conf=None,
+ new_install=False )
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
repository.update_available = False
@@ -408,8 +424,9 @@
metadata=metadata,
message=message,
status=status )
- def __get_relative_install_dir( self, trans, repository ):
- # Get the directory where the repository is install.
+ def __get_tool_path_and_relative_install_dir( self, trans, repository ):
+ # Return both the tool_path configured in the relative shed_tool_conf and
+ # the relative path to the directory where the repository is installed.
tool_shed = clean_tool_shed_url( repository.tool_shed )
partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
# Get the relative tool installation paths from each of the shed tool configs.
@@ -420,7 +437,7 @@
relative_install_dir = os.path.join( tool_path, partial_install_dir )
if os.path.isdir( relative_install_dir ):
break
- return relative_install_dir
+ return tool_path, relative_install_dir
def __generate_tool_path( self, repository_clone_url, changeset_revision ):
"""
Generate a tool path that guarantees repositories with the same name will always be installed
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
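
After this refactor, both installation and update flow through load_repository_contents, distinguished by the new new_install keyword: a fresh install passes a tool panel section and a shed tool config so the tools are loaded into the panel, while an update of an already-installed repository passes None for both and new_install=False so only the repository metadata is regenerated. A condensed sketch of the two call shapes taken from the diff (argument values are placeholders):

# New install: tool panel section and shed tool conf are supplied.
metadata_dict = load_repository_contents( app=app, name=name, description=description, owner=owner,
                                          changeset_revision=changeset_revision, tool_path=tool_path,
                                          repository_clone_url=repository_clone_url,
                                          relative_install_dir=relative_install_dir,
                                          current_working_dir=current_working_dir, tmp_name=tmp_name,
                                          tool_section=tool_section, shed_tool_conf=shed_tool_conf,
                                          new_install=True )

# Update pulled for an existing repository: no tool panel changes, so both are None.
metadata_dict = load_repository_contents( app=app, name=name, description=description, owner=owner,
                                          changeset_revision=changeset_revision, tool_path=tool_path,
                                          repository_clone_url=repository_clone_url,
                                          relative_install_dir=relative_install_dir,
                                          current_working_dir=current_working_dir, tmp_name=tmp_name,
                                          tool_section=None, shed_tool_conf=None,
                                          new_install=False )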
commit/galaxy-central: jgoecks: Trackster: make FeatureTracks compatible with 09c6c980e463 in preparation for enabling FeatureTracks in composite tracks.
by Bitbucket 14 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/13ba6909faae/
changeset: 13ba6909faae
user: jgoecks
date: 2011-12-14 23:36:18
summary: Trackster: make FeatureTracks compatible with 09c6c980e463 in preparation for enabling FeatureTracks in composite tracks.
affected #: 1 file
diff -r 501ef487e37f0f6ff2e6def235f3878a88d0a7c0 -r 13ba6909faae12b0a7efcdc76e0797c5e7903929 static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -3111,17 +3111,27 @@
if ( can_draw_now ) {
// Set up and draw tile.
extend(tile_data, more_tile_data);
+
+ // HACK: this is FeatureTrack-specific.
+ // If track mode is Auto, determine mode and update.
+ var mode = track.mode;
+ if (mode === "Auto") {
+ mode = track.get_mode(tile_data);
+ track.update_auto_mode(mode);
+ }
+
+ // Draw canvas.
var
canvas = track.view.canvas_manager.new_canvas(),
tile_bounds = track._get_tile_bounds(tile_index, resolution),
tile_low = tile_bounds[0],
tile_high = tile_bounds[1],
width = Math.ceil( (tile_high - tile_low) * w_scale ) + track.left_offset,
- height = track.get_canvas_height(tile_data);
+ height = track.get_canvas_height(tile_data, mode, w_scale, width);
canvas.width = width;
canvas.height = height;
- var tile = track.draw_tile(tile_data, canvas, track.mode, resolution, tile_index, w_scale, seq_data);
+ var tile = track.draw_tile(tile_data, canvas, mode, resolution, tile_index, w_scale, seq_data);
// Don't cache, show if no tile.
if (tile !== undefined) {
@@ -3145,7 +3155,7 @@
* Returns canvas height needed to display data; return value is an integer that denotes the
* number of pixels required.
*/
- get_canvas_height: function(data) {
+ get_canvas_height: function(result, mode, w_scale, canvas_width) {
return this.height_px;
},
/**
@@ -3416,13 +3426,14 @@
if ( can_draw_now ) {
// Set up and draw tile.
extend(tile_data, more_tile_data);
+
var
canvas = track.view.canvas_manager.new_canvas(),
tile_bounds = track._get_tile_bounds(tile_index, resolution),
tile_low = tile_bounds[0],
tile_high = tile_bounds[1],
width = Math.ceil( (tile_high - tile_low) * w_scale ),
- height = track.get_canvas_height(tile_data);
+ height = track.get_canvas_height(tile_data, mode, w_scale, width);
// FIXME:
// (a) right now, only LineTracks respect width/height setting and do not set it in draw_tile;
@@ -3914,6 +3925,58 @@
return {max: max_count, delta: bin_size, data: bins};
},
/**
+ * Returns appropriate display mode based on data.
+ */
+ get_mode: function(data) {
+ if (data.dataset_type === "summary_tree") {
+ mode = "summary_tree";
+ }
+ // HACK: use no_detail mode track is in overview to prevent overview from being too large.
+ else if (data.extra_info === "no_detail" || this.is_overview) {
+ mode = "no_detail";
+ }
+ else {
+ // Choose b/t Squish and Pack.
+ // Proxy measures for using Squish:
+ // (a) error message re: limiting number of features shown;
+ // (b) X number of features shown;
+ // (c) size of view shown.
+ // TODO: cannot use (a) and (b) because it requires coordinating mode across tiles;
+ // fix this so that tiles are redrawn as necessary to use the same mode.
+ //if ( (result.message && result.message.match(/^Only the first [\d]+/)) ||
+ // (result.data && result.data.length > 2000) ||
+ //var data = result.data;
+ // if ( (data.length && data.length < 4) ||
+ // (this.view.high - this.view.low > MIN_SQUISH_VIEW_WIDTH) ) {
+ if ( this.view.high - this.view.low > MIN_SQUISH_VIEW_WIDTH ) {
+ mode = "Squish";
+ } else {
+ mode = "Pack";
+ }
+ }
+ return mode;
+ },
+ /**
+ * Returns canvas height needed to display data; return value is an integer that denotes the
+ * number of pixels required.
+ */
+ get_canvas_height: function(result, mode, w_scale, canvas_width) {
+ if (mode === "summary_tree" || mode === "Histogram") {
+ // Extra padding at top of summary tree so label does not overlap data.
+ return this.summary_draw_height + SUMMARY_TREE_TOP_PADDING;
+ }
+ else {
+ var rows_required = 1;
+ if (mode === "no_detail" || mode === "Squish" || mode === "Pack") {
+ var rows_required = this.incremental_slots(w_scale, result.data, mode);
+ }
+ // HACK: use dummy painter to get required height. Painter should be extended so that get_required_height
+ // works as a static function.
+ var dummy_painter = new (this.painter)(null, null, null, this.prefs, mode);
+ return Math.max(MIN_TRACK_HEIGHT, dummy_painter.get_required_height(rows_required, canvas_width) );
+ }
+ },
+ /**
* Draw FeatureTrack tile.
* @param result result from server
* @param canvas canvas to draw on
@@ -3925,50 +3988,14 @@
*/
draw_tile: function(result, canvas, mode, resolution, tile_index, w_scale, ref_seq) {
var track = this,
- tile_bounds = track._get_tile_bounds(tile_index, resolution),
+ tile_bounds = this._get_tile_bounds(tile_index, resolution),
tile_low = tile_bounds[0],
tile_high = tile_bounds[1],
- tile_span = tile_high - tile_low,
- width = Math.ceil(tile_span * w_scale),
min_height = 25,
- left_offset = this.left_offset,
- slots,
- required_height;
-
- // Set display mode if Auto.
- if (mode === "Auto") {
- if (result.dataset_type === "summary_tree") {
- mode = result.dataset_type;
- }
- // HACK: use no_detail mode track is in overview to prevent overview from being too large.
- else if (result.extra_info === "no_detail" || track.is_overview) {
- mode = "no_detail";
- }
- else {
- // Choose b/t Squish and Pack.
- // Proxy measures for using Squish:
- // (a) error message re: limiting number of features shown;
- // (b) X number of features shown;
- // (c) size of view shown.
- // TODO: cannot use (a) and (b) because it requires coordinating mode across tiles;
- // fix this so that tiles are redrawn as necessary to use the same mode.
- //if ( (result.message && result.message.match(/^Only the first [\d]+/)) ||
- // (result.data && result.data.length > 2000) ||
- var data = result.data;
- // if ( (data.length && data.length < 4) ||
- // (this.view.high - this.view.low > MIN_SQUISH_VIEW_WIDTH) ) {
- if ( this.view.high - this.view.low > MIN_SQUISH_VIEW_WIDTH ) {
- mode = "Squish";
- } else {
- mode = "Pack";
- }
- }
- this.update_auto_mode( mode );
- }
+ left_offset = this.left_offset;
// Drawing the summary tree (feature coverage histogram)
if (mode === "summary_tree" || mode === "Histogram") {
- required_height = this.summary_draw_height;
// Add label to container div showing maximum count
// TODO: this shouldn't be done at the tile level
this.container_div.find(".yaxislabel").remove();
@@ -3976,9 +4003,6 @@
max_label.text(result.max);
max_label.css({ position: "absolute", top: "24px", left: "10px", color: this.prefs.label_color });
max_label.prependTo(this.container_div);
- canvas.width = width + left_offset;
- // Extra padding at top of summary tree
- canvas.height = required_height + SUMMARY_TREE_TOP_PADDING;
// Get summary tree data if necessary and set max if there is one.
if (result.dataset_type != "summary_tree") {
@@ -3993,18 +4017,11 @@
var ctx = canvas.getContext("2d");
// Deal with left_offset by translating.
ctx.translate(left_offset, SUMMARY_TREE_TOP_PADDING);
- painter.draw(ctx, width, required_height);
+ painter.draw(ctx, canvas.width, canvas.height);
return new SummaryTreeTile(track, tile_index, resolution, canvas, result.data, result.max);
}
- // Start dealing with row-by-row tracks
-
- // If working with a mode where slotting is necessary, update the incremental slotting
- var slots, slots_required = 1;
- if ( mode === "no_detail" || mode === "Squish" || mode === "Pack" ) {
- slots_required = this.incremental_slots(w_scale, result.data, mode);
- slots = this.inc_slots[w_scale].slots;
- }
+ // Handle row-by-row tracks
// Filter features.
var filtered = [];
@@ -4028,16 +4045,12 @@
}
}
- // Create painter, and canvas of sufficient size to contain all features.
+ // Create painter.
var filter_alpha_scaler = (this.filters_manager.alpha_filter ? new FilterScaler(this.filters_manager.alpha_filter) : null);
var filter_height_scaler = (this.filters_manager.height_filter ? new FilterScaler(this.filters_manager.height_filter) : null);
// HACK: ref_seq will only be defined for ReadTracks, and only the ReadPainter accepts that argument
var painter = new (this.painter)(filtered, tile_low, tile_high, this.prefs, mode, filter_alpha_scaler, filter_height_scaler, ref_seq);
- var required_height = Math.max(MIN_TRACK_HEIGHT, painter.get_required_height(slots_required,width));
var feature_mapper = null;
-
- canvas.width = width + left_offset;
- canvas.height = required_height;
// console.log(( tile_low - this.view.low ) * w_scale, tile_index, w_scale);
var ctx = canvas.getContext("2d");
@@ -4048,8 +4061,9 @@
if (result.data) {
// Draw features.
+ slots = this.inc_slots[w_scale].slots;
ctx.translate(left_offset, 0);
- feature_mapper = painter.draw(ctx, width, required_height, slots);
+ feature_mapper = painter.draw(ctx, canvas.width, canvas.height, slots);
feature_mapper.translation = -left_offset;
}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Trackster: small fixes so that ReferenceTracks are compatible with changes in 09c6c980e463
by Bitbucket 14 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/501ef487e37f/
changeset: 501ef487e37f
user: jgoecks
date: 2011-12-14 21:25:03
summary: Trackster: small fixes so that ReferenceTracks are compatible with changes in 09c6c980e463
affected #: 1 file
diff -r 0c804033ae5a766f1d12a2030b73fe306c27f660 -r 501ef487e37f0f6ff2e6def235f3878a88d0a7c0 static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -3017,7 +3017,7 @@
var all_tiles_drawn = true;
var drawn_tiles = [];
var tile_count = 0;
- var is_tile = function(o) { return ('track' in o) };
+ var is_tile = function(o) { return (o && 'track' in o) };
// Draw or fetch and show tiles.
while ( ( tile_index * DENSITY * resolution ) < high ) {
var draw_result = this.draw_helper( force, width, tile_index, resolution, parent_element, w_scale );
@@ -3116,7 +3116,7 @@
tile_bounds = track._get_tile_bounds(tile_index, resolution),
tile_low = tile_bounds[0],
tile_high = tile_bounds[1],
- width = Math.ceil( (tile_high - tile_low) * w_scale ),
+ width = Math.ceil( (tile_high - tile_low) * w_scale ) + track.left_offset,
height = track.get_canvas_height(tile_data);
canvas.width = width;
@@ -3483,17 +3483,14 @@
* Draw ReferenceTrack tile.
*/
draw_tile: function(seq, canvas, mode, resolution, tile_index, w_scale) {
- var track = this,
- tile_length = DENSITY * resolution;
+ var track = this;
if (w_scale > this.view.canvas_manager.char_width_px) {
if (seq.data === null) {
track.content_div.css("height", "0px");
return;
}
- var ctx = canvas.getContext("2d");
- canvas.width = Math.ceil(tile_length * w_scale + track.left_offset);
- canvas.height = track.height_px;
+ var ctx = canvas.getContext("2d");
ctx.font = ctx.canvas.manager.default_font;
ctx.textAlign = "center";
seq = seq.data;
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0c804033ae5a/
changeset: 0c804033ae5a
user: greg
date: 2011-12-14 16:18:43
summary: 1. Add a new UpdateManager for use with tool shed repositories installed into a local Galaxy instance. The UpdateManager will poll all appropriate tool sheds to see if updates are available for each of the installed repositories. Polling occurs when the Galaxy server is started. In addition, a config setting tells the UpdateManager to poll after the configured number of hours have passed. If updates are available for an installed repository, a table column is updated, and the repository name is highlighted in red, alerting the Galaxy admin that updates are available for that repository.
2. Add a new ToolIdGuidMap grid that displays all of the mappings between tool ids whose tools used to be in the distribution and guids, which is the new tool id for tools that are installed with repositories from tool sheds.
3. Add a new column named installed_changeset_revision to the tool_shed_repository table. This column is set when the repository is installed and remains static thereafter.
4. Move several tool shed related components to a new ~/lib/galaxy/tool_shed directory in the code base.
affected #: 15 files
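
The new update check is driven by two settings read in lib/galaxy/config.py below: enable_tool_shed_check turns the UpdateManager on, and hours_between_check sets the polling interval, which falls back to 12 unless it parses as an integer between 1 and 24. A minimal standalone sketch of that fallback rule (the helper name is hypothetical):

def resolve_hours_between_check( raw_value, default=12 ):
    # Fall back to the default when the value is missing, unparseable, or outside 1-24.
    try:
        hours = int( raw_value )
    except ( TypeError, ValueError ):
        return default
    if hours < 1 or hours > 24:
        return default
    return hours

assert resolve_hours_between_check( '6' ) == 6
assert resolve_hours_between_check( '48' ) == 12   # out of range
assert resolve_hours_between_check( None ) == 12   # unset or unparseable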
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -3,7 +3,7 @@
from galaxy import config, jobs, util, tools, web
import galaxy.tools.search
import galaxy.tools.data
-import galaxy.tools.tool_shed_registry
+import galaxy.tool_shed.tool_shed_registry
from galaxy.web import security
import galaxy.model
import galaxy.datatypes.registry
@@ -28,7 +28,7 @@
galaxy.model.set_datatypes_registry( self.datatypes_registry )
# Set up the tool sheds registry
if os.path.isfile( self.config.tool_sheds_config ):
- self.tool_shed_registry = galaxy.tools.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
+ self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
else:
self.tool_shed_registry = None
# Determine the database url
@@ -61,8 +61,13 @@
# If enabled, check for tools missing from the distribution because they
# have been moved to the tool shed and install all such discovered tools.
if self.config.get_bool( 'enable_tool_shed_install', False ):
- from tools import install_manager
+ from tool_shed import install_manager
self.install_manager = install_manager.InstallManager( self, self.config.tool_shed_install_config, self.config.install_tool_config )
+ # If enabled, poll respective tool sheds to see if updates are
+ # available for any installed tool shed repositories.
+ if self.config.get_bool( 'enable_tool_shed_check', False ):
+ from tool_shed import update_manager
+ self.update_manager = update_manager.UpdateManager( self )
# Load datatype converters
self.datatypes_registry.load_datatype_converters( self.toolbox )
# Load history import/export tools
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -47,12 +47,12 @@
self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) )
self.enable_quotas = string_as_bool( kwargs.get( 'enable_quotas', False ) )
self.tool_sheds_config = kwargs.get( 'tool_sheds_config_file', 'tool_sheds_conf.xml' )
- self.enable_unique_workflow_defaults = string_as_bool( kwargs.get ( 'enable_unique_workflow_defaults', False ) )
+ self.enable_unique_workflow_defaults = string_as_bool( kwargs.get( 'enable_unique_workflow_defaults', False ) )
self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() )
self.len_file_path = kwargs.get( "len_file_path", resolve_path(os.path.join(self.tool_data_path, 'shared','ucsc','chrom'), self.root) )
self.test_conf = resolve_path( kwargs.get( "test_conf", "" ), self.root )
- self.enable_tool_shed_install = string_as_bool( kwargs.get ( 'enable_tool_shed_install', False ) )
+ self.enable_tool_shed_install = string_as_bool( kwargs.get( 'enable_tool_shed_install', False ) )
self.tool_shed_install_config = resolve_path( kwargs.get( "tool_shed_install_config_file", "tool_shed_install.xml" ), self.root )
self.install_tool_config = resolve_path( kwargs.get( "install_tool_config_file", "shed_tool_conf.xml" ), self.root )
if 'tool_config_file' in kwargs:
@@ -63,6 +63,13 @@
tcf = 'tool_conf.xml'
self.tool_configs = [ resolve_path( p, self.root ) for p in listify( tcf ) ]
self.tool_data_table_config_path = resolve_path( kwargs.get( 'tool_data_table_config_path', 'tool_data_table_conf.xml' ), self.root )
+ self.enable_tool_shed_check = string_as_bool( kwargs.get( 'enable_tool_shed_check', False ) )
+ try:
+ self.hours_between_check = int( kwargs.get( 'hours_between_check', 12 ) )
+ if self.hours_between_check < 1 or self.hours_between_check > 24:
+ self.hours_between_check = 12
+ except:
+ self.hours_between_check = 12
self.tool_secret = kwargs.get( "tool_secret", "" )
self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
self.set_metadata_externally = string_as_bool( kwargs.get( "set_metadata_externally", "False" ) )
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2660,7 +2660,7 @@
pass
class ToolShedRepository( object ):
- def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None,
+ def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None, installed_changeset_revision=None,
changeset_revision=None, metadata=None, includes_datatypes=False, update_available=False, deleted=False ):
self.id = id
self.create_time = create_time
@@ -2668,6 +2668,7 @@
self.name = name
self.description = description
self.owner = owner
+ self.installed_changeset_revision = installed_changeset_revision
self.changeset_revision = changeset_revision
self.metadata = metadata
self.includes_datatypes = includes_datatypes
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -372,6 +372,7 @@
Column( "name", TrimmedString( 255 ), index=True ),
Column( "description" , TEXT ),
Column( "owner", TrimmedString( 255 ), index=True ),
+ Column( "installed_changeset_revision", TrimmedString( 255 ) ),
Column( "changeset_revision", TrimmedString( 255 ), index=True ),
Column( "metadata", JSONType, nullable=True ),
Column( "includes_datatypes", Boolean, index=True, default=False ),
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py
@@ -0,0 +1,63 @@
+"""
+Migration script to add the installed_changeset_revision column to the tool_shed_repository table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+ col = Column( "installed_changeset_revision", TrimmedString( 255 ) )
+ try:
+ col.create( ToolShedRepository_table )
+ assert col is ToolShedRepository_table.c.installed_changeset_revision
+ except Exception, e:
+ print "Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e )
+ log.debug( "Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e ) )
+ # Update each row by setting the value of installed_changeset_revison to be the value of changeset_revision.
+ # This will be problematic if the value of changeset_revision was updated to something other than the value
+ # that it was when the repository was installed (because the install path determined in real time will attempt to
+ # find the repository using the updated changeset_revison instead of the required installed_changeset_revision),
+ # but at the time this script was written, this scenario is extremely unlikely.
+ cmd = "SELECT id AS id, " \
+ + "installed_changeset_revision AS installed_changeset_revision, " \
+ + "changeset_revision AS changeset_revision " \
+ + "FROM tool_shed_repository;"
+ tool_shed_repositories = db_session.execute( cmd ).fetchall()
+ update_count = 0
+ for row in tool_shed_repositories:
+ cmd = "UPDATE tool_shed_repository " \
+ + "SET installed_changeset_revision = '%s' " % row.changeset_revision \
+ + "WHERE changeset_revision = '%s';" % row.changeset_revision
+ db_session.execute( cmd )
+ update_count += 1
+ print "Updated the installed_changeset_revision column for ", update_count, " rows in the tool_shed_repository table. "
+def downgrade():
+ metadata.reflect()
+ ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+ try:
+ ToolShedRepository_table.c.installed_changeset_revision.drop()
+ except Exception, e:
+ print "Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e )
+ log.debug( "Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e ) )
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/tool_shed/__init__.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/__init__.py
@@ -0,0 +1,3 @@
+"""
+Classes encapsulating the relationships between Galaxy and Galaxy tool sheds.
+"""
\ No newline at end of file
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/tool_shed/install_manager.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -0,0 +1,160 @@
+"""
+Manage automatic installation of tools configured in tool_shed_install.xml, all of which were
+at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool
+shed. Tools included in tool_shed_install.xml that have already been installed will not be
+re-installed.
+"""
+from galaxy.util.shed_util import *
+
+log = logging.getLogger( __name__ )
+
+class InstallManager( object ):
+ def __init__( self, app, tool_shed_install_config, install_tool_config ):
+ """
+ Check tool settings in tool_shed_install_config and install all tools that are
+ not already installed. The tool panel configuration file is the received
+ shed_tool_config, which defaults to shed_tool_conf.xml.
+ """
+ self.app = app
+ self.sa_session = self.app.model.context.current
+ self.install_tool_config = install_tool_config
+ # Parse shed_tool_config to get the install location (tool_path).
+ tree = util.parse_xml( install_tool_config )
+ root = tree.getroot()
+ self.tool_path = root.get( 'tool_path' )
+ self.app.toolbox.shed_tool_confs[ install_tool_config ] = self.tool_path
+ # Parse tool_shed_install_config to check each of the tools.
+ log.debug( "Parsing tool shed install configuration %s" % tool_shed_install_config )
+ self.tool_shed_install_config = tool_shed_install_config
+ tree = util.parse_xml( tool_shed_install_config )
+ root = tree.getroot()
+ self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
+ log.debug( "Repositories will be installed from tool shed '%s' into configured tool_path location '%s'" % ( str( self.tool_shed ), str( self.tool_path ) ) )
+ self.repository_owner = 'devteam'
+ for elem in root:
+ if elem.tag == 'repository':
+ self.install_repository( elem )
+ elif elem.tag == 'section':
+ self.install_section( elem )
+ def install_repository( self, elem, section_name='', section_id='' ):
+ # Install a single repository into the tool config. If outside of any sections, the entry looks something like:
+ # <repository name="cut_wrapper" description="Galaxy wrapper for the Cut tool" changeset_revision="f3ed6cfe6402">
+ # <tool id="Cut1" version="1.0.1" />
+ # </repository>
+ name = elem.get( 'name' )
+ description = elem.get( 'description' )
+ changeset_revision = elem.get( 'changeset_revision' )
+ # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<changeset revision>
+ clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, changeset_revision )
+ if self.__isinstalled( elem, clone_dir ):
+ log.debug( "Skipping automatic install of repository '%s' because it has already been installed in location '%s'" % ( name, clone_dir ) )
+ else:
+ if section_name and section_id:
+ section_key = 'section_%s' % str( section_id )
+ if section_key in self.app.toolbox.tool_panel:
+ # Appending a tool to an existing section in self.app.toolbox.tool_panel
+ log.debug( "Appending to tool panel section: %s" % section_name )
+ tool_section = self.app.toolbox.tool_panel[ section_key ]
+ else:
+ # Appending a new section to self.app.toolbox.tool_panel
+ log.debug( "Loading new tool panel section: %s" % section_name )
+ new_section_elem = Element( 'section' )
+ new_section_elem.attrib[ 'name' ] = section_name
+ new_section_elem.attrib[ 'id' ] = section_id
+ tool_section = ToolSection( new_section_elem )
+ self.app.toolbox.tool_panel[ section_key ] = tool_section
+ else:
+ tool_section = None
+ current_working_dir = os.getcwd()
+ tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
+ repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
+ relative_install_dir = os.path.join( clone_dir, name )
+ returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url )
+ if returncode == 0:
+ returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
+ if returncode == 0:
+ metadata_dict = load_repository_contents( self.app,
+ name,
+ description,
+ self.repository_owner,
+ changeset_revision,
+ repository_clone_url,
+ self.install_tool_config,
+ self.tool_path,
+ tool_section,
+ relative_install_dir,
+ current_working_dir,
+ tmp_name )
+ # Add a new record to the tool_id_guid_map table for each
+ # tool in the repository if one doesn't already exist.
+ if 'tools' in metadata_dict:
+ tools_mapped = 0
+ for tool_dict in metadata_dict[ 'tools' ]:
+ flush_needed = False
+ tool_id = tool_dict[ 'id' ]
+ tool_version = tool_dict[ 'version' ]
+ guid = tool_dict[ 'guid' ]
+ tool_id_guid_map = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name )
+ if tool_id_guid_map:
+ if tool_id_guid_map.guid != guid:
+ tool_id_guid_map.guid = guid
+ flush_needed = True
+ else:
+ tool_id_guid_map = self.app.model.ToolIdGuidMap( tool_id=tool_id,
+ tool_version=tool_version,
+ tool_shed=self.tool_shed,
+ repository_owner=self.repository_owner,
+ repository_name=name,
+ guid=guid )
+ flush_needed = True
+ if flush_needed:
+ self.sa_session.add( tool_id_guid_map )
+ self.sa_session.flush()
+ tools_mapped += 1
+ log.debug( "Mapped tool ids to guids for %d tools included in repository '%s'." % ( tools_mapped, name ) )
+ else:
+ tmp_stderr = open( tmp_name, 'rb' )
+ log.debug( "Error updating repository '%s': %s" % ( name, tmp_stderr.read() ) )
+ tmp_stderr.close()
+ else:
+ tmp_stderr = open( tmp_name, 'rb' )
+ log.debug( "Error cloning repository '%s': %s" % ( name, tmp_stderr.read() ) )
+ tmp_stderr.close()
+ def install_section( self, elem ):
+ # Install 1 or more repositories into a section in the tool config. An entry looks something like:
+ # <section name="EMBOSS" id="EMBOSSLite">
+ # <repository name="emboss_5" description="Galaxy wrappers for EMBOSS version 5 tools" changeset_revision="bdd88ae5d0ac">
+ # <tool file="emboss_5/emboss_antigenic.xml" id="EMBOSS: antigenic1" version="5.0.0" />
+ # ...
+ # </repository>
+ # </section>
+ section_name = elem.get( 'name' )
+ section_id = elem.get( 'id' )
+ for repository_elem in elem:
+ self.install_repository( repository_elem, section_name=section_name, section_id=section_id )
+ def __get_url_from_tool_shed( self, tool_shed ):
+ # The value of tool_shed is something like: toolshed.g2.bx.psu.edu
+ # We need the URL to this tool shed, which is something like:
+ # http://toolshed.g2.bx.psu.edu/
+ for shed_name, shed_url in self.app.tool_shed_registry.tool_sheds.items():
+ if shed_url.find( tool_shed ) >= 0:
+ if shed_url.endswith( '/' ):
+ shed_url = shed_url.rstrip( '/' )
+ return shed_url
+ # The tool shed from which the repository was originally
+ # installed must no longer be configured in tool_sheds_conf.xml.
+ return None
+ def __isinstalled( self, repository_elem, clone_dir ):
+ name = repository_elem.get( 'name' )
+ installed = False
+ for tool_elem in repository_elem:
+ tool_config = tool_elem.get( 'file' )
+ tool_id = tool_elem.get( 'id' )
+ tool_version = tool_elem.get( 'version' )
+ tigm = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name )
+ if tigm:
+ # A record exists in the tool_id_guid_map table, so see if the repository is installed.
+ if os.path.exists( clone_dir ):
+ installed = True
+ break
+ return installed
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/tool_shed/tool_shed_registry.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/tool_shed_registry.py
@@ -0,0 +1,31 @@
+import sys, logging
+from galaxy.util import parse_xml
+from galaxy.util.odict import odict
+
+log = logging.getLogger( __name__ )
+
+if sys.version_info[:2] == ( 2, 4 ):
+ from galaxy import eggs
+ eggs.require( 'ElementTree' )
+ from elementtree import ElementTree
+else:
+ from xml.etree import ElementTree
+
+class Registry( object ):
+ def __init__( self, root_dir=None, config=None ):
+ self.tool_sheds = odict()
+ if root_dir and config:
+ # Parse datatypes_conf.xml
+ tree = parse_xml( config )
+ root = tree.getroot()
+ # Load datatypes and converters from config
+ log.debug( 'Loading references to tool sheds from %s' % config )
+ for elem in root.findall( 'tool_shed' ):
+ try:
+ name = elem.get( 'name', None )
+ url = elem.get( 'url', None )
+ if name and url:
+ self.tool_sheds[ name ] = url
+ log.debug( 'Loaded reference to tool shed: %s' % name )
+ except Exception, e:
+ log.warning( 'Error loading reference to tool shed "%s", problem: %s' % ( name, str( e ) ) )
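
The new Registry simply maps tool shed names to URLs read from a config file such as tool_sheds_conf.xml. A minimal standalone sketch of that parsing follows; the inlined config, its root tag, and the shed name are illustrative assumptions rather than values taken from this changeset.

import xml.etree.ElementTree as ElementTree
from collections import OrderedDict

# Hypothetical tool_sheds_conf.xml contents; <tool_shed> elements with
# 'name' and 'url' attributes are what Registry.__init__ above looks for.
SAMPLE_TOOL_SHEDS_CONF = """<?xml version="1.0"?>
<tool_sheds>
    <tool_shed name="Galaxy main tool shed" url="http://toolshed.g2.bx.psu.edu/"/>
</tool_sheds>
"""

def load_tool_sheds( xml_text ):
    # Same traversal as the new Registry: collect name -> url pairs from each
    # <tool_shed> element, preserving file order (odict in the real code).
    tool_sheds = OrderedDict()
    for elem in ElementTree.fromstring( xml_text ).findall( 'tool_shed' ):
        name = elem.get( 'name', None )
        url = elem.get( 'url', None )
        if name and url:
            tool_sheds[ name ] = url
    return tool_sheds

print( load_tool_sheds( SAMPLE_TOOL_SHEDS_CONF ) )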
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/tool_shed/update_manager.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/update_manager.py
@@ -0,0 +1,67 @@
+"""
+Determine if installed tool shed repositories have updates available in their respective tool sheds.
+"""
+import threading, urllib2, logging
+from galaxy.util import string_as_bool
+from galaxy.util.shed_util import *
+
+log = logging.getLogger( __name__ )
+
+class UpdateManager( object ):
+ def __init__( self, app ):
+ """
+ Check tool settings in tool_shed_install_config and install all tools that are
+ not already installed. The tool panel configuration file is the received
+ shed_tool_config, which defaults to shed_tool_conf.xml.
+ """
+ self.app = app
+ self.sa_session = self.app.model.context.current
+ # Ideally only one Galaxy server process
+ # should be able to check for repository updates.
+ self.running = True
+ self.sleeper = Sleeper()
+ self.restarter = threading.Thread( target=self.__restarter )
+ self.restarter.start()
+ self.seconds_to_sleep = app.config.hours_between_check * 3600
+ def __restarter( self ):
+ log.info( 'Update manager restarter starting up...' )
+ while self.running:
+ flush_needed = False
+ for repository in self.sa_session.query( self.app.model.ToolShedRepository ) \
+ .filter( and_( self.app.model.ToolShedRepository.table.c.update_available == False,
+ self.app.model.ToolShedRepository.table.c.deleted == False ) ):
+ if self.check_for_update( repository ):
+ repository.update_available = True
+ self.sa_session.add( repository )
+ flush_needed = True
+ if flush_needed:
+ self.sa_session.flush()
+ self.sleeper.sleep( self.seconds_to_sleep )
+ log.info( 'Transfer job restarter shutting down...' )
+ def check_for_update( self, repository ):
+ tool_shed_url = get_url_from_repository_tool_shed( self.app, repository )
+ url = '%s/repository/check_for_updates?name=%s&owner=%s&changeset_revision=%s&webapp=update_manager' % \
+ ( tool_shed_url, repository.name, repository.owner, repository.changeset_revision )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ return string_as_bool( text )
+ def shutdown( self ):
+ self.running = False
+ self.sleeper.wake()
+
+class Sleeper( object ):
+ """
+ Provides a 'sleep' method that sleeps for a number of seconds *unless*
+ the notify method is called (from a different thread).
+ """
+ def __init__( self ):
+ self.condition = threading.Condition()
+ def sleep( self, seconds ):
+ self.condition.acquire()
+ self.condition.wait( seconds )
+ self.condition.release()
+ def wake( self ):
+ self.condition.acquire()
+ self.condition.notify()
+ self.condition.release()
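
The Sleeper used by the UpdateManager is a small condition-variable wrapper: sleep() blocks for up to the given number of seconds but returns early if wake() is called from another thread, which is how shutdown() interrupts the polling loop without waiting out hours_between_check. A minimal standalone demonstration of that pattern (not Galaxy code):

import threading, time

class Sleeper( object ):
    # Same pattern as the Sleeper in the diff above.
    def __init__( self ):
        self.condition = threading.Condition()
    def sleep( self, seconds ):
        # wait() returns early if another thread calls notify().
        self.condition.acquire()
        self.condition.wait( seconds )
        self.condition.release()
    def wake( self ):
        self.condition.acquire()
        self.condition.notify()
        self.condition.release()

sleeper = Sleeper()
start = time.time()
# Wake the sleeper from another thread after one second...
threading.Timer( 1.0, sleeper.wake ).start()
# ...so this nominal 60 second sleep returns almost immediately.
sleeper.sleep( 60 )
print( 'slept %.1f seconds instead of 60' % ( time.time() - start ) )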
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/tools/install_manager.py
--- a/lib/galaxy/tools/install_manager.py
+++ /dev/null
@@ -1,160 +0,0 @@
-"""
-Manage automatic installation of tools configured in tool_shed_install.xml, all of which were
-at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool
-shed. Tools included in tool_shed_install.xml that have already been installed will not be
-re-installed.
-"""
-from galaxy.util.shed_util import *
-
-log = logging.getLogger( __name__ )
-
-class InstallManager( object ):
- def __init__( self, app, tool_shed_install_config, install_tool_config ):
- """
- Check tool settings in tool_shed_install_config and install all tools that are
- not already installed. The tool panel configuration file is the received
- shed_tool_config, which defaults to shed_tool_conf.xml.
- """
- self.app = app
- self.sa_session = self.app.model.context.current
- self.install_tool_config = install_tool_config
- # Parse shed_tool_config to get the install location (tool_path).
- tree = util.parse_xml( install_tool_config )
- root = tree.getroot()
- self.tool_path = root.get( 'tool_path' )
- self.app.toolbox.shed_tool_confs[ install_tool_config ] = self.tool_path
- # Parse tool_shed_install_config to check each of the tools.
- log.debug( "Parsing tool shed install configuration %s" % tool_shed_install_config )
- self.tool_shed_install_config = tool_shed_install_config
- tree = util.parse_xml( tool_shed_install_config )
- root = tree.getroot()
- self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
- log.debug( "Repositories will be installed from tool shed '%s' into configured tool_path location '%s'" % ( str( self.tool_shed ), str( self.tool_path ) ) )
- self.repository_owner = 'devteam'
- for elem in root:
- if elem.tag == 'repository':
- self.install_repository( elem )
- elif elem.tag == 'section':
- self.install_section( elem )
- def install_repository( self, elem, section_name='', section_id='' ):
- # Install a single repository into the tool config. If outside of any sections, the entry looks something like:
- # <repository name="cut_wrapper" description="Galaxy wrapper for the Cut tool" changeset_revision="f3ed6cfe6402">
- # <tool id="Cut1" version="1.0.1" />
- # </repository>
- name = elem.get( 'name' )
- description = elem.get( 'description' )
- changeset_revision = elem.get( 'changeset_revision' )
- # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<changeset revision>
- clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, changeset_revision )
- if self.__isinstalled( elem, clone_dir ):
- log.debug( "Skipping automatic install of repository '%s' because it has already been installed in location '%s'" % ( name, clone_dir ) )
- else:
- if section_name and section_id:
- section_key = 'section_%s' % str( section_id )
- if section_key in self.app.toolbox.tool_panel:
- # Appending a tool to an existing section in self.app.toolbox.tool_panel
- log.debug( "Appending to tool panel section: %s" % section_name )
- tool_section = self.app.toolbox.tool_panel[ section_key ]
- else:
- # Appending a new section to self.app.toolbox.tool_panel
- log.debug( "Loading new tool panel section: %s" % section_name )
- new_section_elem = Element( 'section' )
- new_section_elem.attrib[ 'name' ] = section_name
- new_section_elem.attrib[ 'id' ] = section_id
- tool_section = ToolSection( new_section_elem )
- self.app.toolbox.tool_panel[ section_key ] = tool_section
- else:
- tool_section = None
- current_working_dir = os.getcwd()
- tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
- repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
- relative_install_dir = os.path.join( clone_dir, name )
- returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url )
- if returncode == 0:
- returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
- if returncode == 0:
- metadata_dict = load_repository_contents( self.app,
- name,
- description,
- self.repository_owner,
- changeset_revision,
- repository_clone_url,
- self.install_tool_config,
- self.tool_path,
- tool_section,
- relative_install_dir,
- current_working_dir,
- tmp_name )
- # Add a new record to the tool_id_guid_map table for each
- # tool in the repository if one doesn't already exist.
- if 'tools' in metadata_dict:
- tools_mapped = 0
- for tool_dict in metadata_dict[ 'tools' ]:
- flush_needed = False
- tool_id = tool_dict[ 'id' ]
- tool_version = tool_dict[ 'version' ]
- guid = tool_dict[ 'guid' ]
- tool_id_guid_map = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name )
- if tool_id_guid_map:
- if tool_id_guid_map.guid != guid:
- tool_id_guid_map.guid = guid
- flush_needed = True
- else:
- tool_id_guid_map = self.app.model.ToolIdGuidMap( tool_id=tool_id,
- tool_version=tool_version,
- tool_shed=self.tool_shed,
- repository_owner=self.repository_owner,
- repository_name=name,
- guid=guid )
- flush_needed = True
- if flush_needed:
- self.sa_session.add( tool_id_guid_map )
- self.sa_session.flush()
- tools_mapped += 1
- log.debug( "Mapped tool ids to guids for %d tools included in repository '%s'." % ( tools_mapped, name ) )
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- log.debug( "Error updating repository '%s': %s" % ( name, tmp_stderr.read() ) )
- tmp_stderr.close()
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- log.debug( "Error cloning repository '%s': %s" % ( name, tmp_stderr.read() ) )
- tmp_stderr.close()
- def install_section( self, elem ):
- # Install 1 or more repositories into a section in the tool config. An entry looks something like:
- # <section name="EMBOSS" id="EMBOSSLite">
- # <repository name="emboss_5" description="Galaxy wrappers for EMBOSS version 5 tools" changeset_revision="bdd88ae5d0ac">
- # <tool file="emboss_5/emboss_antigenic.xml" id="EMBOSS: antigenic1" version="5.0.0" />
- # ...
- # </repository>
- # </section>
- section_name = elem.get( 'name' )
- section_id = elem.get( 'id' )
- for repository_elem in elem:
- self.install_repository( repository_elem, section_name=section_name, section_id=section_id )
- def __get_url_from_tool_shed( self, tool_shed ):
- # The value of tool_shed is something like: toolshed.g2.bx.psu.edu
- # We need the URL to this tool shed, which is something like:
- # http://toolshed.g2.bx.psu.edu/
- for shed_name, shed_url in self.app.tool_shed_registry.tool_sheds.items():
- if shed_url.find( tool_shed ) >= 0:
- if shed_url.endswith( '/' ):
- shed_url = shed_url.rstrip( '/' )
- return shed_url
- # The tool shed from which the repository was originally
- # installed must no longer be configured in tool_sheds_conf.xml.
- return None
- def __isinstalled( self, repository_elem, clone_dir ):
- name = repository_elem.get( 'name' )
- installed = False
- for tool_elem in repository_elem:
- tool_config = tool_elem.get( 'file' )
- tool_id = tool_elem.get( 'id' )
- tool_version = tool_elem.get( 'version' )
- tigm = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name )
- if tigm:
- # A record exists in the tool_id_guid_map table, so see if the repository is installed.
- if os.path.exists( clone_dir ):
- installed = True
- break
- return installed
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/tools/tool_shed_registry.py
--- a/lib/galaxy/tools/tool_shed_registry.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import sys, logging
-from galaxy.util import parse_xml
-from galaxy.util.odict import odict
-
-log = logging.getLogger( __name__ )
-
-if sys.version_info[:2] == ( 2, 4 ):
- from galaxy import eggs
- eggs.require( 'ElementTree' )
- from elementtree import ElementTree
-else:
- from xml.etree import ElementTree
-
-class Registry( object ):
- def __init__( self, root_dir=None, config=None ):
- self.tool_sheds = odict()
- if root_dir and config:
- # Parse datatypes_conf.xml
- tree = parse_xml( config )
- root = tree.getroot()
- # Load datatypes and converters from config
- log.debug( 'Loading references to tool sheds from %s' % config )
- for elem in root.findall( 'tool_shed' ):
- try:
- name = elem.get( 'name', None )
- url = elem.get( 'url', None )
- if name and url:
- self.tool_sheds[ name ] = url
- log.debug( 'Loaded reference to tool shed: %s' % name )
- except Exception, e:
- log.warning( 'Error loading reference to tool shed "%s", problem: %s' % ( name, str( e ) ) )
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -65,7 +65,8 @@
return tool_shed_url.rstrip( '/' )
def clone_repository( name, clone_dir, current_working_dir, repository_clone_url ):
log.debug( "Installing repository '%s'" % name )
- os.makedirs( clone_dir )
+ if not os.path.exists( clone_dir ):
+ os.makedirs( clone_dir )
log.debug( 'Cloning %s' % repository_clone_url )
cmd = 'hg clone %s' % repository_clone_url
tmp_name = tempfile.NamedTemporaryFile().name
@@ -88,15 +89,18 @@
tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision )
if tool_shed_repository:
if tool_shed_repository.deleted:
+ tool_shed_repository.description = description
+ tool_shed_repository.changeset_revision = changeset_revision
+ tool_shed_repository.metadata = metadata_dict
+ tool_shed_repository.includes_datatypes = includes_datatypes
tool_shed_repository.deleted = False
- # Reset includes_datatypes in case metadata changed since last installed.
- tool_shed_repository.includes_datatypes = includes_datatypes
flush_needed = True
else:
tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed,
name=name,
description=description,
owner=owner,
+ installed_changeset_revision=changeset_revision,
changeset_revision=changeset_revision,
metadata=metadata_dict,
includes_datatypes=includes_datatypes )
@@ -318,6 +322,20 @@
app.model.ToolIdGuidMap.table.c.repository_owner == repository_owner,
app.model.ToolIdGuidMap.table.c.repository_name == repository_name ) ) \
.first()
+def get_url_from_repository_tool_shed( app, repository ):
+ """
+ This method is used by the UpdateManager, which does not have access to trans.
+ The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu
+ We need the URL to this tool shed, which is something like: http://toolshed.g2.bx.psu.edu/
+ """
+ for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
+ if shed_url.find( repository.tool_shed ) >= 0:
+ if shed_url.endswith( '/' ):
+ shed_url = shed_url.rstrip( '/' )
+ return shed_url
+ # The tool shed from which the repository was originally
+ # installed must no longer be configured in tool_sheds_conf.xml.
+ return None
def handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups ):
"""
Inspect each tool to see if any have input parameters that are dynamically
@@ -532,7 +550,19 @@
if level and ( not elem.tail or not elem.tail.strip() ):
elem.tail = i + pad
return elem
-def update_repository( current_working_dir, relative_install_dir, changeset_revision ):
+def pull_repository( current_working_dir, repo_files_dir, name ):
+ # Pull the latest possible contents to the repository.
+ log.debug( "Pulling latest updates to the repository named '%s'" % name )
+ cmd = 'hg pull'
+ tmp_name = tempfile.NamedTemporaryFile().name
+ tmp_stderr = open( tmp_name, 'wb' )
+ os.chdir( repo_files_dir )
+ proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
+ returncode = proc.wait()
+ os.chdir( current_working_dir )
+ tmp_stderr.close()
+ return returncode, tmp_name
+def update_repository( current_working_dir, repo_files_dir, changeset_revision ):
# Update the cloned repository to changeset_revision. It is imperative that the
# installed repository is updated to the desired changeset_revision before metadata
# is set because the process for setting metadata uses the repository files on disk.
@@ -540,7 +570,7 @@
cmd = 'hg update -r %s' % changeset_revision
tmp_name = tempfile.NamedTemporaryFile().name
tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( relative_install_dir )
+ os.chdir( repo_files_dir )
proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
returncode = proc.wait()
os.chdir( current_working_dir )
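
The new get_url_from_repository_tool_shed() helper resolves the bare host stored on a repository row (e.g. toolshed.g2.bx.psu.edu) back to a configured tool shed URL by substring match against the registry, stripping any trailing slash. A standalone sketch of that lookup, using a plain dict in place of app.tool_shed_registry.tool_sheds (the dict contents here are illustrative):

def resolve_tool_shed_url( tool_sheds, tool_shed ):
    # tool_sheds maps shed name -> URL, as loaded from tool_sheds_conf.xml;
    # tool_shed is the bare host stored in the tool_shed_repository table.
    for shed_name, shed_url in tool_sheds.items():
        if shed_url.find( tool_shed ) >= 0:
            if shed_url.endswith( '/' ):
                shed_url = shed_url.rstrip( '/' )
            return shed_url
    # The shed is no longer configured, mirroring the None return above.
    return None

print( resolve_tool_shed_url( { 'main': 'http://toolshed.g2.bx.psu.edu/' }, 'toolshed.g2.bx.psu.edu' ) )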
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -3,6 +3,55 @@
log = logging.getLogger( __name__ )
+class ToolIdGuidMapGrid( grids.Grid ):
+ class ToolIdColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_id_guid_map ):
+ return tool_id_guid_map.tool_id
+ class ToolVersionColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_id_guid_map ):
+ return tool_id_guid_map.tool_version
+ class ToolGuidColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_id_guid_map ):
+ return tool_id_guid_map.guid
+ class ToolShedColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_id_guid_map ):
+ return tool_id_guid_map.tool_shed
+ class RepositoryNameColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_id_guid_map ):
+ return tool_id_guid_map.repository_name
+ class RepositoryOwnerColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_id_guid_map ):
+ return tool_id_guid_map.repository_owner
+ # Grid definition
+ title = "Map tool id to guid"
+ model_class = model.ToolIdGuidMap
+ template='/admin/tool_shed_repository/grid.mako'
+ default_sort_key = "tool_id"
+ columns = [
+ ToolIdColumn( "Tool id" ),
+ ToolVersionColumn( "Version" ),
+ ToolGuidColumn( "Guid" ),
+ ToolShedColumn( "Tool shed" ),
+ RepositoryNameColumn( "Repository name" ),
+ RepositoryOwnerColumn( "Repository owner" )
+ ]
+ columns.append( grids.MulticolFilterColumn( "Search repository name",
+ cols_to_filter=[ columns[0], columns[2], columns[4], columns[5] ],
+ key="free-text-search",
+ visible=False,
+ filterable="standard" ) )
+ global_actions = [
+ grids.GridAction( "Manage installed tool shed repositories", dict( controller='admin_toolshed', action='browse_repositories' ) )
+ ]
+ operations = []
+ standard_filters = []
+ default_filter = {}
+ num_rows_per_page = 50
+ preserve_state = False
+ use_paging = True
+ def build_initial_query( self, trans, **kwd ):
+ return trans.sa_session.query( self.model_class )
+
class RepositoryListGrid( grids.Grid ):
class NameColumn( grids.TextColumn ):
def get_value( self, trans, grid, tool_shed_repository ):
@@ -46,6 +95,9 @@
key="free-text-search",
visible=False,
filterable="standard" ) )
+ global_actions = [
+ grids.GridAction( "View tool id guid map", dict( controller='admin_toolshed', action='browse_tool_id_guid_map' ) )
+ ]
operations = [ grids.GridOperation( "Get updates",
allow_multiple=False,
condition=( lambda item: not item.deleted ),
@@ -62,9 +114,14 @@
class AdminToolshed( AdminGalaxy ):
repository_list_grid = RepositoryListGrid()
+ tool_id_guid_map_grid = ToolIdGuidMapGrid()
@web.expose
@web.require_admin
+ def browse_tool_id_guid_map( self, trans, **kwd ):
+ return self.tool_id_guid_map_grid( trans, **kwd )
+ @web.expose
+ @web.require_admin
def browse_repository( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -272,7 +329,7 @@
def check_for_updates( self, trans, **kwd ):
# Send a request to the relevant tool shed to see if there are any updates.
repository = get_repository( trans, kwd[ 'id' ] )
- tool_shed_url = get_url_from_repository_tool_shed( trans, repository )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
url = '%s/repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
( tool_shed_url, url_for( '', qualified=True ), repository.name, repository.owner, repository.changeset_revision )
return trans.response.send_redirect( url )
@@ -296,30 +353,14 @@
current_working_dir = os.getcwd()
relative_install_dir = self.__get_relative_install_dir( trans, repository )
if relative_install_dir:
- # Update the cloned repository to changeset_revision.
repo_files_dir = os.path.join( relative_install_dir, name )
- log.debug( "Updating cloned repository named '%s' from revision '%s' to revision '%s'..." % \
- ( name, changeset_revision, latest_changeset_revision ) )
- cmd = 'hg pull'
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( repo_files_dir )
- proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
+ returncode, tmp_name = pull_repository( current_working_dir, repo_files_dir, name )
if returncode == 0:
- cmd = 'hg update -r %s' % latest_changeset_revision
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( repo_files_dir )
- proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
+ returncode, tmp_name = update_repository( current_working_dir, repo_files_dir, latest_changeset_revision )
if returncode == 0:
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
+ repository.update_available = False
trans.sa_session.add( repository )
trans.sa_session.flush()
message = "The cloned repository named '%s' has been updated to change set revision '%s'." % \
@@ -370,7 +411,7 @@
def __get_relative_install_dir( self, trans, repository ):
# Get the directory where the repository is install.
tool_shed = clean_tool_shed_url( repository.tool_shed )
- partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.changeset_revision )
+ partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
# Get the relative tool installation paths from each of the shed tool configs.
shed_tool_confs = trans.app.toolbox.shed_tool_confs
relative_install_dir = None
@@ -396,7 +437,7 @@
return '%s/repos%s/%s' % ( tool_shed_url, repo_path, changeset_revision )
def __generate_clone_url( self, trans, repository ):
"""Generate the URL for cloning a repository."""
- tool_shed_url = get_url_from_repository_tool_shed( trans, repository )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
return '%s/repos/%s/%s' % ( tool_shed_url, repository.owner, repository.name )
## ---- Utility methods -------------------------------------------------------
@@ -426,23 +467,3 @@
def get_repository( trans, id ):
"""Get a tool_shed_repository from the database via id"""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
-def get_repository_by_name_owner_changeset_revision( trans, name, owner, changeset_revision ):
- """Get a repository from the database via name owner and changeset_revision"""
- return trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( and_( trans.model.ToolShedRepository.table.c.name == name,
- trans.model.ToolShedRepository.table.c.owner == owner,
- trans.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
- .first()
-def get_url_from_repository_tool_shed( trans, repository ):
- # The stored value of repository.tool_shed is something like:
- # toolshed.g2.bx.psu.edu
- # We need the URL to this tool shed, which is something like:
- # http://toolshed.g2.bx.psu.edu/
- for shed_name, shed_url in trans.app.tool_shed_registry.tool_sheds.items():
- if shed_url.find( repository.tool_shed ) >= 0:
- if shed_url.endswith( '/' ):
- shed_url = shed_url.rstrip( '/' )
- return shed_url
- # The tool shed from which the repository was originally
- # installed must no longer be configured in tool_sheds_conf.xml.
- return None
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -764,22 +764,32 @@
return trans.response.send_redirect( url )
@web.expose
def check_for_updates( self, trans, **kwd ):
+ # Handle a request from a local Galaxy instance. If the request originated with the
+ # Galaxy instances' UpdateManager, the value of 'webapp' will be 'update_manager'.
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- galaxy_url = kwd[ 'galaxy_url' ]
+ # If the request originated with the UpdateManager, it will not include a galaxy_url.
+ galaxy_url = kwd.get( 'galaxy_url', '' )
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
webapp = params.get( 'webapp', 'community' )
- # Start building up the url to redirect back to the calling Galaxy instance.
- url = '%s/admin_toolshed/update_to_changeset_revision?tool_shed_url=%s' % ( galaxy_url, url_for( '', qualified=True ) )
repository = get_repository_by_name_and_owner( trans, name, owner )
- url += '&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
- ( repository.name, repository.user.username, changeset_revision )
+ from_update_manager = webapp == 'update_manager'
+ if from_update_manager:
+ update = 'true'
+ no_update = 'false'
+ else:
+ # Start building up the url to redirect back to the calling Galaxy instance.
+ url = '%s/admin_toolshed/update_to_changeset_revision?tool_shed_url=%s' % ( galaxy_url, url_for( '', qualified=True ) )
+ url += '&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
+ ( repository.name, repository.user.username, changeset_revision )
if changeset_revision == repository.tip:
# If changeset_revision is the repository tip, then
# we know there are no additional updates for the tools.
+ if from_update_manager:
+ return no_update
url += repository.tip
else:
repository_metadata = get_repository_metadata_by_changeset_revision( trans,
@@ -788,6 +798,8 @@
if repository_metadata:
# If changeset_revision is in the repository_metadata table for this
# repository, then we know there are no additional updates for the tools.
+ if from_update_manager:
+ return no_update
url += changeset_revision
else:
# The changeset_revision column in the repository_metadata table has been
@@ -836,15 +848,21 @@
if tool_guids == metadata_tool_guids:
# We've found the repository_metadata record whose changeset_revision
# value has been updated.
+ if from_update_manager:
+ return update
url += repository_metadata.changeset_revision
found = True
break
if not found:
# There must be a problem in the data, so we'll just send back the received changeset_revision.
log.debug( "Possible data corruption - updated repository_metadata cannot be found for repository id %d." % repository.id )
+ if from_update_manager:
+ return no_update
url += changeset_revision
else:
# There are not tools in the changeset_revision, so no tool updates are possible.
+ if from_update_manager:
+ return no_update
url += changeset_revision
return trans.response.send_redirect( url )
@web.expose
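
With these changes the tool shed's check_for_updates controller serves two callers: a browser-driven Galaxy instance, which is redirected back to admin_toolshed/update_to_changeset_revision, and the new UpdateManager, which passes webapp=update_manager and only needs a bare 'true' or 'false' body that galaxy.util.string_as_bool can interpret. A condensed, hypothetical sketch of that branching (not the controller code itself):

def string_as_bool( string ):
    # Equivalent in spirit to galaxy.util.string_as_bool, which
    # UpdateManager.check_for_update() applies to the response body.
    return str( string ).lower() in ( 'true', 'yes', 'on' )

def check_for_updates_response( webapp, update_available, redirect_url ):
    # Requests flagged webapp=update_manager get a bare 'true'/'false' body;
    # everything else is redirected back to the calling Galaxy instance.
    if webapp == 'update_manager':
        return 'true' if update_available else 'false'
    return redirect_url

print( string_as_bool( check_for_updates_response( 'update_manager', False, 'http://example.org/redirect' ) ) )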
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -147,6 +147,13 @@
# if appropriate or use a different file name for the setting.
#install_tool_config_file = shed_tool_conf.xml
+# Enable automatic polling of relative tool sheds to see if any updates
+# are available for installed repositories. Ideally only one Galaxy
+# server process should be able to check for repository updates. The
+# setting for hours_between_check should be an integer between 1 and 24.
+#enable_tool_shed_check = False
+#hours_between_check = 12
+
# Directory where data used by tools is located, see the samples in that
# directory and the wiki for help:
# http://wiki.g2.bx.psu.edu/Admin/Data%20Integration
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4a39bc209487/
changeset: 4a39bc209487
user: dan
date: 2011-12-14 15:37:45
summary: Update EBI SRA tool's input action.
affected #: 1 file
diff -r 66a7957ed97403fb4e17c9c747c4624e1c1b2f3d -r 4a39bc2094875a9878f07f27f7307976e05e8b87 tools/data_source/ebi_sra.xml
--- a/tools/data_source/ebi_sra.xml
+++ b/tools/data_source/ebi_sra.xml
@@ -1,10 +1,10 @@
<?xml version="1.0"?>
-<tool name="EBI SRA" id="ebi_sra_main" tool_type="data_source" version="1.0.0">
+<tool name="EBI SRA" id="ebi_sra_main" tool_type="data_source" version="1.0.1"><description>ENA SRA</description><!-- This paython script imports the file into Galaxy --><command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <!-- The URL where Galaxy will forwars the user when this tool is accessed from the Get Data menu -->
- <inputs action="http://www.ebi.ac.uk/ena/" check_values="false" method="get">
+ <!-- The URL where Galaxy will forwards the user when this tool is accessed from the Get Data menu -->
+ <inputs action="http://www.ebi.ac.uk/ena/data/search" check_values="false" method="get"><display>go to EBI SRA server $GALAXY_URL</display></inputs><uihints minwidth="800"/>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dannon: Provide default for robots.txt being pulled from config.
by Bitbucket 13 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/66a7957ed974/
changeset: 66a7957ed974
user: dannon
date: 2011-12-13 17:04:07
summary: Provide default for robots.txt being pulled from config.
affected #: 1 file
diff -r ad9a6d8afded63469e07cd5ef89098c9c6195f22 -r 66a7957ed97403fb4e17c9c747c4624e1c1b2f3d lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -251,7 +251,7 @@
urlmap["/static/scripts"] = Static( conf.get( "static_scripts_dir" ), cache_time )
urlmap["/static/style"] = Static( conf.get( "static_style_dir" ), cache_time )
urlmap["/favicon.ico"] = Static( conf.get( "static_favicon_dir" ), cache_time )
- urlmap["/robots.txt"] = Static( conf.get( "static_robots_txt" ), cache_time )
+ urlmap["/robots.txt"] = Static( conf.get( "static_robots_txt", 'static/robots.txt'), cache_time )
# URL mapper becomes the root webapp
return urlmap
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dannon: Fix default static wrapping to work with favicon.ico and robots.txt (and other static files that may need to be mapped to / in the future)
by Bitbucket 13 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ad9a6d8afded/
changeset: ad9a6d8afded
user: dannon
date: 2011-12-13 16:42:38
summary: Fix default static wrapping to work with favicon.ico and robots.txt (and other static files that may need to be mapped to / in the future)
affected #: 3 files
diff -r d00337a396ffd79e78ed38b9365e66d055b1cfc4 -r ad9a6d8afded63469e07cd5ef89098c9c6195f22 lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -251,6 +251,7 @@
urlmap["/static/scripts"] = Static( conf.get( "static_scripts_dir" ), cache_time )
urlmap["/static/style"] = Static( conf.get( "static_style_dir" ), cache_time )
urlmap["/favicon.ico"] = Static( conf.get( "static_favicon_dir" ), cache_time )
+ urlmap["/robots.txt"] = Static( conf.get( "static_robots_txt" ), cache_time )
# URL mapper becomes the root webapp
return urlmap
diff -r d00337a396ffd79e78ed38b9365e66d055b1cfc4 -r ad9a6d8afded63469e07cd5ef89098c9c6195f22 lib/galaxy/web/framework/middleware/static.py
--- a/lib/galaxy/web/framework/middleware/static.py
+++ b/lib/galaxy/web/framework/middleware/static.py
@@ -19,6 +19,12 @@
def __call__( self, environ, start_response ):
path_info = environ.get('PATH_INFO', '')
if not path_info:
+ #See if this is a static file hackishly mapped.
+ if os.path.exists(self.directory) and os.path.isfile(self.directory):
+ app = fileapp.FileApp(self.directory)
+ if self.cache_seconds:
+ app.cache_control( max_age = int( self.cache_seconds ) )
+ return app(environ, start_response)
return self.add_slash(environ, start_response)
if path_info == '/':
# @@: This should obviously be configurable
@@ -45,6 +51,6 @@
if self.cache_seconds:
app.cache_control( max_age = int( self.cache_seconds ) )
return app(environ, start_response)
-
+
def make_static( global_conf, document_root, cache_seconds=None ):
- return CacheableStaticURLParser( document_root, cache_seconds )
\ No newline at end of file
+ return CacheableStaticURLParser( document_root, cache_seconds )
diff -r d00337a396ffd79e78ed38b9365e66d055b1cfc4 -r ad9a6d8afded63469e07cd5ef89098c9c6195f22 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -247,6 +247,7 @@
static_favicon_dir = %(here)s/static/favicon.ico
static_scripts_dir = %(here)s/static/scripts/
static_style_dir = %(here)s/static/june_2007_style/blue
+static_robots_txt = %(here)s/static/robots.txt
# -- Advanced proxy features
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Add EBI ENA SRA tool contributed by Iain Cleland.
by Bitbucket 13 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d00337a396ff/
changeset: d00337a396ff
user: dan
date: 2011-12-13 15:45:49
summary: Add EBI ENA SRA tool contributed by Iain Cleland.
affected #: 3 files
diff -r 7dd3a089101138a4796eb73a5f1391d2f436723e -r d00337a396ffd79e78ed38b9365e66d055b1cfc4 tool_conf.xml.main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -5,6 +5,7 @@
<tool file="data_source/ucsc_tablebrowser.xml" /><tool file="data_source/ucsc_tablebrowser_archaea.xml" /><tool file="data_source/bx_browser.xml" />
+ <tool file="data_source/ebi_sra.xml"/><tool file="data_source/biomart.xml" /><tool file="data_source/gramene_mart.xml" /><tool file="data_source/flymine.xml" />
diff -r 7dd3a089101138a4796eb73a5f1391d2f436723e -r d00337a396ffd79e78ed38b9365e66d055b1cfc4 tool_conf.xml.sample
--- a/tool_conf.xml.sample
+++ b/tool_conf.xml.sample
@@ -6,6 +6,7 @@
<tool file="data_source/ucsc_tablebrowser_test.xml" /><tool file="data_source/ucsc_tablebrowser_archaea.xml" /><tool file="data_source/bx_browser.xml" />
+ <tool file="data_source/ebi_sra.xml"/><tool file="data_source/microbial_import.xml" /><tool file="data_source/biomart.xml" /><tool file="data_source/biomart_test.xml" />
diff -r 7dd3a089101138a4796eb73a5f1391d2f436723e -r d00337a396ffd79e78ed38b9365e66d055b1cfc4 tools/data_source/ebi_sra.xml
--- /dev/null
+++ b/tools/data_source/ebi_sra.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<tool name="EBI SRA" id="ebi_sra_main" tool_type="data_source" version="1.0.0">
+ <description>ENA SRA</description>
+ <!-- This paython script imports the file into Galaxy -->
+ <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+ <!-- The URL where Galaxy will forwars the user when this tool is accessed from the Get Data menu -->
+ <inputs action="http://www.ebi.ac.uk/ena/" check_values="false" method="get">
+ <display>go to EBI SRA server $GALAXY_URL</display>
+ </inputs>
+ <uihints minwidth="800"/>
+ <outputs>
+ <data name="output" format="fastq"/>
+ </outputs>
+ <options sanitize="False" refresh="True"/>
+</tool>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.