galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
January 2013
- 1 participants
- 160 discussions
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6538175fb3e6/
changeset: 6538175fb3e6
user: jgoecks
date: 2013-01-17 21:57:21
summary: Remove incorrect test parameter.
affected #: 1 file
diff -r d4a61389668164e833eef418ab03c1467ba64d13 -r 6538175fb3e6483895aaadfdeddd09aac558fdf5 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -462,7 +462,6 @@
<param name="use_annotations" value="No" /><param name="use_juncs" value="No" /><param name="no_novel_juncs" value="No" />
- <param name="report_discordant_pairs" value="No" /><param name="use_search" value="Yes" /><param name="min_coverage_intron" value="50" /><param name="max_coverage_intron" value="20000" />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/44d9d215b9c3/
changeset: 44d9d215b9c3
user: epaniagu
date: 2011-11-22 21:42:18
summary: fix add/remove buttons in Repeat elements
affected #: 1 file
diff -r 9d6a9963b0da21fe3139fcefbd11c1ec6290a529 -r 44d9d215b9c337cf9b988976bf93f429dbd84ccb templates/tool_form.mako
--- a/templates/tool_form.mako
+++ b/templates/tool_form.mako
@@ -139,13 +139,17 @@
%><div class="form-title-row"><strong>${input.title} ${i + 1}</strong></div>
${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )}
- <div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
+ %if input.min < len( repeat_state ):
+ <div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
+ %endif
</div>
%if rep_errors.has_key( '__index__' ):
<div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${rep_errors['__index__']}</span></div>
%endif
%endfor
- <div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
+ %if input.max > len( repeat_state ):
+ <div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
+ %endif
</div>
%elif input.type == "conditional":
<%
https://bitbucket.org/galaxy/galaxy-central/commits/b6ff1a695e07/
changeset: b6ff1a695e07
user: epaniagu
date: 2011-11-22 21:53:56
summary: replace len( repeat_state ) with a variable so there's only one call
affected #: 1 file
diff -r 44d9d215b9c337cf9b988976bf93f429dbd84ccb -r b6ff1a695e07e6a2443a3d6d0aa9a9066a155dbc templates/tool_form.mako
--- a/templates/tool_form.mako
+++ b/templates/tool_form.mako
@@ -127,8 +127,11 @@
</div>
%endif
</div>
- <% repeat_state = tool_state[input.name] %>
- %for i in range( len( repeat_state ) ):
+ <%
+ repeat_state = tool_state[input.name]
+ num_repeats = len( repeat_state )
+ %>
+ %for i in range( num_repeats ):
<div class="repeat-group-item"><%
if input.name in errors:
@@ -139,7 +142,7 @@
%><div class="form-title-row"><strong>${input.title} ${i + 1}</strong></div>
${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )}
- %if input.min < len( repeat_state ):
+ %if input.min < num_repeats:
<div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
%endif
</div>
@@ -147,7 +150,7 @@
<div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${rep_errors['__index__']}</span></div>
%endif
%endfor
- %if input.max > len( repeat_state ):
+ %if input.max > num_repeats:
<div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
%endif
</div>
https://bitbucket.org/galaxy/galaxy-central/commits/8d068273cf5b/
changeset: 8d068273cf5b
user: jgoecks
date: 2013-01-17 21:32:34
summary: Merged in epaniagu/galaxy-central (pull request #24: Fix Add/Remove buttons for Repeat groups)
affected #: 1 file
diff -r 7848d6fd1b7a3ef8330ad1b31f5a3521094ad706 -r 8d068273cf5b1160a27977727a7ab6f2237d4bb7 templates/tool_form.mako
--- a/templates/tool_form.mako
+++ b/templates/tool_form.mako
@@ -135,8 +135,11 @@
</div>
%endif
</div>
- <% repeat_state = tool_state[input.name] %>
- %for i in range( len( repeat_state ) ):
+ <%
+ repeat_state = tool_state[input.name]
+ num_repeats = len( repeat_state )
+ %>
+ %for i in range( num_repeats ):
<div class="repeat-group-item"><%
if input.name in errors:
@@ -147,13 +150,25 @@
%><div class="form-title-row"><strong>${input.title} ${i + 1}</strong></div>
${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )}
+<<<<<<< local
<div class="form-row"><input type="submit" class="btn" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
+=======
+ %if input.min < num_repeats:
+ <div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
+ %endif
+>>>>>>> other
</div>
%if rep_errors.has_key( '__index__' ):
<div><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${rep_errors['__index__']}</span></div>
%endif
%endfor
+<<<<<<< local
<div class="form-row"><input type="submit" class="btn" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
+=======
+ %if input.max > num_repeats:
+ <div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
+ %endif
+>>>>>>> other
</div>
%elif input.type == "conditional":
<%
https://bitbucket.org/galaxy/galaxy-central/commits/d4a613896681/
changeset: d4a613896681
user: jgoecks
date: 2013-01-17 21:34:04
summary: Automated merge.
affected #: 1 file
diff -r 8d068273cf5b1160a27977727a7ab6f2237d4bb7 -r d4a61389668164e833eef418ab03c1467ba64d13 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -50,6 +50,41 @@
migrated_tool_panel_config = 'migrated_tools_conf.xml'
installed_tool_panel_configs = [ 'shed_tool_conf.xml' ]
+# should this serve static resources (scripts, images, styles, etc.)
+STATIC_ENABLED = True
+
+def get_static_settings():
+ """Returns dictionary of the settings necessary for a galaxy App
+ to be wrapped in the static middleware.
+
+ This mainly consists of the filesystem locations of url-mapped
+ static resources.
+ """
+ cwd = os.getcwd()
+ static_dir = os.path.join( cwd, 'static' )
+ #TODO: these should be copied from universe_wsgi.ini
+ return dict(
+ #TODO: static_enabled needed here?
+ static_enabled = True,
+ static_cache_time = 360,
+ static_dir = static_dir,
+ static_images_dir = os.path.join( static_dir, 'images', '' ),
+ static_favicon_dir = os.path.join( static_dir, 'favicon.ico' ),
+ static_scripts_dir = os.path.join( static_dir, 'scripts', '' ),
+ static_style_dir = os.path.join( static_dir, 'june_2007_style', 'blue' ),
+ static_robots_txt = os.path.join( static_dir, 'robots.txt' ),
+ )
+
+def get_webapp_global_conf():
+ """Get the global_conf dictionary sent as the first argument to app_factory.
+ """
+ # (was originally sent 'dict()') - nothing here for now except static settings
+ global_conf = dict()
+ if STATIC_ENABLED:
+ global_conf.update( get_static_settings() )
+ return global_conf
+
+
def parse_tool_panel_config( config, shed_tools_dict ):
"""
Parse a shed-related tool panel config to generate the shed_tools_dict. This only happens when testing tools installed from the tool shed.
@@ -289,7 +324,8 @@
server = None
if start_server:
- webapp = buildapp.app_factory( dict(), use_translogger=False, static_enabled=False, app=app )
+ webapp = buildapp.app_factory( get_webapp_global_conf(), app=app,
+ use_translogger=False, static_enabled=STATIC_ENABLED )
if galaxy_test_port is not None:
server = httpserver.serve( webapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: carlfeberhard: Enable serving static files when running functional tests
by Bitbucket 17 Jan '13
by Bitbucket 17 Jan '13
17 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/da89c3556c27/
changeset: da89c3556c27
user: carlfeberhard
date: 2013-01-17 20:58:43
summary: Enable serving static files when running functional tests
affected #: 1 file
diff -r 7848d6fd1b7a3ef8330ad1b31f5a3521094ad706 -r da89c3556c27c8f1e5acf943c6983be5437efac6 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -50,6 +50,41 @@
migrated_tool_panel_config = 'migrated_tools_conf.xml'
installed_tool_panel_configs = [ 'shed_tool_conf.xml' ]
+# should this serve static resources (scripts, images, styles, etc.)
+STATIC_ENABLED = True
+
+def get_static_settings():
+ """Returns dictionary of the settings necessary for a galaxy App
+ to be wrapped in the static middleware.
+
+ This mainly consists of the filesystem locations of url-mapped
+ static resources.
+ """
+ cwd = os.getcwd()
+ static_dir = os.path.join( cwd, 'static' )
+ #TODO: these should be copied from universe_wsgi.ini
+ return dict(
+ #TODO: static_enabled needed here?
+ static_enabled = True,
+ static_cache_time = 360,
+ static_dir = static_dir,
+ static_images_dir = os.path.join( static_dir, 'images', '' ),
+ static_favicon_dir = os.path.join( static_dir, 'favicon.ico' ),
+ static_scripts_dir = os.path.join( static_dir, 'scripts', '' ),
+ static_style_dir = os.path.join( static_dir, 'june_2007_style', 'blue' ),
+ static_robots_txt = os.path.join( static_dir, 'robots.txt' ),
+ )
+
+def get_webapp_global_conf():
+ """Get the global_conf dictionary sent as the first argument to app_factory.
+ """
+ # (was originally sent 'dict()') - nothing here for now except static settings
+ global_conf = dict()
+ if STATIC_ENABLED:
+ global_conf.update( get_static_settings() )
+ return global_conf
+
+
def parse_tool_panel_config( config, shed_tools_dict ):
"""
Parse a shed-related tool panel config to generate the shed_tools_dict. This only happens when testing tools installed from the tool shed.
@@ -289,7 +324,8 @@
server = None
if start_server:
- webapp = buildapp.app_factory( dict(), use_translogger=False, static_enabled=False, app=app )
+ webapp = buildapp.app_factory( get_webapp_global_conf(), app=app,
+ use_translogger=False, static_enabled=STATIC_ENABLED )
if galaxy_test_port is not None:
server = httpserver.serve( webapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2eb6dddb3866/
changeset: 2eb6dddb3866
user: fangly
date: 2011-10-05 10:04:58
summary: Paired-end code that properly ignores description part of FASTQ headers
affected #: 1 file
diff -r 087a766b3eca312d49caffa6b821d304658825ae -r 2eb6dddb3866adef30b72e92e747d9ece4e11da9 lib/galaxy_utils/sequence/fastq.py
--- a/lib/galaxy_utils/sequence/fastq.py
+++ b/lib/galaxy_utils/sequence/fastq.py
@@ -514,9 +514,13 @@
self.apply_galaxy_conventions = apply_galaxy_conventions
def close( self ):
return self.file.close()
- def get( self, sequence_id ):
- if not isinstance( sequence_id, basestring ):
- sequence_id = sequence_id.identifier
+ def get( self, sequence_identifier ):
+ # Input is either a sequence ID or a sequence object
+ if not isinstance( sequence_identifier, basestring ):
+ # Input was a sequence object (not a sequence ID). Get the sequence ID
+ sequence_identifier = sequence_identifier.identifier
+ # Get only the ID part of the sequence header
+ sequence_id, sequence_sep, sequence_desc = sequence_identifier.partition(' ')
rval = None
if sequence_id in self.offset_dict:
initial_offset = self.file.tell()
@@ -525,7 +529,7 @@
del self.offset_dict[ sequence_id ]
self.file.seek( seq_offset )
rval = self.reader.next()
- #assert rval.identifier == sequence_id, 'seq id mismatch' #should be able to remove this
+ #assert rval.id == sequence_id, 'seq id mismatch' #should be able to remove this
self.file.seek( initial_offset )
else:
while True:
@@ -535,13 +539,14 @@
except StopIteration:
self.eof = True
break #eof, id not found, will return None
- if fastq_read.identifier == sequence_id:
+ fastq_read_id, fastq_read_sep, fastq_read_desc = fastq_read.identifier.partition(' ')
+ if fastq_read_id == sequence_id:
rval = fastq_read
break
else:
- if fastq_read.identifier not in self.offset_dict:
- self.offset_dict[ fastq_read.identifier ] = []
- self.offset_dict[ fastq_read.identifier ].append( offset )
+ if fastq_read_id not in self.offset_dict:
+ self.offset_dict[ fastq_read_id ] = []
+ self.offset_dict[ fastq_read_id ].append( offset )
if rval is not None and self.apply_galaxy_conventions:
rval.apply_galaxy_conventions()
return rval
@@ -582,16 +587,18 @@
self.format = format
self.force_quality_encoding = force_quality_encoding
def join( self, read1, read2 ):
- if read1.identifier.endswith( '/2' ) and read2.identifier.endswith( '/1' ):
+ read1_id, read1_sep, read1_desc = read1.identifier.partition(' ')
+ read2_id, read2_sep, read2_desc = read2.identifier.partition(' ')
+ if read1_id.endswith( '/2' ) and read2_id.endswith( '/1' ):
#swap 1 and 2
tmp = read1
read1 = read2
read2 = tmp
del tmp
- if read1.identifier.endswith( '/1' ) and read2.identifier.endswith( '/2' ):
- identifier = read1.identifier[:-2]
- else:
- identifier = read1.identifier
+ if read1_id.endswith( '/1' ) and read2_id.endswith( '/2' ):
+ read1_id = read1_id[:-2]
+
+ identifier = read1_id + ' ' + read1_desc
#use force quality encoding, if not present force to encoding of first read
force_quality_encoding = self.force_quality_encoding
@@ -621,17 +628,18 @@
rval.quality = "%s %s" % ( new_read1.quality.strip(), new_read2.quality.strip() )
return rval
def get_paired_identifier( self, fastq_read ):
- identifier = fastq_read.identifier
- if identifier[-2] == '/':
- if identifier[-1] == "1":
- identifier = "%s2" % identifier[:-1]
- elif identifier[-1] == "2":
- identifier = "%s1" % identifier[:-1]
- return identifier
+ read_id, read_sep, read_desc = fastq_read.identifier.partition(' ')
+ if read_id[-2] == '/':
+ if read_id[-1] == "1":
+ read_id = "%s2" % read_id[:-1]
+ elif read_id[-1] == "2":
+ read_id = "%s1" % read_id[:-1]
+ return read_id
def is_first_mate( self, sequence_id ):
is_first = None
if not isinstance( sequence_id, basestring ):
sequence_id = sequence_id.identifier
+ sequence_id, sequence_sep, sequence_desc = sequence_id.partition(' ')
if sequence_id[-2] == '/':
if sequence_id[-1] == "1":
is_first = True
https://bitbucket.org/galaxy/galaxy-central/commits/34e7cf3bcef0/
changeset: 34e7cf3bcef0
user: fangly
date: 2011-11-30 02:38:52
summary: Avoid trailing whitespace
affected #: 1 file
diff -r 2eb6dddb3866adef30b72e92e747d9ece4e11da9 -r 34e7cf3bcef0eb7bf7d0684e8ac5d91e03750d8c lib/galaxy_utils/sequence/fastq.py
--- a/lib/galaxy_utils/sequence/fastq.py
+++ b/lib/galaxy_utils/sequence/fastq.py
@@ -597,8 +597,10 @@
del tmp
if read1_id.endswith( '/1' ) and read2_id.endswith( '/2' ):
read1_id = read1_id[:-2]
-
- identifier = read1_id + ' ' + read1_desc
+
+ identifier = read1_id
+ if read1_desc:
+ identifier = identifier + ' ' + read1_desc
#use force quality encoding, if not present force to encoding of first read
force_quality_encoding = self.force_quality_encoding
https://bitbucket.org/galaxy/galaxy-central/commits/7d4a431f7188/
changeset: 7d4a431f7188
user: fangly
date: 2011-11-30 03:01:07
summary: Updated tests for FASTQ interlacer/deinterlacer tool
affected #: 2 files
diff -r 34e7cf3bcef0eb7bf7d0684e8ac5d91e03750d8c -r 7d4a431f7188d71d5e0ba2655a10145ecbdb4468 test-data/paired_end_2.fastqsanger
--- a/test-data/paired_end_2.fastqsanger
+++ b/test-data/paired_end_2.fastqsanger
@@ -1,6 +1,6 @@
-@1539:931/2
+@1539:931/2 this read has a description
GCGCGTAACGTTTCACCTCGAGATCGTTGTCGGCCGCAATCTCCTGGGGGCGCCATTCCGAATCGTAGTTGTCGGCGTCTTCCAGTGCGGCAAGGCATCGT
-+1539:931/2
++1539:931/2 this read has a description
aee_dcadeeWcaaadJbdaff[fffc]dcfe[dRc^\[^QVOZXXZSPFWNUUZ\P^`BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
@2971:937/2
CTCGCACGGCCGCCTCGACCACTTGGTCTGGCGTCATGCGCAATTTTTTCTCCATGTGGAACGGGCTGGTGGCGATGAACGTATGAATATGCCCCCGCGCT
diff -r 34e7cf3bcef0eb7bf7d0684e8ac5d91e03750d8c -r 7d4a431f7188d71d5e0ba2655a10145ecbdb4468 test-data/paired_end_merged.fastqsanger
--- a/test-data/paired_end_merged.fastqsanger
+++ b/test-data/paired_end_merged.fastqsanger
@@ -2,9 +2,9 @@
NACATCAACACTCAGTAACGGCTGGCGCAAAATGGCATTGATTAACGAAGACTTCCCGCGCGTGAAGGCGCCGGCAAACGAGGCTCGGGAAGGGGCTCCCG
+1539:931/1
BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
-@1539:931/2
+@1539:931/2 this read has a description
GCGCGTAACGTTTCACCTCGAGATCGTTGTCGGCCGCAATCTCCTGGGGGCGCCATTCCGAATCGTAGTTGTCGGCGTCTTCCAGTGCGGCAAGGCATCGT
-+1539:931/2
++1539:931/2 this read has a description
aee_dcadeeWcaaadJbdaff[fffc]dcfe[dRc^\[^QVOZXXZSPFWNUUZ\P^`BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
@2971:937/1
NCGGAGACTTCGAGGCCATCCAGTCGATTGCCAAAGTCATCAAGGGGTCGACGATCTGCTCCCTTGCCCGTTCCAACGAGAATGAAATCCGCCGCGCGTGG
https://bitbucket.org/galaxy/galaxy-central/commits/7848d6fd1b7a/
changeset: 7848d6fd1b7a
user: jgoecks
date: 2013-01-17 20:12:57
summary: Merged in fangly/galaxy-central (pull request #8: Paired-end code mishandles description of FASTQ headers)
affected #: 3 files
diff -r 1b95e5b076fee018402e5c94534a2e65ea6c5315 -r 7848d6fd1b7a3ef8330ad1b31f5a3521094ad706 lib/galaxy_utils/sequence/fastq.py
--- a/lib/galaxy_utils/sequence/fastq.py
+++ b/lib/galaxy_utils/sequence/fastq.py
@@ -514,9 +514,13 @@
self.apply_galaxy_conventions = apply_galaxy_conventions
def close( self ):
return self.file.close()
- def get( self, sequence_id ):
- if not isinstance( sequence_id, basestring ):
- sequence_id = sequence_id.identifier
+ def get( self, sequence_identifier ):
+ # Input is either a sequence ID or a sequence object
+ if not isinstance( sequence_identifier, basestring ):
+ # Input was a sequence object (not a sequence ID). Get the sequence ID
+ sequence_identifier = sequence_identifier.identifier
+ # Get only the ID part of the sequence header
+ sequence_id, sequence_sep, sequence_desc = sequence_identifier.partition(' ')
rval = None
if sequence_id in self.offset_dict:
initial_offset = self.file.tell()
@@ -525,7 +529,7 @@
del self.offset_dict[ sequence_id ]
self.file.seek( seq_offset )
rval = self.reader.next()
- #assert rval.identifier == sequence_id, 'seq id mismatch' #should be able to remove this
+ #assert rval.id == sequence_id, 'seq id mismatch' #should be able to remove this
self.file.seek( initial_offset )
else:
while True:
@@ -535,13 +539,14 @@
except StopIteration:
self.eof = True
break #eof, id not found, will return None
- if fastq_read.identifier == sequence_id:
+ fastq_read_id, fastq_read_sep, fastq_read_desc = fastq_read.identifier.partition(' ')
+ if fastq_read_id == sequence_id:
rval = fastq_read
break
else:
- if fastq_read.identifier not in self.offset_dict:
- self.offset_dict[ fastq_read.identifier ] = []
- self.offset_dict[ fastq_read.identifier ].append( offset )
+ if fastq_read_id not in self.offset_dict:
+ self.offset_dict[ fastq_read_id ] = []
+ self.offset_dict[ fastq_read_id ].append( offset )
if rval is not None and self.apply_galaxy_conventions:
rval.apply_galaxy_conventions()
return rval
@@ -582,16 +587,20 @@
self.format = format
self.force_quality_encoding = force_quality_encoding
def join( self, read1, read2 ):
- if read1.identifier.endswith( '/2' ) and read2.identifier.endswith( '/1' ):
+ read1_id, read1_sep, read1_desc = read1.identifier.partition(' ')
+ read2_id, read2_sep, read2_desc = read2.identifier.partition(' ')
+ if read1_id.endswith( '/2' ) and read2_id.endswith( '/1' ):
#swap 1 and 2
tmp = read1
read1 = read2
read2 = tmp
del tmp
- if read1.identifier.endswith( '/1' ) and read2.identifier.endswith( '/2' ):
- identifier = read1.identifier[:-2]
- else:
- identifier = read1.identifier
+ if read1_id.endswith( '/1' ) and read2_id.endswith( '/2' ):
+ read1_id = read1_id[:-2]
+
+ identifier = read1_id
+ if read1_desc:
+ identifier = identifier + ' ' + read1_desc
#use force quality encoding, if not present force to encoding of first read
force_quality_encoding = self.force_quality_encoding
@@ -621,17 +630,18 @@
rval.quality = "%s %s" % ( new_read1.quality.strip(), new_read2.quality.strip() )
return rval
def get_paired_identifier( self, fastq_read ):
- identifier = fastq_read.identifier
- if identifier[-2] == '/':
- if identifier[-1] == "1":
- identifier = "%s2" % identifier[:-1]
- elif identifier[-1] == "2":
- identifier = "%s1" % identifier[:-1]
- return identifier
+ read_id, read_sep, read_desc = fastq_read.identifier.partition(' ')
+ if read_id[-2] == '/':
+ if read_id[-1] == "1":
+ read_id = "%s2" % read_id[:-1]
+ elif read_id[-1] == "2":
+ read_id = "%s1" % read_id[:-1]
+ return read_id
def is_first_mate( self, sequence_id ):
is_first = None
if not isinstance( sequence_id, basestring ):
sequence_id = sequence_id.identifier
+ sequence_id, sequence_sep, sequence_desc = sequence_id.partition(' ')
if sequence_id[-2] == '/':
if sequence_id[-1] == "1":
is_first = True
diff -r 1b95e5b076fee018402e5c94534a2e65ea6c5315 -r 7848d6fd1b7a3ef8330ad1b31f5a3521094ad706 test-data/paired_end_2.fastqsanger
--- a/test-data/paired_end_2.fastqsanger
+++ b/test-data/paired_end_2.fastqsanger
@@ -1,6 +1,6 @@
-@1539:931/2
+@1539:931/2 this read has a description
GCGCGTAACGTTTCACCTCGAGATCGTTGTCGGCCGCAATCTCCTGGGGGCGCCATTCCGAATCGTAGTTGTCGGCGTCTTCCAGTGCGGCAAGGCATCGT
-+1539:931/2
++1539:931/2 this read has a description
aee_dcadeeWcaaadJbdaff[fffc]dcfe[dRc^\[^QVOZXXZSPFWNUUZ\P^`BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
@2971:937/2
CTCGCACGGCCGCCTCGACCACTTGGTCTGGCGTCATGCGCAATTTTTTCTCCATGTGGAACGGGCTGGTGGCGATGAACGTATGAATATGCCCCCGCGCT
diff -r 1b95e5b076fee018402e5c94534a2e65ea6c5315 -r 7848d6fd1b7a3ef8330ad1b31f5a3521094ad706 test-data/paired_end_merged.fastqsanger
--- a/test-data/paired_end_merged.fastqsanger
+++ b/test-data/paired_end_merged.fastqsanger
@@ -2,9 +2,9 @@
NACATCAACACTCAGTAACGGCTGGCGCAAAATGGCATTGATTAACGAAGACTTCCCGCGCGTGAAGGCGCCGGCAAACGAGGCTCGGGAAGGGGCTCCCG
+1539:931/1
BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
-@1539:931/2
+@1539:931/2 this read has a description
GCGCGTAACGTTTCACCTCGAGATCGTTGTCGGCCGCAATCTCCTGGGGGCGCCATTCCGAATCGTAGTTGTCGGCGTCTTCCAGTGCGGCAAGGCATCGT
-+1539:931/2
++1539:931/2 this read has a description
aee_dcadeeWcaaadJbdaff[fffc]dcfe[dRc^\[^QVOZXXZSPFWNUUZ\P^`BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
@2971:937/1
NCGGAGACTTCGAGGCCATCCAGTCGATTGCCAAAGTCATCAAGGGGTCGACGATCTGCTCCCTTGCCCGTTCCAACGAGAATGAAATCCGCCGCGCGTGG
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Comment and naming fix for migration script 108.
by Bitbucket 17 Jan '13
by Bitbucket 17 Jan '13
17 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1b95e5b076fe/
changeset: 1b95e5b076fe
user: jgoecks
date: 2013-01-17 19:56:26
summary: Comment and naming fix for migration script 108.
affected #: 1 file
diff -r 1314572f86e121aa734ea27a52601fbcdb450278 -r 1b95e5b076fee018402e5c94534a2e65ea6c5315 lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
--- a/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
+++ b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
@@ -71,10 +71,10 @@
except Exception, e:
log.debug( "Dropping 'extended_metadata' table failed: %s" % ( str( e ) ) )
- # Drop the Job table's exit_code column.
+ # Drop the LDDA table's extended metadata ID column.
try:
- job_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
- extended_metadata_id = job_table.c.extended_metadata_id
+ ldda_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
+ extended_metadata_id = ldda_table.c.extended_metadata_id
extended_metadata_id.drop()
except Exception, e:
log.debug( "Dropping 'extended_metadata_id' column from library_dataset_dataset_association table failed: %s" % ( str( e ) ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ccc640f65971/
changeset: ccc640f65971
user: kellrott
date: 2013-01-15 22:57:14
summary: Adding exception catches to database 108 downgrade step to fix failure issue with downgrading postgres database (foreign key rules violated...)
affected #: 1 file
diff -r 4bd419751ed3e8cc54913fa37389111a0e7faaa9 -r ccc640f65971632ea6de7a94bc01d24954102487 lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
--- a/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
+++ b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
@@ -61,8 +61,15 @@
def downgrade():
metadata.reflect()
- ExtendedMetadata_table.drop()
- ExtendedMetadataIndex_table.drop()
+ try:
+ ExtendedMetadataIndex_table.drop()
+ except Exception, e:
+ log.debug( "Dropping 'extended_metadata_index' table failed: %s" % ( str( e ) ) )
+
+ try:
+ ExtendedMetadata_table.drop()
+ except Exception, e:
+ log.debug( "Dropping 'extended_metadata' table failed: %s" % ( str( e ) ) )
# Drop the Job table's exit_code column.
try:
https://bitbucket.org/galaxy/galaxy-central/commits/1314572f86e1/
changeset: 1314572f86e1
user: jgoecks
date: 2013-01-17 19:52:01
summary: Merged in kellrott/galaxy-central (pull request #109: Fixing database v108 downgrade failure)
affected #: 1 file
diff -r b14f68fb3e853313d944f934cf279b3517d4a7e6 -r 1314572f86e121aa734ea27a52601fbcdb450278 lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
--- a/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
+++ b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
@@ -61,8 +61,15 @@
def downgrade():
metadata.reflect()
- ExtendedMetadata_table.drop()
- ExtendedMetadataIndex_table.drop()
+ try:
+ ExtendedMetadataIndex_table.drop()
+ except Exception, e:
+ log.debug( "Dropping 'extended_metadata_index' table failed: %s" % ( str( e ) ) )
+
+ try:
+ ExtendedMetadata_table.drop()
+ except Exception, e:
+ log.debug( "Dropping 'extended_metadata' table failed: %s" % ( str( e ) ) )
# Drop the Job table's exit_code column.
try:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Use correct index tables for setting dbkey of Tophat2 outputs.
by Bitbucket 17 Jan '13
by Bitbucket 17 Jan '13
17 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b14f68fb3e85/
changeset: b14f68fb3e85
user: jgoecks
date: 2013-01-17 18:56:03
summary: Use correct index tables for setting dbkey of Tophat2 outputs.
affected #: 1 file
diff -r f11abc888753d0b8efeb4cae653a21ca64a2ae5b -r b14f68fb3e853313d944f934cf279b3517d4a7e6 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -323,7 +323,7 @@
<conditional name="refGenomeSource.genomeSource"><when value="indexed"><action type="metadata" name="dbkey">
- <option type="from_data_table" name="tophat_indexes" column="1" offset="0">
+ <option type="from_data_table" name="tophat2_indexes" column="1" offset="0"><filter type="param_value" column="0" value="#" compare="startswith" keep="False"/><filter type="param_value" ref="refGenomeSource.index" column="0"/></option>
@@ -342,7 +342,7 @@
<conditional name="refGenomeSource.genomeSource"><when value="indexed"><action type="metadata" name="dbkey">
- <option type="from_data_table" name="tophat_indexes" column="1" offset="0">
+ <option type="from_data_table" name="tophat2_indexes" column="1" offset="0"><filter type="param_value" column="0" value="#" compare="startswith" keep="False"/><filter type="param_value" ref="refGenomeSource.index" column="0"/></option>
@@ -361,7 +361,7 @@
<conditional name="refGenomeSource.genomeSource"><when value="indexed"><action type="metadata" name="dbkey">
- <option type="from_data_table" name="tophat_indexes" column="1" offset="0">
+ <option type="from_data_table" name="tophat2_indexes" column="1" offset="0"><filter type="param_value" column="0" value="#" compare="startswith" keep="False"/><filter type="param_value" ref="refGenomeSource.index" column="0"/></option>
@@ -380,7 +380,7 @@
<conditional name="refGenomeSource.genomeSource"><when value="indexed"><action type="metadata" name="dbkey">
- <option type="from_data_table" name="tophat_indexes" column="1" offset="0">
+ <option type="from_data_table" name="tophat2_indexes" column="1" offset="0"><filter type="param_value" column="0" value="#" compare="startswith" keep="False"/><filter type="param_value" ref="refGenomeSource.index" column="0"/></option>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Readd a request param that is no longer needed to a request between Galaxy and the tool shed to ensure backward compatibility.
by Bitbucket 17 Jan '13
by Bitbucket 17 Jan '13
17 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f11abc888753/
changeset: f11abc888753
user: greg
date: 2013-01-17 16:40:01
summary: Readd a request param that is no longer needed to a request between Galaxy and the tool shed to ensure backward compatibility.
affected #: 1 file
diff -r 6e1e7bee1e6f1b10eed3ba023e6ed0badf16ed38 -r f11abc888753d0b8efeb4cae653a21ca64a2ae5b lib/galaxy/webapps/galaxy/controllers/admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -711,7 +711,8 @@
tool_dependencies_dict = {}
repository_name = elem.get( 'name' )
changeset_revision = elem.get( 'changeset_revision' )
- url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s' % ( tool_shed_url, repository_name, changeset_revision )
+ url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s&from_install_manager=True' % \
+ ( tool_shed_url, repository_name, changeset_revision )
response = urllib2.urlopen( url )
text = response.read()
response.close()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Test for reinstalling an uninstalled repository that now has a new changeset adding repository dependencies.
by Bitbucket 17 Jan '13
by Bitbucket 17 Jan '13
17 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6e1e7bee1e6f/
changeset: 6e1e7bee1e6f
user: inithello
date: 2013-01-17 16:18:36
summary: Test for reinstalling an uninstalled repository that now has a new changeset adding repository dependencies.
affected #: 2 files
diff -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 -r 6e1e7bee1e6f1b10eed3ba023e6ed0badf16ed38 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -711,6 +711,8 @@
url = '/admin_toolshed/reselect_tool_panel_section?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed=[] )
+ # Build the url that will simulate a filled-out form being submitted. Due to a limitation in twill, the reselect_tool_panel_section
+ # form doesn't get parsed correctly.
repo_dependencies = self.create_checkbox_query_string( field_name='install_repository_dependencies', value=install_repository_dependencies )
tool_dependencies = self.create_checkbox_query_string( field_name='install_tool_dependencies', value=install_tool_dependencies )
encoded_repository_id = self.security.encode_id( installed_repository.id )
diff -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 -r 6e1e7bee1e6f1b10eed3ba023e6ed0badf16ed38 test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
@@ -0,0 +1,105 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os, logging
+import tool_shed.base.test_db_util as test_db_util
+
+column_repository_name = 'column_maker_1087'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_1087'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 1087 Advanced Circular Dependencies'
+category_description = 'Test circular dependency features'
+
+log = logging.getLogger( __name__ )
+
+class TestRepositoryDependencies( ShedTwillTestCase ):
+ '''Test installing a repository, then updating it to include repository dependencies.'''
+ def test_0000_create_or_login_admin_user( self ):
+ """Create necessary user accounts and login as an admin user."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ galaxy_admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ galaxy_admin_user_private_role = test_db_util.get_galaxy_private_role( galaxy_admin_user )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_and_populate_column_repository( self ):
+ """Create a category for this test suite and add repositories to it."""
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=column_repository_name,
+ description=column_repository_description,
+ long_description=column_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ self.upload_file( repository,
+ 'column_maker/column_maker.tar',
+ strings_displayed=[],
+ commit_message='Uploaded column_maker.tar.' )
+ def test_0010_create_and_populate_convert_repository( self ):
+ '''Create and populate the convert_chars repository.'''
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=convert_repository_name,
+ description=convert_repository_description,
+ long_description=convert_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ self.upload_file( repository,
+ 'convert_chars/convert_chars.tar',
+ strings_displayed=[],
+ commit_message='Uploaded convert_chars.tar.' )
+ def test_0015_install_and_uninstall_column_repository( self ):
+ '''Install and uninstall the column_maker repository.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.install_repository( column_repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_tool_dependencies=False,
+ install_repository_dependencies=True,
+ new_tool_panel_section='column_maker',
+ strings_not_displayed=[ 'install_repository_dependencies' ] )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_column_repository, remove_from_disk=True )
+ def test_0020_upload_dependency_xml( self ):
+ '''Upload a repository_dependencies.xml file to column_maker that specifies convert_chars.'''
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_1085', additional_paths=[ 'column' ] )
+ self.create_repository_dependency( column_repository, depends_on=[ convert_repository ], filepath=repository_dependencies_path )
+ def test_0025_verify_repository_dependency( self ):
+ '''Verify that the new revision of column_maker now depends on convert_chars.'''
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ self.check_repository_dependency( column_repository, convert_repository )
+ log.debug( [ repository.id for repository in test_db_util.get_all_installed_repositories() ] )
+ def test_0030_reinstall_column_repository( self ):
+ '''Reinstall column_maker and verify that it now shows repository dependencies.'''
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name )
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ strings_displayed=[ 'Handle repository dependencies', convert_repository.name, self.get_repository_tip( convert_repository ) ]
+ # Due to twill's limitations, only check for strings on the (redirected) reselect tool panel section page, don't actually reinstall.
+ url = '/admin_toolshed/browse_repositories?operation=activate+or+reinstall&id=%s' % self.security.encode_id( installed_column_repository.id )
+ self.visit_galaxy_url( url )
+ self.check_for_strings( strings_displayed )
+ uninstalled_repositories = [ ( column_repository_name, common.test_user_1_name ) ]
+ self.verify_installed_uninstalled_repositories( uninstalled_repositories=uninstalled_repositories, installed_repositories=[] )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Properly handle updates which have become available for tool shed repositories while they were uninstalled when reinstalling them.
by Bitbucket 16 Jan '13
by Bitbucket 16 Jan '13
16 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e6302ee56ed0/
changeset: e6302ee56ed0
user: greg
date: 2013-01-16 21:39:22
summary: Properly handle updates which have become available for tool shed repositories while they were uninstalled when reinstalling them.
affected #: 8 files
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -596,8 +596,7 @@
# In this case, a record for the repository will exist in the database with the status of 'New'.
repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
if repository and repository.metadata:
- installed_rd, missing_rd = \
- get_installed_and_missing_repository_dependencies( trans, repository )
+ installed_rd, missing_rd = get_installed_and_missing_repository_dependencies( trans, repository )
else:
installed_rd, missing_rd = get_installed_and_missing_repository_dependencies_for_new_install( trans, repo_info_tuple )
# Discover all repository dependencies and retrieve information for installing them.
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -66,7 +66,7 @@
tool_dependencies[ dependency_key ] = requirements_dict
return tool_dependencies
def build_readme_files_dict( metadata, tool_path=None ):
- """Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received repository_metadata."""
+ """Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received metadata."""
readme_files_dict = {}
if metadata:
if 'readme_files' in metadata:
@@ -128,10 +128,14 @@
containers_dict[ 'readme_files' ] = readme_files_root_folder
# Installed repository dependencies container.
if repository_dependencies:
+ if new_install:
+ label = 'Repository dependencies'
+ else:
+ label = 'Installed repository dependencies'
folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans,
folder_id=folder_id,
repository_dependencies=repository_dependencies,
- label='Installed repository dependencies',
+ label=label,
installed=True )
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
# Missing repository dependencies container.
@@ -777,17 +781,20 @@
sa_session.flush()
return tool_shed_repository
def create_repo_info_dict( trans, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None, repository=None,
- repository_metadata=None, metadata=None, repository_dependencies=None ):
+ repository_metadata=None, tool_dependencies=None, repository_dependencies=None ):
"""
Return a dictionary that includes all of the information needed to install a repository into a local Galaxy instance. The dictionary will also
contain the recursive list of repository dependencies defined for the repository, as well as the defined tool dependencies.
- This method is called from Galaxy in two places:
- 1. During the tool shed repository installation process (via the tool shed's get_repository_information() method)- in this case both the received
- repository and repository_metadata will be objects.
- 2. When a tool shed repository that was uninstalled from a Galaxy instance is being re-installed - in this case, both repository and
- repository_metadata will be None, but metadata will be the tool_shed_repository metadata on the Galaxy side, and the repository_dependencies will
- be an object previously retrieved from the tool shed.
+ This method is called from Galaxy unser three scenarios:
+ 1. During the tool shed repository installation process via the tool shed's get_repository_information() method. In this case both the received
+ repository and repository_metadata will be objects., but tool_dependencies and repository_dependencies will be None
+ 2. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no updates available. In this case, both
+ repository and repository_metadata will be None, but tool_dependencies and repository_dependencies will be objects previously retrieved from the
+ tool shed if the repository includes definitions for them.
+ 3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates available. In this case, this
+ method is reached via the tool shed's get_updated_repository_information() method, and both repository and repository_metadata will be objects
+ but tool_dependencies and repository_dependencies will be None.
"""
repo_info_dict = {}
repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
@@ -806,27 +813,24 @@
all_repository_dependencies=None,
handled_key_rd_dicts=None,
circular_repository_dependencies=None )
- if metadata:
- tool_dependencies = metadata.get( 'tool_dependencies', None )
- if tool_dependencies:
- new_tool_dependencies = {}
- for dependency_key, requirements_dict in tool_dependencies.items():
- if dependency_key in [ 'set_environment' ]:
- new_set_environment_dict_list = []
- for set_environment_dict in requirements_dict:
- set_environment_dict[ 'repository_name' ] = repository_name
- set_environment_dict[ 'repository_owner' ] = repository_owner
- set_environment_dict[ 'changeset_revision' ] = changeset_revision
- new_set_environment_dict_list.append( set_environment_dict )
- new_tool_dependencies[ dependency_key ] = new_set_environment_dict_list
- else:
- requirements_dict[ 'repository_name' ] = repository_name
- requirements_dict[ 'repository_owner' ] = repository_owner
- requirements_dict[ 'changeset_revision' ] = changeset_revision
- new_tool_dependencies[ dependency_key ] = requirements_dict
- tool_dependencies = new_tool_dependencies
- else:
- tool_dependencies = None
+ tool_dependencies = metadata.get( 'tool_dependencies', None )
+ if tool_dependencies:
+ new_tool_dependencies = {}
+ for dependency_key, requirements_dict in tool_dependencies.items():
+ if dependency_key in [ 'set_environment' ]:
+ new_set_environment_dict_list = []
+ for set_environment_dict in requirements_dict:
+ set_environment_dict[ 'repository_name' ] = repository_name
+ set_environment_dict[ 'repository_owner' ] = repository_owner
+ set_environment_dict[ 'changeset_revision' ] = changeset_revision
+ new_set_environment_dict_list.append( set_environment_dict )
+ new_tool_dependencies[ dependency_key ] = new_set_environment_dict_list
+ else:
+ requirements_dict[ 'repository_name' ] = repository_name
+ requirements_dict[ 'repository_owner' ] = repository_owner
+ requirements_dict[ 'changeset_revision' ] = changeset_revision
+ new_tool_dependencies[ dependency_key ] = requirements_dict
+ tool_dependencies = new_tool_dependencies
# Cast unicode to string.
repo_info_dict[ str( repository.name ) ] = ( str( repository.description ),
str( repository_clone_url ),
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1406,7 +1406,7 @@
a local Galaxy instance.
"""
includes_tools = False
- includes_repository_dependencies = False
+ has_repository_dependencies = False
includes_tool_dependencies = False
repo_info_dicts = []
for tup in zip( util.listify( repository_ids ), util.listify( changeset_revisions ) ):
@@ -1417,8 +1417,8 @@
metadata = repository_metadata.metadata
if not includes_tools and 'tools' in metadata:
includes_tools = True
- if not includes_repository_dependencies and 'repository_dependencies' in metadata:
- includes_repository_dependencies = True
+ if not has_repository_dependencies and 'repository_dependencies' in metadata:
+ has_repository_dependencies = True
if not includes_tool_dependencies and 'tool_dependencies' in metadata:
includes_tool_dependencies = True
repo_dir = repository.repo_path( trans.app )
@@ -1431,11 +1431,12 @@
repository_owner=repository.user.username,
repository_name=repository.name,
repository=repository,
- metadata=None,
- repository_metadata=repository_metadata )
+ repository_metadata=repository_metadata,
+ tool_dependencies=None,
+ repository_dependencies=None )
repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
return dict( includes_tools=includes_tools,
- includes_repository_dependencies=includes_repository_dependencies,
+ has_repository_dependencies=has_repository_dependencies,
includes_tool_dependencies=includes_tool_dependencies,
repo_info_dicts=repo_info_dicts )
@web.json
@@ -1465,10 +1466,6 @@
def get_tool_dependencies( self, trans, **kwd ):
"""Handle a request from a Galaxy instance."""
params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- # If the request originated with the UpdateManager, it will not include a galaxy_url.
- galaxy_url = kwd.get( 'galaxy_url', '' )
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
@@ -1478,10 +1475,8 @@
break
metadata = downloadable_revision.metadata
tool_dependencies = metadata.get( 'tool_dependencies', '' )
- from_install_manager = kwd.get( 'from_install_manager', False )
- if from_install_manager:
- if tool_dependencies:
- return encoding_util.tool_shed_encode( tool_dependencies )
+ if tool_dependencies:
+ return encoding_util.tool_shed_encode( tool_dependencies )
return ''
@web.expose
def get_tool_versions( self, trans, **kwd ):
@@ -1506,6 +1501,53 @@
if tool_version_dicts:
return json.to_json_string( tool_version_dicts )
return ''
+ @web.json
+ def get_updated_repository_information( self, trans, name, owner, changeset_revision, **kwd ):
+ """Generate a disctionary that contains the information about a repository that is necessary for installing it into a local Galaxy instance."""
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
+ repository_id = trans.security.encode_id( repository.id )
+ repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
+ repo_info_dict = suc.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=changeset_revision,
+ ctx_rev=str( ctx.rev() ),
+ repository_owner=repository.user.username,
+ repository_name=repository.name,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ tool_dependencies=None,
+ repository_dependencies=None )
+ metadata = repository_metadata.metadata
+ if metadata:
+ readme_files_dict = suc.build_readme_files_dict( metadata )
+ if 'tools' in metadata:
+ includes_tools = True
+ else:
+ includes_tools = False
+ else:
+ readme_files_dict = None
+ includes_tools = False
+ # See if the repo_info_dict was populated with repository_dependencies or tool_dependencies.
+ for name, repo_info_tuple in repo_info_dict.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if repository_dependencies:
+ has_repository_dependencies = True
+ else:
+ has_repository_dependencies = False
+ if tool_dependencies:
+ includes_tool_dependencies = True
+ else:
+ includes_tool_dependencies = False
+ return dict( includes_tools=includes_tools,
+ has_repository_dependencies=has_repository_dependencies,
+ includes_tool_dependencies=includes_tool_dependencies,
+ readme_files_dict=readme_files_dict,
+ repo_info_dict=repo_info_dict )
def get_versions_of_tool( self, trans, repository, repository_metadata, guid ):
"""Return the tool lineage in descendant order for the received guid contained in the received repsitory_metadata.tool_versions."""
encoded_id = trans.security.encode_id( repository.id )
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -485,10 +485,14 @@
sub_folder.repository_dependencies.append( repository_dependency )
for repository_dependency in rd_value:
if trans.webapp.name == 'galaxy':
- # We have two extra items in the tuple, repository.id and repository.status.
- tool_shed_repository_id = repository_dependency[ 4 ]
- installation_status = repository_dependency[ 5 ]
- repository_dependency = repository_dependency[ 0:4 ]
+ if len( repository_dependency ) == 6:
+ # We have two extra items in the tuple, repository.id and repository.status.
+ tool_shed_repository_id = repository_dependency[ 4 ]
+ installation_status = repository_dependency[ 5 ]
+ repository_dependency = repository_dependency[ 0:4 ]
+ else:
+ tool_shed_repository_id = None
+ installation_status = 'unknown'
else:
tool_shed_repository_id = None
installation_status = None
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 lib/galaxy/webapps/galaxy/controllers/admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -711,8 +711,7 @@
tool_dependencies_dict = {}
repository_name = elem.get( 'name' )
changeset_revision = elem.get( 'changeset_revision' )
- url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s&from_install_manager=True' % \
- ( tool_shed_url, repository_name, changeset_revision )
+ url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s' % ( tool_shed_url, repository_name, changeset_revision )
response = urllib2.urlopen( url )
text = response.read()
response.close()
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -374,21 +374,41 @@
if repository.uninstalled:
# Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated so that we
# can reset the metadata if necessary. This will ensure that information about repository dependencies and tool dependencies
- # will be current.
+ # will be current. Only allow selecting a different section in the tool panel if the repository was uninstalled.
current_changeset_revision, current_ctx_rev, includes_tools, has_repository_dependencies = \
shed_util.get_update_to_changeset_revision_and_ctx_rev( trans, repository )
if current_ctx_rev == repository.ctx_rev:
- includes_tools = repository.includes_tools
- has_repository_dependencies = repository.has_repository_dependencies
- if includes_tools or has_repository_dependencies:
- # Only allow selecting a different section in the tool panel if the repository was uninstalled.
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='reselect_tool_panel_section',
- **kwd ) )
+ # The uninstalled repository is current.
+ if repository.includes_tools or repository.has_repository_dependencies:
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='reselect_tool_panel_section',
+ **kwd ) )
+ else:
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='reinstall_repository',
+ **kwd ) )
else:
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='reinstall_repository',
- **kwd ) )
+ # The uninstalled repository has updates available in the tool shed.
+ updated_repo_info_dict = self.get_updated_repository_information( trans=trans,
+ repository_id=trans.security.encode_id( repository.id ),
+ repository_name=repository.name,
+ repository_owner=repository.owner,
+ changeset_revision=current_changeset_revision )
+ has_repository_dependencies = updated_repo_info_dict.get( 'has_repository_dependencies', False )
+ includes_tool_dependencies = updated_repo_info_dict.get( 'includes_tool_dependencies', False )
+ if has_repository_dependencies or includes_tool_dependencies:
+ json_repo_info_dict = json.to_json_string( updated_repo_info_dict )
+ encoded_repo_info_dict = encoding_util.tool_shed_encode( json_repo_info_dict )
+ kwd[ 'latest_changeset_revision' ] = current_changeset_revision
+ kwd[ 'latest_ctx_rev' ] = current_ctx_rev
+ kwd[ 'updated_repo_info_dict' ] = encoded_repo_info_dict
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='reselect_tool_panel_section',
+ **kwd ) )
+ else:
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='reinstall_repository',
+ **kwd ) )
else:
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='activate_repository',
@@ -581,6 +601,45 @@
else:
text = ''
return text
+ @web.expose
+ @web.require_admin
+ def get_tool_dependencies( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
+ """
+ Send a request to the appropriate tool shed to retrieve the dictionary of tool dependencies defined for the received repository name,
+ owner and changeset revision. The received repository_id is the encoded id of the installed tool shed repository in Galaxy. We need
+ it so that we can derive the tool shed from which it was installed.
+ """
+ repository = suc.get_installed_tool_shed_repository( trans, repository_id )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository_name, repository_owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ if len( raw_text ) > 2:
+ encoded_text = json.from_json_string( raw_text )
+ text = encoding_util.tool_shed_decode( encoded_text )
+ else:
+ text = ''
+ return text
+ @web.expose
+ @web.require_admin
+ def get_updated_repository_information( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
+ """
+ Send a request to the appropriate tool shed to retrieve the dictionary of information required to reinstall an updated revision of an
+ uninstalled tool shed repository.
+ """
+ repository = suc.get_installed_tool_shed_repository( trans, repository_id )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_updated_repository_information?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository_name, repository_owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ repo_information_dict = json.from_json_string( raw_text )
+ return repo_information_dict
def get_versions_of_tool( self, app, guid ):
tool_version = shed_util.get_tool_version( app, guid )
return tool_version.get_version_ids( app, reverse=True )
@@ -1149,7 +1208,7 @@
status = kwd.get( 'status', 'done' )
tool_shed_url = kwd[ 'tool_shed_url' ]
# Handle repository dependencies.
- includes_repository_dependencies = util.string_as_bool( kwd.get( 'includes_repository_dependencies', False ) )
+ has_repository_dependencies = util.string_as_bool( kwd.get( 'has_repository_dependencies', False ) )
install_repository_dependencies = kwd.get( 'install_repository_dependencies', '' )
# Every repository will be installed into the same tool panel section or all will be installed outside of any sections.
new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
@@ -1173,12 +1232,12 @@
response.close()
repo_information_dict = json.from_json_string( raw_text )
includes_tools = util.string_as_bool( repo_information_dict.get( 'includes_tools', False ) )
- includes_repository_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_repository_dependencies', False ) )
+ has_repository_dependencies = util.string_as_bool( repo_information_dict.get( 'has_repository_dependencies', False ) )
includes_tool_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_tool_dependencies', False ) )
encoded_repo_info_dicts = util.listify( repo_information_dict.get( 'repo_info_dicts', [] ) )
repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
- if ( not includes_tools and not includes_repository_dependencies ) or \
- ( ( includes_tools or includes_repository_dependencies ) and kwd.get( 'select_tool_panel_section_button', False ) ):
+ if ( not includes_tools and not has_repository_dependencies ) or \
+ ( ( includes_tools or has_repository_dependencies ) and kwd.get( 'select_tool_panel_section_button', False ) ):
install_repository_dependencies = CheckboxField.is_checked( install_repository_dependencies )
if includes_tools:
shed_tool_conf = kwd[ 'shed_tool_conf' ]
@@ -1241,7 +1300,7 @@
for tsr in created_or_updated_tool_shed_repositories:
tool_panel_section_keys.append( tool_panel_section_key )
new_kwd = dict( includes_tools=includes_tools,
- includes_repository_dependencies=includes_repository_dependencies,
+ has_repository_dependencies=has_repository_dependencies,
install_repository_dependencies=install_repository_dependencies,
includes_tool_dependencies=includes_tool_dependencies,
install_tool_dependencies=install_tool_dependencies,
@@ -1331,7 +1390,7 @@
includes_tools=includes_tools,
includes_tool_dependencies=includes_tool_dependencies,
install_tool_dependencies_check_box=install_tool_dependencies_check_box,
- includes_repository_dependencies=includes_repository_dependencies,
+ has_repository_dependencies=has_repository_dependencies,
install_repository_dependencies_check_box=install_repository_dependencies_check_box,
new_tool_panel_section=new_tool_panel_section,
containers_dict=containers_dict,
@@ -1364,7 +1423,7 @@
tool_panel_section_key = None
tool_panel_section_keys = []
metadata = tool_shed_repository.metadata
- # Keep track of tool dependencies define dfor the current repository or those defined for any of it's repository dependencies.
+ # Keep track of tool dependencies defined for the current repository or those defined for any of its repository dependencies.
includes_tool_dependencies = tool_shed_repository.includes_tool_dependencies
if tool_shed_repository.includes_tools:
# Handle the selected tool panel location for loading tools included in the tool shed repository.
@@ -1381,7 +1440,7 @@
tool_shed_repository.installed_changeset_revision,
tool_shed_repository.ctx_rev,
repository_clone_url,
- tool_shed_repository.metadata,
+ metadata,
trans.model.ToolShedRepository.installation_status.NEW,
tool_shed_repository.installed_changeset_revision,
tool_shed_repository.owner,
@@ -1402,6 +1461,10 @@
changeset_revision=tool_shed_repository.changeset_revision )
else:
repository_dependencies = None
+ if metadata:
+ tool_dependencies = metadata.get( 'tool_dependencies', None )
+ else:
+ tool_dependencies = None
repo_info_dict = suc.create_repo_info_dict( trans=trans,
repository_clone_url=repository_clone_url,
changeset_revision=tool_shed_repository.changeset_revision,
@@ -1410,15 +1473,15 @@
repository_name=tool_shed_repository.name,
repository=None,
repository_metadata=None,
- metadata=metadata,
+ tool_dependencies=tool_dependencies,
repository_dependencies=repository_dependencies )
repo_info_dicts.append( repo_info_dict )
# Make sure all tool_shed_repository records exist.
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
- shed_util.create_repository_dependency_objects( trans,
- tool_path,
- tool_shed_url,
- repo_info_dicts,
+ shed_util.create_repository_dependency_objects( trans=trans,
+ tool_path=tool_path,
+ tool_shed_url=tool_shed_url,
+ repo_info_dicts=repo_info_dicts,
reinstalling=True,
install_repository_dependencies=install_repository_dependencies,
no_changes_checked=no_changes_checked,
@@ -1472,40 +1535,62 @@
@web.expose
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
- """Select or change the tool panel section to contain the tools included in the tool shed repositories being reinstalled."""
+ """
+ Select or change the tool panel section to contain the tools included in the tool shed repository being reinstalled. If there are updates
+ available for the repository in the tool shed, the tool_dependencies and repository_dependencies associated with the updated changeset revision
will have been retrieved from the tool shed and passed in the received kwd. In this case, the stored tool shed repository metadata from the
+ Galaxy database will not be used since it is outdated.
+ """
message = ''
- repository_id = kwd[ 'id' ]
+ repository_id = kwd.get( 'id', None )
+ latest_changeset_revision = kwd.get( 'latest_changeset_revision', None )
+ latest_ctx_rev = kwd.get( 'latest_ctx_rev', None )
tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
metadata = tool_shed_repository.metadata
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
- ctx_rev = suc.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
- repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
tool_path, relative_install_dir = tool_shed_repository.get_tool_relative_path( trans.app )
- repository_dependencies = self.get_repository_dependencies( trans=trans,
- repository_id=repository_id,
- repository_name=tool_shed_repository.name,
- repository_owner=tool_shed_repository.owner,
- changeset_revision=tool_shed_repository.changeset_revision )
- if repository_dependencies:
- includes_repository_dependencies = True
+ if latest_changeset_revision and latest_ctx_rev:
+ # There are updates available in the tool shed for the repository, so use the received dependency information which was retrieved from
+ # the tool shed.
+ encoded_updated_repo_info_dict = kwd.get( 'updated_repo_info_dict', None )
+ updated_repo_info_dict = encoding_util.tool_shed_decode( encoded_updated_repo_info_dict )
+ readme_files_dict = updated_repo_info_dict.get( 'readme_files_dict', None )
+ includes_tools = updated_repo_info_dict.get( 'includes_tools', False )
+ has_repository_dependencies = updated_repo_info_dict.get( 'has_repository_dependencies', False )
+ includes_tool_dependencies = updated_repo_info_dict.get( 'includes_tool_dependencies', False )
+ repo_info_dict = updated_repo_info_dict[ 'repo_info_dict' ]
else:
- includes_repository_dependencies = False
- includes_tool_dependencies = tool_shed_repository.includes_tool_dependencies
- repo_info_dict = suc.create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.changeset_revision,
- ctx_rev=ctx_rev,
- repository_owner=tool_shed_repository.owner,
- repository_name=tool_shed_repository.name,
- repository=None,
- repository_metadata=None,
- metadata=metadata,
- repository_dependencies=repository_dependencies )
- if includes_repository_dependencies:
- # Discover all repository dependencies and retrieve information for installing them.
- required_repo_info_dicts = shed_util.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ # There are no updates available from the tool shed for the repository, so use its locally stored metadata.
+ if metadata:
+ readme_files_dict = suc.build_readme_files_dict( metadata )
+ tool_dependencies = metadata.get( 'tool_dependencies', None )
+ else:
+ readme_files_dict = None
+ tool_dependencies = None
+ includes_tool_dependencies = tool_shed_repository.includes_tool_dependencies
+ repository_dependencies = self.get_repository_dependencies( trans=trans,
+ repository_id=repository_id,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ changeset_revision=tool_shed_repository.changeset_revision )
+ repo_info_dict = suc.create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ ctx_rev=tool_shed_repository.ctx_rev,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
+ repository=None,
+ repository_metadata=None,
+ tool_dependencies=tool_dependencies,
+ repository_dependencies=repository_dependencies )
+ repository_name, repository_owner, changeset_revision, includes_tool_dependencies, installed_repository_dependencies, \
+ missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies = \
+ shed_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
+ if installed_repository_dependencies or missing_repository_dependencies:
+ has_repository_dependencies = True
else:
- required_repo_info_dicts = None
+ has_repository_dependencies = False
# Get the location in the tool panel in which the tool was originally loaded.
if 'tool_panel_section' in metadata:
tool_panel_dict = metadata[ 'tool_panel_section' ]
@@ -1533,14 +1618,14 @@
message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section. "
status = 'warning'
- # Populate the containers_dict from the metadata for the tool shed repository we're reinstalling, but make sure to include tool dependencies defined for
- # all of the repository's repository dependencies.
- containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- repository=tool_shed_repository,
- reinstalling=True,
- required_repo_info_dicts=required_repo_info_dicts )
+ containers_dict = shed_util.populate_containers_dict_for_new_install( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=readme_files_dict,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies )
# Since we're reinstalling we'll merge the list of missing repository dependencies into the list of installed repository dependencies since each displayed
# repository dependency will display a status, whether installed or missing.
containers_dict = suc.merge_missing_repository_dependencies_to_installed_container( containers_dict )
@@ -1561,7 +1646,7 @@
no_changes_check_box=no_changes_check_box,
original_section_name=original_section_name,
includes_tool_dependencies=includes_tool_dependencies,
- includes_repository_dependencies=includes_repository_dependencies,
+ has_repository_dependencies=has_repository_dependencies,
install_repository_dependencies_check_box=install_repository_dependencies_check_box,
install_tool_dependencies_check_box=install_tool_dependencies_check_box,
containers_dict=containers_dict,
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
@@ -33,7 +33,7 @@
${render_readme_section( containers_dict )}
<div style="clear: both"></div>
%endif
- %if includes_repository_dependencies or includes_tool_dependencies:
+ %if has_repository_dependencies or includes_tool_dependencies:
<div class="form-row"><table class="colored" width="100%"><th bgcolor="#EBD9B2">Confirm dependency installation</th>
diff -r f199b236ceb08e9588351223b6277861e9f87325 -r e6302ee56ed03e4cc9f95ff1720b08721ced5600 templates/admin/tool_shed_repository/select_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -17,10 +17,10 @@
<%
# Handle the case where an uninstalled repository encountered errors during the process of being reinstalled. In
- # this case, the repository metadata is an empty dictionary, but one or both of includes_repository_dependencies
+ # this case, the repository metadata is an empty dictionary, but one or both of has_repository_dependencies
# and includes_tool_dependencies may be True. If either of these are True but we have no metadata, we cannot install
# repository dependencies on this pass.
- if includes_repository_dependencies:
+ if has_repository_dependencies:
repository_dependencies = containers_dict[ 'repository_dependencies' ]
missing_repository_dependencies = containers_dict[ 'missing_repository_dependencies' ]
if repository_dependencies or missing_repository_dependencies:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0