galaxy-dev
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2009 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2008 -----
- December
- November
- October
- September
- August
June 2009
- 6 participants
- 50 discussions
19 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/d3abf05d9272
changeset: 2456:d3abf05d9272
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Fri Jun 19 11:21:13 2009 -0400
description:
Fix for importing history items ( datasets ) that include child datasets into a library.
1 file(s) affected in this change:
lib/galaxy/model/__init__.py
diffs (42 lines):
diff -r 075c0fd5b1d5 -r d3abf05d9272 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Jun 18 12:10:36 2009 -0400
+++ b/lib/galaxy/model/__init__.py Fri Jun 19 11:21:13 2009 -0400
@@ -628,7 +628,7 @@
hda.set_peek()
hda.flush()
return hda
- def to_library_dataset_dataset_association( self, target_folder, replace_dataset=None, parent_id=None ):
+ def to_library_dataset_dataset_association( self, target_folder, replace_dataset=None, parent_id=None, user=None ):
if replace_dataset:
# The replace_dataset param ( when not None ) refers to a LibraryDataset that is being replaced with a new version.
library_dataset = replace_dataset
@@ -637,6 +637,8 @@
# LibraryDataset, and the current user's DefaultUserPermissions will be applied to the associated Dataset.
library_dataset = LibraryDataset( folder=target_folder, name=self.name, info=self.info )
library_dataset.flush()
+ if not user:
+ user = self.history.user
ldda = LibraryDatasetDatasetAssociation( name=self.name,
info=self.info,
blurb=self.blurb,
@@ -649,7 +651,7 @@
deleted=self.deleted,
parent_id=parent_id,
copied_from_history_dataset_association=self,
- user=self.history.user )
+ user=user )
ldda.flush()
# Permissions must be the same on the LibraryDatasetDatasetAssociation and the associated LibraryDataset
# Must set metadata after ldda flushed, as MetadataFiles require ldda.id
@@ -660,7 +662,10 @@
library_dataset.library_dataset_dataset_association_id = ldda.id
library_dataset.flush()
for child in self.children:
- child_copy = child.to_library_dataset_dataset_association( target_folder=target_folder, replace_dataset=replace_dataset, parent_id=ldda.id )
+ child_copy = child.to_library_dataset_dataset_association( target_folder=target_folder,
+ replace_dataset=replace_dataset,
+ parent_id=ldda.id,
+ user=ldda.user )
if not self.datatype.copy_safe_peek:
# In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
ldda.set_peek()
1
0
18 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/075c0fd5b1d5
changeset: 2455:075c0fd5b1d5
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Thu Jun 18 12:10:36 2009 -0400
description:
Fix for detecting metadata column types in tabular files, the number of lines read for detecting column types will always be the max setting, with previously detected column types being overridden if appropriate ( e.g., int overridden with float ). This will only occur in files whose data varies between types within the same column.
1 file(s) affected in this change:
lib/galaxy/datatypes/tabular.py
diffs (14 lines):
diff -r d83b9225b5dd -r 075c0fd5b1d5 lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py Tue Jun 16 13:54:33 2009 -0400
+++ b/lib/galaxy/datatypes/tabular.py Thu Jun 18 12:10:36 2009 -0400
@@ -121,8 +121,8 @@
#"column_types": ["int", "int", "str", "list"]
first_line_column_types = column_types
column_types = [ None for col in first_line_column_types ]
- elif ( column_types and None not in column_types ) or i > num_check_lines:
- #found and set all known columns, or we exceeded our max check lines
+ elif i > num_check_lines:
+ # We exceeded our max check lines
break
#we error on the larger number of columns
1
0
18 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/d83b9225b5dd
changeset: 2454:d83b9225b5dd
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Jun 16 13:54:33 2009 -0400
description:
Add the hostname of the Galaxy instance to error reports.
2 file(s) affected in this change:
lib/galaxy/web/controllers/dataset.py
lib/galaxy/web/framework/base.py
diffs (54 lines):
diff -r e899101e63d1 -r d83b9225b5dd lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Tue Jun 16 09:55:15 2009 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Tue Jun 16 13:54:33 2009 -0400
@@ -16,7 +16,10 @@
GALAXY TOOL ERROR REPORT
------------------------
-This error report is in reference to output dataset ${dataset_id}.
+This error report was sent from the Galaxy instance hosted on the server
+"${remote_hostname}"
+-----------------------------------------------------------------------------
+This is in reference to output dataset ${dataset_id}.
-----------------------------------------------------------------------------
The user '${email}' provided the following information:
${message}
@@ -51,7 +54,7 @@
return job.stderr
@web.expose
- def report_error( self, trans, id, email="no email provided", message="" ):
+ def report_error( self, trans, id, email='', message="" ):
smtp_server = trans.app.config.smtp_server
if smtp_server is None:
return trans.show_error_message( "Sorry, mail is not configured for this galaxy instance" )
@@ -61,9 +64,12 @@
# Get the dataset and associated job
dataset = model.HistoryDatasetAssociation.get( id )
job = dataset.creating_job_associations[0].job
+ # Get the name of the server hosting the Galaxy instance from which this report originated
+ remote_hostname = trans.request.remote_hostname
# Build the email message
msg = MIMEText( string.Template( error_report_template )
- .safe_substitute( dataset_id=dataset.id,
+ .safe_substitute( remote_hostname=remote_hostname,
+ dataset_id=dataset.id,
email=email,
message=message,
job_id=job.id,
diff -r e899101e63d1 -r d83b9225b5dd lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py Tue Jun 16 09:55:15 2009 -0400
+++ b/lib/galaxy/web/framework/base.py Tue Jun 16 13:54:33 2009 -0400
@@ -230,6 +230,12 @@
except socket.error:
return self.remote_addr
@lazy_property
+ def remote_hostname( self ):
+ try:
+ return socket.gethostbyaddr( self.remote_addr )[0]
+ except socket.error:
+ return self.remote_addr
+ @lazy_property
def cookies( self ):
return get_cookies( self.environ )
@lazy_property
1
0
Hello Len,
This is not currently possible in a clean fashion, and providing this
feature will take a bit longer than I had originally thought, so I've
opened the following ticket for this. You can "follow" the issue in
bitbucket if you want. Thanks for sending this.
http://bitbucket.org/galaxy/galaxy-central/issue/97/enable-ability-to-have-…
Greg Von Kuster
Galaxy Development Team
lentaing(a)jimmy.harvard.edu wrote:
> Hi Greg,
>
> We have a program that can either output 1 (a bed file) or 2 files (a bed
> and a wig file). We'd like to give the user the option to select whether
> they want 1 or 2 files; in galaxy, is there a way to make the NUMBER of
> outputs dependent on a variable? I tried this, but it didn't work--when i
> select "False" for $two_out, galaxy still generates two output files:
>
> ...
> <command interpreter="bash">test_cond_out.sh $output1
> </command>
> <inputs>
> <param name="two_out" type="select" display="radio">
> <option value="true">True</option>
> <option value="false">False</option>
> </param>
> </inputs>
> <outputs>
> #if $two_out == "true":
> <data format="bed" name="output1" />
> <data format="bed" name="output2" />
> #else:
> <data format="bed" name="output1" />
> #end if
> </outputs>
> ...
>
> Thanks,
>
> Len
>
>
>
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/ee43bace03db
changeset: 2451:ee43bace03db
user: James Taylor <james(a)jamestaylor.org>
date: Thu Jun 11 15:41:57 2009 -0400
description:
Commenting out 'other dbkey' in upload
1 file(s) affected in this change:
tools/data_source/upload.xml
diffs (12 lines):
diff -r 143dde05e1df -r ee43bace03db tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Jun 11 15:37:19 2009 -0400
+++ b/tools/data_source/upload.xml Thu Jun 11 15:41:57 2009 -0400
@@ -25,7 +25,7 @@
</param>
</upload_dataset>
<param name="dbkey" type="genomebuild" label="Genome" />
- <param name="other_dbkey" type="text" label="Or user-defined Genome" />
+ <!-- <param name="other_dbkey" type="text" label="Or user-defined Genome" /> -->
</inputs>
<help>
1
0
16 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/c44567359a03
changeset: 2452:c44567359a03
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Fri Jun 12 09:56:23 2009 -0400
description:
Bug fix for metadata, should correct functional tests ( I hope ).
1 file(s) affected in this change:
lib/galaxy/datatypes/metadata.py
diffs (21 lines):
diff -r ee43bace03db -r c44567359a03 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py Thu Jun 11 15:41:57 2009 -0400
+++ b/lib/galaxy/datatypes/metadata.py Fri Jun 12 09:56:23 2009 -0400
@@ -295,13 +295,14 @@
def get_html_field( self, value=None, context={}, other_values={}, values=None, **kwd):
try:
values = kwd['trans'].db_builds
- except AttributeError: pass
+ except KeyError:
+ pass
return super(DBKeyParameter, self).get_html_field( value, context, other_values, values, **kwd)
-
def get_html( self, value=None, context={}, other_values={}, values=None, **kwd):
try:
values = kwd['trans'].db_builds
- except AttributeError: pass
+ except KeyError:
+ pass
return super(DBKeyParameter, self).get_html( value, context, other_values, values, **kwd)
1
0
16 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/e899101e63d1
changeset: 2453:e899101e63d1
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Jun 16 09:55:15 2009 -0400
description:
Fixes and functional tests for uploading directories of files from both the Admin view and the Libraries view ( requires new config setting ).
In addition to the config setting "library_import_dir" for the Admin view, the new config setting "user_library_import_dir" for the Libraries view will allow non-admin users to upload a directory of files.
The configured directory must contain sub-directories named the same as the non-admin user's Galaxy login ( email ). The non-admin user is restricted to uploading files or sub-directories of files contained in their directory.
10 file(s) affected in this change:
lib/galaxy/config.py
lib/galaxy/web/controllers/library_dataset.py
templates/admin/library/new_dataset.mako
templates/library/new_dataset.mako
test-data/users/test1(a)bx.psu.edu/1.fasta
test-data/users/test3(a)bx.psu.edu/run1/2.fasta
test/base/twilltestcase.py
test/functional/__init__.py
test/functional/test_security_and_libraries.py
universe_wsgi.ini.sample
diffs (1056 lines):
diff -r c44567359a03 -r e899101e63d1 lib/galaxy/config.py
--- a/lib/galaxy/config.py Fri Jun 12 09:56:23 2009 -0400
+++ b/lib/galaxy/config.py Tue Jun 16 09:55:15 2009 -0400
@@ -81,6 +81,9 @@
self.library_import_dir = kwargs.get( 'library_import_dir', None )
if self.library_import_dir is not None and not os.path.exists( self.library_import_dir ):
raise ConfigurationError( "library_import_dir specified in config (%s) does not exist" % self.library_import_dir )
+ self.user_library_import_dir = kwargs.get( 'user_library_import_dir', None )
+ if self.user_library_import_dir is not None and not os.path.exists( self.user_library_import_dir ):
+ raise ConfigurationError( "user_library_import_dir specified in config (%s) does not exist" % self.user_library_import_dir )
# Configuration options for taking advantage of nginx features
self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
self.nginx_upload_location = kwargs.get( 'nginx_upload_store', False )
diff -r c44567359a03 -r e899101e63d1 lib/galaxy/web/controllers/library_dataset.py
--- a/lib/galaxy/web/controllers/library_dataset.py Fri Jun 12 09:56:23 2009 -0400
+++ b/lib/galaxy/web/controllers/library_dataset.py Tue Jun 16 09:55:15 2009 -0400
@@ -1,4 +1,4 @@
-import os, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile
+import os, os.path, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile
from galaxy.web.base.controller import *
from galaxy import util, jobs
from galaxy.datatypes import sniff
@@ -193,7 +193,7 @@
file_format = params.get( 'file_format', 'auto' )
data_file = params.get( 'file_data', '' )
url_paste = params.get( 'url_paste', '' )
- server_dir = params.get( 'server_dir', None )
+ server_dir = util.restore_text( params.get( 'server_dir', '' ) )
if replace_dataset is not None:
replace_id = replace_dataset.id
else:
@@ -222,10 +222,17 @@
elif upload_option == 'upload_directory':
if server_dir in [ None, 'None', '' ]:
err_redirect = True
- if trans.app.config.library_import_dir:
- msg = 'Select a server directory'
- else:
- msg = '"library_import_dir" is not defined in the Galaxy configuration file'
+ # See if our request is from the Admin view or the Libraries view
+ if trans.request.browser_url.find( 'admin' ) >= 0:
+ import_dir = trans.app.config.library_import_dir
+ import_dir_desc = 'library_import_dir'
+ else:
+ import_dir = trans.app.config.user_library_import_dir
+ import_dir_desc = 'user_library_import_dir'
+ if import_dir:
+ msg = 'Select a directory'
+ else:
+ msg = '"%s" is not defined in the Galaxy configuration file' % import_dir_desc
if err_redirect:
trans.response.send_redirect( web.url_for( controller=controller,
action='library_dataset_dataset_association',
@@ -264,7 +271,7 @@
created_ldda_ids = str( created_ldda.id )
except Exception, e:
log.exception( 'exception in upload_dataset using file_name %s: %s' % ( str( file_name ), str( e ) ) )
- return self.upload_empty( trans, controller, library_id, "Error:", str( e ) )
+ return self.upload_empty( trans, controller, library_id, folder_id, "Error:", str( e ) )
elif url_paste not in [ None, "" ]:
if url_paste.lower().find( 'http://' ) >= 0 or url_paste.lower().find( 'ftp://' ) >= 0:
url_paste = url_paste.replace( '\r', '' ).split( '\n' )
@@ -292,7 +299,7 @@
created_ldda_ids = '%s,%s' % ( created_ldda_ids, str( created_ldda.id ) )
except Exception, e:
log.exception( 'exception in upload_dataset using url_paste %s' % str( e ) )
- return self.upload_empty( trans, controller, library_id, "Error:", str( e ) )
+ return self.upload_empty( trans, controller, library_id, folder_id, "Error:", str( e ) )
else:
is_valid = False
for line in url_paste:
@@ -318,13 +325,36 @@
created_ldda_ids = '%s,%s' % ( created_ldda_ids, str( created_ldda.id ) )
except Exception, e:
log.exception( 'exception in add_file using StringIO.StringIO( url_paste ) %s' % str( e ) )
- return self.upload_empty( trans, controller, library_id, "Error:", str( e ) )
+ return self.upload_empty( trans, controller, library_id, folder_id, "Error:", str( e ) )
elif server_dir not in [ None, "", "None" ]:
- full_dir = os.path.join( trans.app.config.library_import_dir, server_dir )
+ # See if our request is from the Admin view or the Libraries view
+ if trans.request.browser_url.find( 'admin' ) >= 0:
+ import_dir = trans.app.config.library_import_dir
+ import_dir_desc = 'library_import_dir'
+ full_dir = os.path.join( import_dir, server_dir )
+ else:
+ imrport_dir = trans.app.config.user_library_import_dir
+ import_dir_desc = 'user_library_import_dir'
+ # From the Libraries view, users are restricted to the directory named the same as
+ # their email within the configured user_library_import_dir. If this directory contains
+ # sub-directories, server_dir will be the name of the selected sub-directory. Otherwise
+ # server_dir will be the user's email address.
+ if server_dir == trans.user.email:
+ full_dir = os.path.join( import_dir, server_dir )
+ else:
+ full_dir = os.path.join( import_dir, trans.user.email, server_dir )
+ files = []
try:
- files = os.listdir( full_dir )
- except:
- log.debug( "Unable to get file list for configured library_import_dir %s" % full_dir )
+ for entry in os.listdir( full_dir ):
+ # Only import regular files
+ if os.path.isfile( os.path.join( full_dir, entry ) ):
+ files.append( entry )
+ except Exception, e:
+ msg = "Unable to get file list for configured %s, error: %s" % ( import_dir_desc, str( e ) )
+ return self.upload_empty( trans, controller, library_id, folder_id, "Error:", msg )
+ if not files:
+ msg = "The directory '%s' contains no valid files" % full_dir
+ return self.upload_empty( trans, controller, library_id, folder_id, "Error:", msg )
for file in files:
full_file = os.path.join( full_dir, file )
if not os.path.isfile( full_file ):
@@ -346,7 +376,7 @@
created_ldda_ids = '%s,%s' % ( created_ldda_ids, str( created_ldda.id ) )
except Exception, e:
log.exception( 'exception in add_file using server_dir %s' % str( e ) )
- return self.upload_empty( trans, controller, library_id, "Error:", str( e ) )
+ return self.upload_empty( trans, controller, library_id, folder_id, "Error:", str( e ) )
if created_ldda_ids:
created_ldda_ids = created_ldda_ids.lstrip( ',' )
return created_ldda_ids
@@ -425,11 +455,12 @@
if chunk is None:
temp.close()
return False
- def upload_empty( self, trans, controller, library_id, err_code, err_msg ):
+ def upload_empty( self, trans, controller, library_id, folder_id, err_code, err_msg ):
msg = err_code + err_msg
return trans.response.send_redirect( web.url_for( controller=controller,
- action='browse_library',
- id=library_id,
+ action='library_dataset_dataset_association',
+ library_id=library_id,
+ folder_id=folder_id,
msg=util.sanitize_text( msg ),
messagetype='error' ) )
class BadFileException( Exception ):
diff -r c44567359a03 -r e899101e63d1 templates/admin/library/new_dataset.mako
--- a/templates/admin/library/new_dataset.mako Fri Jun 12 09:56:23 2009 -0400
+++ b/templates/admin/library/new_dataset.mako Tue Jun 16 09:55:15 2009 -0400
@@ -2,13 +2,15 @@
<%namespace file="/message.mako" import="render_msg" />
<%namespace file="/admin/library/common.mako" import="render_available_templates" />
-<% import os %>
+<% import os, os.path %>
<b>Create new library datasets</b>
<a id="upload-librarydataset--popup" class="popup-arrow" style="display: none;">▼</a>
<div popupmenu="upload-librarydataset--popup">
<a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_file' )}">Upload files</a>
- <a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory' )}">Upload directory of files</a>
+ %if trans.app.config.library_import_dir and os.path.exists( trans.app.config.library_import_dir ):
+ <a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory' )}">Upload directory of files</a>
+ %endif
<a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='import_from_history' )}">Import datasets from your current history</a>
</div>
<br/><br/>
@@ -26,135 +28,149 @@
<div class="toolForm" id="new_dataset">
%if upload_option == 'upload_file':
<div class="toolFormTitle">Upload files</div>
- %elif upload_option == 'upload_directory':
+ %else:
<div class="toolFormTitle">Upload a directory of files</div>
%endif
- %if upload_option == 'upload_directory' and not trans.app.config.library_import_dir:
- <p/>
- "library_import_dir" is not defined in the Galaxy configuration file
- <p/>
- %else:
- <div class="toolFormBody">
- <form name="tool_form" action="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id )}" enctype="multipart/form-data" method="post">
- <input type="hidden" name="folder_id" value="${folder_id}"/>
- <input type="hidden" name="upload_option" value="${upload_option}"/>
- %if replace_dataset:
- <input type="hidden" name="replace_id" value="${replace_dataset.id}"/>
- <div class="form-row">
- You are currently selecting a new file to replace '<a href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder_id, id=replace_dataset.library_dataset_dataset_association.id )}">${replace_dataset.name}</a>'.
- <div style="clear: both"></div>
- </div>
- %endif
- %if upload_option == 'upload_file':
- <div class="form-row">
- <label>File:</label>
- <div style="float: left; width: 250px; margin-right: 10px;">
- <input type="file" name="file_data"/>
- </div>
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <label>URL/Text:</label>
- <div style="float: left; width: 250px; margin-right: 10px;">
- <textarea name="url_paste" rows="5" cols="35"></textarea>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- Specify a list of URLs (one per line) or paste the contents of a file.
- </div>
- <div style="clear: both"></div>
- </div>
- %elif upload_option == 'upload_directory':
- <div class="form-row">
- <label>Server Directory</label>
- <div style="float: left; width: 250px; margin-right: 10px;">
- <select name="server_dir">
- <option>None</option>
- %for dir in os.listdir( trans.app.config.library_import_dir ):
- <option>${dir}</option>
- %endfor
- </select>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- Upload all files in a subdirectory of <strong>${trans.app.config.library_import_dir}</strong> on the Galaxy server.
- </div>
- <div style="clear: both"></div>
- </div>
- %endif
+ <div class="toolFormBody">
+ <form name="tool_form" action="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id )}" enctype="multipart/form-data" method="post">
+ <input type="hidden" name="folder_id" value="${folder_id}"/>
+ <input type="hidden" name="upload_option" value="${upload_option}"/>
+ %if replace_dataset:
+ <input type="hidden" name="replace_id" value="${replace_dataset.id}"/>
<div class="form-row">
- <label>Convert spaces to tabs:</label>
+ You are currently selecting a new file to replace '<a href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder_id, id=replace_dataset.library_dataset_dataset_association.id )}">${replace_dataset.name}</a>'.
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ %if upload_option == 'upload_file':
+ <div class="form-row">
+ <label>File:</label>
<div style="float: left; width: 250px; margin-right: 10px;">
- <div>
- <input type="checkbox" name="space_to_tab" value="Yes"/>Yes
- </div>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- Use this option if you are manually entering intervals.
+ <input type="file" name="file_data"/>
</div>
<div style="clear: both"></div>
</div>
<div class="form-row">
- <label>File Format:</label>
+ <label>URL/Text:</label>
<div style="float: left; width: 250px; margin-right: 10px;">
- <select name="file_format">
- <option value="auto" selected>Auto-detect</option>
- %for file_format in file_formats:
- <option value="${file_format}">${file_format}</option>
- %endfor
- </select>
+ <textarea name="url_paste" rows="5" cols="35"></textarea>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Specify a list of URLs (one per line) or paste the contents of a file.
</div>
<div style="clear: both"></div>
</div>
+ %elif upload_option == 'upload_directory':
<div class="form-row">
- <label>Genome:</label>
+ <%
+ # See if we have any contained sub-directories, if not the only option
+ # in the server_dir select list will be library_import_dir
+ contains_directories = False
+ for entry in os.listdir( trans.app.config.library_import_dir ):
+ if os.path.isdir( os.path.join( trans.app.config.library_import_dir, entry ) ):
+ contains_directories = True
+ break
+ %>
+ <label>Server Directory</label>
<div style="float: left; width: 250px; margin-right: 10px;">
- <select name="dbkey">
- %for dbkey in dbkeys:
- %if dbkey[1] == last_used_build:
- <option value="${dbkey[1]}" selected>${dbkey[0]}</option>
- %else:
- <option value="${dbkey[1]}">${dbkey[0]}</option>
- %endif
- %endfor
+ <select name="server_dir">
+ %if contains_directories:
+ <option>None</option>
+ %for entry in os.listdir( trans.app.config.library_import_dir ):
+ ## Do not include entries that are not directories
+ %if os.path.isdir( os.path.join( trans.app.config.library_import_dir, entry ) ):
+ <option>${entry}</option>
+ %endif
+ %endfor
+ %else:
+ <option>${trans.app.config.library_import_dir}</option>
+ %endif
</select>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ %if contains_directories:
+ Upload all files in a sub-directory of <strong>${trans.app.config.library_import_dir}</strong> on the Galaxy server.
+ %else:
+ Upload all files in <strong>${trans.app.config.library_import_dir}</strong> on the Galaxy server.
+ %endif
</div>
<div style="clear: both"></div>
</div>
- <div class="form-row">
- <label>Message:</label>
- <div style="float: left; width: 250px; margin-right: 10px;">
- <textarea name="message" rows="3" cols="35"></textarea>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- This information will be displayed in the "Information" column for this dataset in the library browser
- </div>
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <div style="float: left; width: 250px; margin-right: 10px;">
- <label>Restrict dataset access to specific roles:</label>
- <select name="roles" multiple="true" size="5">
- %for role in roles:
- <option value="${role.id}">${role.name}</option>
- %endfor
- </select>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- Multi-select list - hold the appropriate key while clicking to select multiple roles. More restrictions can be applied after the upload is complete. Selecting no roles makes a dataset public.
+ %endif
+ <div class="form-row">
+ <label>Convert spaces to tabs:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <div>
+ <input type="checkbox" name="space_to_tab" value="Yes"/>Yes
</div>
</div>
+ <div class="toolParamHelp" style="clear: both;">
+ Use this option if you are manually entering intervals.
+ </div>
<div style="clear: both"></div>
- <% folder = trans.app.model.LibraryFolder.get( folder_id ) %>
- %if folder.library_folder_info_template_associations:
- ${render_available_templates( folder, library_id, restrict=True, upload=True )}
- %else:
- ${render_available_templates( folder, library_id, restrict=False, upload=True )}
- %endif
- <div class="form-row">
- <input type="submit" class="primary-button" name="new_dataset_button" value="Upload to library"/>
+ </div>
+ <div class="form-row">
+ <label>File Format:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <select name="file_format">
+ <option value="auto" selected>Auto-detect</option>
+ %for file_format in file_formats:
+ <option value="${file_format}">${file_format}</option>
+ %endfor
+ </select>
</div>
- </form>
- </div>
- %endif
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Genome:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <select name="dbkey">
+ %for dbkey in dbkeys:
+ %if dbkey[1] == last_used_build:
+ <option value="${dbkey[1]}" selected>${dbkey[0]}</option>
+ %else:
+ <option value="${dbkey[1]}">${dbkey[0]}</option>
+ %endif
+ %endfor
+ </select>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Message:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <textarea name="message" rows="3" cols="35"></textarea>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ This information will be displayed in the "Information" column for this dataset in the library browser
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <label>Restrict dataset access to specific roles:</label>
+ <select name="roles" multiple="true" size="5">
+ %for role in roles:
+ <option value="${role.id}">${role.name}</option>
+ %endfor
+ </select>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Multi-select list - hold the appropriate key while clicking to select multiple roles. More restrictions can be applied after the upload is complete. Selecting no roles makes a dataset public.
+ </div>
+ </div>
+ <div style="clear: both"></div>
+ <% folder = trans.app.model.LibraryFolder.get( folder_id ) %>
+ %if folder.library_folder_info_template_associations:
+ ${render_available_templates( folder, library_id, restrict=True, upload=True )}
+ %else:
+ ${render_available_templates( folder, library_id, restrict=False, upload=True )}
+ %endif
+ <div class="form-row">
+ <input type="submit" class="primary-button" name="new_dataset_button" value="Upload to library"/>
+ </div>
+ </form>
+ </div>
</div>
%elif upload_option == 'import_from_history':
<div class="toolForm">
diff -r c44567359a03 -r e899101e63d1 templates/library/new_dataset.mako
--- a/templates/library/new_dataset.mako Fri Jun 12 09:56:23 2009 -0400
+++ b/templates/library/new_dataset.mako Tue Jun 16 09:55:15 2009 -0400
@@ -2,13 +2,15 @@
<%namespace file="/message.mako" import="render_msg" />
<%namespace file="/library/common.mako" import="render_available_templates" />
-<% import os %>
+<% import os, os.path %>
<b>Create new library datasets</b>
<a id="upload-librarydataset--popup" class="popup-arrow" style="display: none;">▼</a>
<div popupmenu="upload-librarydataset--popup">
<a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_file' )}">Upload files</a>
- <a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory' )}">Upload directory of files</a>
+ %if trans.app.config.user_library_import_dir and os.path.exists( os.path.join( trans.app.config.user_library_import_dir, trans.user.email ) ):
+ <a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_directory' )}">Upload directory of files</a>
+ %endif
<a class="action-button" href="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='import_from_history' )}">Import datasets from your current history</a>
</div>
<br/><br/>
@@ -31,135 +33,153 @@
<div class="toolForm" id="new_dataset">
%if upload_option == 'upload_file':
<div class="toolFormTitle">Upload files</div>
- %elif upload_option == 'upload_directory':
+ %else:
<div class="toolFormTitle">Upload a directory of files</div>
%endif
- %if upload_option == 'upload_directory' and not trans.app.config.library_import_dir:
- <p/>
- "library_import_dir" is not defined in the Galaxy configuration file
- <p/>
- %else:
- <div class="toolFormBody">
- <form name="tool_form" action="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id )}" enctype="multipart/form-data" method="post">
- <input type="hidden" name="folder_id" value="${folder_id}"/>
- <input type="hidden" name="upload_option" value="${upload_option}"/>
- %if replace_dataset:
- <input type="hidden" name="replace_id" value="${replace_dataset.id}"/>
- <div class="form-row">
- You are currently selecting a new file to replace '<a href="${h.url_for( controller='library', action='library_dataset', id=replace_dataset.id )}">${replace_dataset.name}</a>'.
- <div style="clear: both"></div>
- </div>
- %endif
- %if upload_option == 'upload_file':
- <div class="form-row">
- <label>File:</label>
- <div style="float: left; width: 250px; margin-right: 10px;">
- <input type="file" name="file_data"/>
- </div>
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <label>URL/Text:</label>
- <div style="float: left; width: 250px; margin-right: 10px;">
- <textarea name="url_paste" rows="5" cols="35"></textarea>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- Specify a list of URLs (one per line) or paste the contents of a file.
- </div>
- <div style="clear: both"></div>
- </div>
- %elif upload_option == 'upload_directory':
- <div class="form-row">
- <label>Server Directory</label>
- <div style="float: left; width: 250px; margin-right: 10px;">
- <select name="server_dir">
- <option>None</option>
- %for dir in os.listdir( trans.app.config.library_import_dir ):
- <option>${dir}</option>
- %endfor
- </select>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- Upload all files in a subdirectory of <strong>${trans.app.config.library_import_dir}</strong> on the Galaxy server.
- </div>
- <div style="clear: both"></div>
- </div>
- %endif
+ <div class="toolFormBody">
+ <form name="tool_form" action="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id )}" enctype="multipart/form-data" method="post">
+ <input type="hidden" name="folder_id" value="${folder_id}"/>
+ <input type="hidden" name="upload_option" value="${upload_option}"/>
+ %if replace_dataset:
+ <input type="hidden" name="replace_id" value="${replace_dataset.id}"/>
<div class="form-row">
- <label>Convert spaces to tabs:</label>
+ You are currently selecting a new file to replace '<a href="${h.url_for( controller='library', action='library_dataset', id=replace_dataset.id )}">${replace_dataset.name}</a>'.
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ %if upload_option == 'upload_file':
+ <div class="form-row">
+ <label>File:</label>
<div style="float: left; width: 250px; margin-right: 10px;">
- <div>
- <input type="checkbox" name="space_to_tab" value="Yes"/>Yes
- </div>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- Use this option if you are manually entering intervals.
+ <input type="file" name="file_data"/>
</div>
<div style="clear: both"></div>
</div>
<div class="form-row">
- <label>File Format:</label>
+ <label>URL/Text:</label>
<div style="float: left; width: 250px; margin-right: 10px;">
- <select name="file_format">
- <option value="auto" selected>Auto-detect</option>
- %for file_format in file_formats:
- <option value="${file_format}">${file_format}</option>
- %endfor
- </select>
+ <textarea name="url_paste" rows="5" cols="35"></textarea>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Specify a list of URLs (one per line) or paste the contents of a file.
</div>
<div style="clear: both"></div>
</div>
+ %elif upload_option == 'upload_directory':
<div class="form-row">
- <label>Genome:</label>
+ <%
+ # Directories of files from the Libraries view are restricted to a
+ # sub-directory named the same as the current user's email address
+ # contained within the configured setting for user_library_import_dir
+ user_library_import_dir = os.path.join( trans.app.config.user_library_import_dir, trans.user.email )
+ # See if we have any contained sub-directories, if not the only option
+ # in the server_dir select list will be user_library_import_dir
+ contains_directories = False
+ for entry in os.listdir( user_library_import_dir ):
+ if os.path.isdir( os.path.join( user_library_import_dir, entry ) ):
+ contains_directories = True
+ break
+ %>
+ <label>Server Directory</label>
<div style="float: left; width: 250px; margin-right: 10px;">
- <select name="dbkey">
- %for dbkey in dbkeys:
- %if dbkey[1] == last_used_build:
- <option value="${dbkey[1]}" selected>${dbkey[0]}</option>
- %else:
- <option value="${dbkey[1]}">${dbkey[0]}</option>
- %endif
- %endfor
+ <select name="server_dir">
+ %if contains_directories:
+ <option>None</option>
+ %for entry in os.listdir( user_library_import_dir ):
+ ## Do not include entries that are not directories
+ %if os.path.isdir( os.path.join( user_library_import_dir, entry ) ):
+ <option>${entry}</option>
+ %endif
+ %endfor
+ %else:
+ <option>${trans.user.email}</option>
+ %endif
</select>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ %if contains_directories:
+                                        Upload all files in a subdirectory of <strong>${user_library_import_dir}</strong> on the Galaxy server.
+ %else:
+                                        Upload all files in <strong>${user_library_import_dir}</strong> on the Galaxy server.
+ %endif
</div>
<div style="clear: both"></div>
</div>
- <div class="form-row">
- <label>Message:</label>
- <div style="float: left; width: 250px; margin-right: 10px;">
- <textarea name="message" rows="3" cols="35"></textarea>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- This information will be displayed in the "Information" column for this dataset in the library browser
- </div>
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <div style="float: left; width: 250px; margin-right: 10px;">
- <label>Restrict dataset access to specific roles:</label>
- <select name="roles" multiple="true" size="5">
- %for role in roles:
- <option value="${role.id}">${role.name}</option>
- %endfor
- </select>
- </div>
- <div class="toolParamHelp" style="clear: both;">
- Multi-select list - hold the appropriate key while clicking to select multiple roles. More restrictions can be applied after the upload is complete. Selecting no roles makes a dataset public.
+ %endif
+ <div class="form-row">
+ <label>Convert spaces to tabs:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <div>
+ <input type="checkbox" name="space_to_tab" value="Yes"/>Yes
</div>
</div>
+ <div class="toolParamHelp" style="clear: both;">
+ Use this option if you are manually entering intervals.
+ </div>
<div style="clear: both"></div>
- <% folder = trans.app.model.LibraryFolder.get( folder_id ) %>
- %if folder.library_folder_info_template_associations:
- ${render_available_templates( folder, library_id, restrict=True, upload=True )}
- %else:
- ${render_available_templates( folder, library_id, restrict=False, upload=True )}
- %endif
- <div class="form-row">
- <input type="submit" class="primary-button" name="new_dataset_button" value="Upload to library"/>
+ </div>
+ <div class="form-row">
+ <label>File Format:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <select name="file_format">
+ <option value="auto" selected>Auto-detect</option>
+ %for file_format in file_formats:
+ <option value="${file_format}">${file_format}</option>
+ %endfor
+ </select>
</div>
- </form>
- </div>
- %endif
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Genome:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <select name="dbkey">
+ %for dbkey in dbkeys:
+ %if dbkey[1] == last_used_build:
+ <option value="${dbkey[1]}" selected>${dbkey[0]}</option>
+ %else:
+ <option value="${dbkey[1]}">${dbkey[0]}</option>
+ %endif
+ %endfor
+ </select>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Message:</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <textarea name="message" rows="3" cols="35"></textarea>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ This information will be displayed in the "Information" column for this dataset in the library browser
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <label>Restrict dataset access to specific roles:</label>
+ <select name="roles" multiple="true" size="5">
+ %for role in roles:
+ <option value="${role.id}">${role.name}</option>
+ %endfor
+ </select>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Multi-select list - hold the appropriate key while clicking to select multiple roles. More restrictions can be applied after the upload is complete. Selecting no roles makes a dataset public.
+ </div>
+ </div>
+ <div style="clear: both"></div>
+ <% folder = trans.app.model.LibraryFolder.get( folder_id ) %>
+ %if folder.library_folder_info_template_associations:
+ ${render_available_templates( folder, library_id, restrict=True, upload=True )}
+ %else:
+ ${render_available_templates( folder, library_id, restrict=False, upload=True )}
+ %endif
+ <div class="form-row">
+ <input type="submit" class="primary-button" name="new_dataset_button" value="Upload to library"/>
+ </div>
+ </form>
+ </div>
</div>
%elif upload_option == 'import_from_history':
<div class="toolForm">
diff -r c44567359a03 -r e899101e63d1 test-data/users/test1(a)bx.psu.edu/1.fasta
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/users/test1(a)bx.psu.edu/1.fasta Tue Jun 16 09:55:15 2009 -0400
@@ -0,0 +1,2 @@
+>hg17
+gtttgccatcttttgctgctctagggaatccagcagctgtcaccatgtaaacaagcccaggctagaccaGTTACCCTCATCATCTTAGCTGATAGCCAGCCAGCCACCACAGGCAtgagtcaggccatattgctggacccacagaattatgagctaaataaatagtcttgggttaagccactaagttttaggcatagtgtgttatgtaTCTCACAAACATATAAGACTGTGTGTTTGTTGACTGGAGGAAGAGATGCTATAAAGACCACCTTTTAAAACTTCCC-------------------------------AAATACT-GCCACTGATGTCCTG-----ATGGAGGTA-------TGAA-------------------AACATCCACTAAAATTTGTGGTTTATTCATTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGGGAGATATTTGGggaaatt---ttgtatagactagctttcacgatgttagggaattattattgtgtgataatggtcttgcagttaca-cagaaattcttccttattttttgggaa---gcaccaaag----tagggat---aaaatgtcatgatgtgtgcaatacactttaaaatgtttttg-----ccaaaataatt----------------aatgaagc--aaatatggaaa-ataataattattaaatctaggtgatgggtatattgtagttcactatagtattgcacacttttctgtatgtttaaatttttcatttaaaaaaaaactttgagc-----tagacaccaggctatgagctaggagcatagcaatgaccaa----------------------------------------------------------------------------------------------atagactcctaccaa----------------------------------------------
----ctc-aaagaatgcacattctCTGGGAAACATGTTTCCATTAGGAAGCCTCGAATGCAATGTGACTGTGGTCTCCAGGACCTG-TGTGATCCTGGCTTTTCCTGTTCCCTCCG---CATCATCACTGCAGGTGTGTTTTCCCAAGTTTTAAACATTTA------CCTTCCCAGTGGCCTTGCGTCTAGAGGAATCCCTGTATAGTGGT-ACATGAATATAACACATAACAAA-AATCATCTCTATGGTGTGTGTTGTTCCTGGGGTTCAattcagcaaattttccctg-ggcacccatgtgttcttggcactggaaaagtaccgggactgaaacagttgatggccca-atccctgtcctct---taaaacctaagggaggagaTGGAAAGGGG-CACCCAACCCAGACTGAGAGACAGGAATTAGCTGCAAGGGGAACTAGGAAAAGCTTCTTTA---AGGATGGAGAGGCCCTAGTGGAAT-GGGGAGATTCTTCCGGGAGAAGCGATGGATGCACAGTTGGGCATCCCCACAGACGGACTGGAAAGAAAAAAGGCCTGGAGGAATCAATGTG-------CAATGTATGTGTGTTCCCTGGTTcaagggctgg-gaactttctcta-aagggccaggtagaaaacattttaggctttctaagccaag--gcaaaat-tgaggatattacatgggtacttatacaacaagaataaacaatt---tacacaattttttgttgacagaattcaaaactttat----agacacagaaatgcaaatttcctgtaattttcccat-gagaactattcttct--tttgttttgttttgcgacAGGGTTGCGCtgatcctcccgcctcagtctccctaagtgctgagatgttgcaggaagtcagggaccccgaacagagagatcggctggagccgtggcagaggaacataaattttgaagatttcattttaatatggacacttatcagttccc
aaataatacttttataattttttatgcctgtctttgctttaatctcttaatcctgttatcttcataagctaaggatgtacgtcacctcaggaccactgtgataattgtgttaactgtacagattgattgcaaaacatgtgtgtttgaacaatatgaaatcagtgcaccttgaaaaagagcagaataacagcaatttttagggaacaagggaagacaactataaggtctgactgcctgcggggtcgggcaaagggagccatatttttcttcttgcagagagcctataaatagacctgcaagtaggagagatattgctaatttcttttgctagcatggaatattaatattaacaccctgggaaaggaatgcattcctggggggaggtctataaatggccgctctgggaatgtctatcctacgcaatggagataaggactgagatacgccctggtctcctgcagtaccctcaggcttactagggtggtgaaaaactccgccctggtaaatttgtggtcagaccagttttctgctctcgaacactgttttctgttgtttaagatgtttatcaagacaatacgtgcaccgctgaacacagacccttatcagtagttctcctttttgccctttgaagcatgtgatctactccctgttttacaccccctcaccttttgaaacccttaataaaaaacttgctggttt-gaggctcaggtgggcatcacagtactaccgatatgtgatgtcacccccggcggcccagctgtaaaattcctctctttgtactctctctctttatttctcagccagctgacacttatggaaaatagaaagaacctacgttgaaatattgggggcaggttcccccaataTCTGGTGCCCAACGTGGGAtactgagattacaagcatgagccactgcatctggcctcttcttttgatttttttttttcaaacttttacaaatgtagaaaccattcttagcttttgggcatta
ccaaacccggcagtgg-caggctcggttcaccaacgtcatttgcagttccccgCTTTATGTTATGGgttttgttttgttttgtttttttt-attgagacagagtttcactcttgttgcccaggctgtagtgcaatggtctgatcttggctcactgcaacctccacttcccaggttcaagccattctcctgcctcagcctctcaagtagctgggattacagacactcaccaccacacctggctaattttgtatttttagtagagatgaggtttcaccatgttggccaggctggtctcgaaatcctgacctcaggtgatccacccaccttggcctcccaaagtgctgggattacaggcttgagctaccacgcctggctGGGTTGGTTCTCAATGGAGTGGTTTGTTTTTGGAGCTGCTCT-GCGCAGtggggaccagaataggcctg-------------------ggttcctagcccattgctattcctt----accagctgtggattctaaggaaagtcatttaacctcgctggaccttag-attcctcatccctgaaGCCCAAGGGTaaaacaaaacaaaacaaaacaaaacaaaccaaCCCATCATGTAAAGCGGGGAACTACAAACGATACAGGTGAAACATGCCTACCACACCACTCACAGGCT--ATGATGACAAAAACGTGGCTACATCTGGGACCACCCCCCAACCCCCACTTTGTACGTAGGAAATACGGAGTTGAGGATGGAGACCCACAGTATGTCCAGAGTGTCCCCAAAGGCCACAGTGCCCGCCTGGAGCCCTCCAGAGAGCGTGCACTCCCTGGGGTGCCAGCCAGAGACAACTTGCCCTGAGGCTTGGAACTCGATTCTCCGCGTGCCAGAGAAGGGGTGGGACTTCAGAACCCCCAACCCCGCAATCTGGGTCGGGGAGCCTGGCGCACTGCGGGCCGCTCCCTCTAACCCTGGGCTTCCCTG------GCGT
CCAGGGCCGTCGG-----------GGCCGAGTCCCGATTCGCTCCCACCCCGAAGCCGCGCCAGGACCAACGAGGGCGCAGCCGTATGCCCCAGCCCGCTCCGCGGAGCCCCTCACAGCCAcccccgccccgaccgcgccccgcgcggcTCGAAGCACCTTCCCAAGGGGCTGGTCCTTGC----------GCCATAGTCGCGCCGGAGCCTCTGGAGGGACATCAAGGATTTCTC-GCTCCTACCAGCCACCCCCAAATTTTTGGGAGGTACCCAAGGGTGCGCGCGTGGCTCCTGGCGCGCCGAGGCCCTCCCTCGAGGCCCCGCGAGGTGCACACTGC---------GGGCCCAGGGCTAGCAGCCGCCCGGCACGTCGCTACCCTGAGGGGCGGGGCGGGAGCTGGCGCTAGAAATGCGCCGGGGCCTGCGGGGCAGTTGCGCAAGTTGTGATCGGGCCGCTATAAGAGGGGCGGGCAGGCATGGAGCCCCGTAGGAATCGCAGCGCCAGCGGTTGCAAGGTAAGGCCC-CGGCGCGCTCCTTCCTCCTTCTCTGCTGGTCTTTCTTGGCAGGCCACAGGGCCCCACACAACTCTGGATCCCGGGGAAACTGAGTCAGG-AGGGATGCAGGGCGGATGGCTTAGTTCTGGACTATGATAGCTTTGTACCGAG-----TTCTAGCCAGATAGAAGGTTACCGGGAGCTGGGGAGCGTTGGATTTGCTGCTGGGCTGTGCCGGTGCCCAGAAGGCA------GGACCTTGCAGAACCAGCCAGGTCCCTGGGAGACTGTCAGACCCACCAACCTGGTGGCATTCGCAGAGCTGAGATGCATTGGAAATTGCCTTGGGCACATCCCCAAAGATCAGGATGTCCCACCCCAGTCTGAAGGAGA---TAAAGTTGGGGGTAGGAGAGACGCAG-ATGCAAGTGATCAGTCTC---AGTCCCAGACATTGCCTTGCTCTGCGGGTAGGAAT
TCAGGATTCATTTTCCAGGGAAG--------TTCCTGACCTCTGAATGAGAGGGGCTGTGTAAGGCCAATGCCTGGG-AGGAAGGCAAGGATGAGTAGAGGTGGGGGGAAACAAGTGTCAGGAAGA--------------------------------------------CTCAAA---------ATCTTC--------------------------------------------------------------CAGAGAAATTGT-----GCAGGGTCTTACCAGATCTGTCCTCAAAGCCATGCAAATTGCCTTCTTTGCAATGCAT-ACAATGAGGTGTCTCTGGGGGTCAGAACTGG-----------------------TTATTAGGGAACTTCTAGCCAGGACTGCTAAATACGCGCTGTTGG---------CCCACCAGGCTCACCTATAGCCT-TCCTTCAGTCTGGGCTTGGTTTGGATTTCACTGTGGGTGCCATCGCCTTTACACTCCTGTTTCTATAGTTTAAAGATAGTGGTGCTTTGGGAAAG---TGACTCCTTAAATACAGTTAGGTCCAAGTGA-GACAAGTGGCCTGGCTGTCATTTCAGAATAGCAGCTTCCAAGAGG----------TGATTAATTTCTGTTGGAAGGGTGAT-CTTTGGGGAGGT--GGGTGAAGAGCAGAGACTTGGTGGTACCGTTCCAGGAGCACAGGCTCTCT-----TCCTTTGCA--GTGCAGAATGACCTCTGGCAGCCGGAGTTGTGTTTGTT--------CTGTAGGATTCTGAGGTGGGCCATGGGCAGCTGGAACTGGG-----GAATTTTGCCAATCTCTTTCATATTAGGATTGTCTGCAGAACCAGATATGGAGG------CTTCTAGCAACGTGAGTGCTCCTGTTCTAATGCCCTTAGAAACAAGAAGGCCACACTGATCATTTCTCTCACTTAGGCAGGGAG
ACAAGGCAAGAGAGAAACAGT-----------------GGATGC--TTTTAGGTTCTTTCCCTTCCCAAGCAGTTGTGGACATTGGGCTGA-GGGGAACATTTCCACATTGGCTAAAGGAGCGTCCTCCTCATATTTTGTACATTTTATACCCAA--AATAA-CTCTTCTTGGTATTT-GGGGAAATATTTTCCTCCCCGTCC------------ATTCCAGGAAATGGCTCCAAGTGCCAAGGACAGAGCCAGGGAAGTTGCAATGAATTCCTGCCCGTCAGCCCCAGGCAGATGCCTTGCACGTCTGAGTGGCCCATGCAGAGCGTGGAGGTGGCCGCC----------------ACGGAACC-TGGGTCAATGT-CCCACCCCCG----CTTAGATGCCA-CCAGGGG--CGTGGGAGCCAAGGAG--AGAAGAGGGGCTCCAGGAAGGTAGAGTCCTTGTGTCTTGTGCATCTGTGAACAGCACTGGTATGATTTAAAGGAAAATTGAGCCAAATTTTCCGGCAGTCAGTT-----ACCCCATCCCCACCGGGGTAGGAGTCTGGCAGCCGCAGCTCCATTCTGGCCAGTCGGCAGAGAGCCTTGAAATTCTTCTTTGTCCACACAGTTGTCTCAGAGAAACAG--AGAGGTT-GTTTCTGCTTAAAAACAACACACTTGGTGTCTGGGCCCACAGACTCCTTTGCACTTATTCCACGTGTGACAGCCAATGTGCCTCGTTGCTTAGCAGACAGCATGTTACCGTCTTTCCTGCTCAGTTTGTTAG--------------CTCTATGGAATGGAATTTATAATCAATGCCCATACCAACATTTCACTAATATCATAGGAGATTTAGTCTCCATCTGGGTGTACATTACATTTGC--TCTGGGG-TGCTCCAGGC--TGGGGGGTTGCCAAGGAAGAGAAGAGAAACCGCAGAGAAGAC---GGGAGGGCAGGGCAGGGGTCTCTG
AGAAGGGGAGGGGTCCCAGAGTGCAGGAGCAGGAGCCAGGCTC---------ATGAAAGGGGCCACGGGCGGGAGTATCCAGGGACGGCAGTCAAGATGGAGCACAGCTTAGG--AAGCTGAAGGGAATCCTGGCCCACCTGGGTGCTAGAGGGCACATAGGAAGTGCAGGAAGCAGACCAAGGTCCCCAAGAGAGGGAGACCTGGACGCTGAAGCATTTTCTGTCTTTATTAAG-------------ACAACTCCGTAAGAATTCCTGCTGGGCCAAAGTGAATTCTAGGATGCGACTTTAAGATGGGAGCAAGCGAACCATTGAGGAGGCAGGTTACCCTAGTTAGCCAATGCAGATCGAGAATGGGAAATCTTTCatttattcatgcaacagatatttaacgaagccctgccgtgttccaggcctgtgatagatgctggaacaggtacagaga----------tAc-------aggtgtcattaattgatcaggg--caacctctc---cttctgagt--cttgctggagcttcagatgc-ccctcacacagagctcgagggagcctc-aacaattgatcagaagtcaggcaccatggctcacgcatataatcccagcactttgggaggccaaggcaggtggatcactggagcccaggagttccagatcagctggggcaacatggcaaaaccccatctctattaaaaaaaaaaaaagtaactggatgtgatggtacacacctgtagtcccagctacttgggaggctgagaggtgggagaattgcttgagcccgggaagtcgggggtccagtgagccttgatcacaccactgcactccagcctgagtgacagagcaagaccctgacacacacacacacacacacacacacacacacacagattagagctgaaacaggagtagaaacctatctg-tatctctgATGA-GATCAGATC---------TTTCTGATGAACAGAAAGAATG
TAACCCCTGTACTCACACCCTCTCTGCTGGTTACATATGTTAACACGATTTCTCAAATGAGGCTTTTGGTTGCAAATAAGAGAAAATCACTCACGCT-GGCCCTGTG--TTTTTCAAATTGTTTATTGTGATCAACATTTGAAAAAAGAGCCGAGACTCTCAAGAGTGCATTACCCACGGTAAGGGTGAATTTT-ACTTCTTGACACTTATTTCTCTTACATGTATCTATCTGTCTC-----AAATGAAAAATATATTTAGAAAGTTGAAAGCTATCCAAGTGAGTATAAGAAAAGAGTATCTCACCCTGAAGGCTAAGGACAGGGAGGGC---------------------------CACCAGGCCTCACGAGGACCCAGGAACCACAAAGAAGGCT-AGGAAGGAGCACAGGCGGTGACCATACTCTGGCTCAGTGGCTATGTGGGCTCTGGTCTCTCTCAGCTGTTCCATGCATATGAGGCCAAATGTGGCTACCCTAGAGCTTCTGAGCCCTCAACAGAGATGAACTGGACTCTCTGCAGCCCCACTCTAAATTCCTAAGAGAGAAGTTGATTGACCCAATCAGGGTCAGGAGAAGGAAGGGAGGAGGAAAGGGAGGAGAGAAGAGCCTCTTCGTCTCTTGCCTACCACTGGCCAGGCAATTGTAGCCAAGGGGGCTGGAGTGTAAATGCAAACATAGCCATCAAGGGTtgtgtatgtgtgtgtgtgtgtctgtgtgtgtgtatgtgtgtCTCTTGGGTAGGTTAGA-TCTCCCAGGAGGTCCCTACTAAACAGACTTAAGCCCGCAAAATTTTAGCTCTCCAGCCTCACACACTCCACCCCTCTACCATATTGAATCTTCCCAAACCAACTATGGCTTTCCCTAACTCCGGAGc------ttggcctggaatgccctgcttcccctctttcccctggggaacgcctgtccttcaggcctcagttcacacactgcctcccttgca
aagctctccTCCCATCCCCGGAGTCCCT--CTTCCCCTTTGTTCTTTGGGTTCTATGCTTCTTCCCTCATAACTCCCACCAGGTTGTGTTAAAATGAGTTGTTCAAGGTCCTGTCTGTTCCACTAGATTCTGAGCAACTTGGAGAACGAAGATCCAAACTTCGCTGCCTTTATTTCCTCCTTTGTTCTTTTCTCATCCCCAAGTCCCTTCCAACTTGGAGTTATgaagaaaggaaggaaggaagggtgggagggaagaaCAGGAGGGGATCCCACAGG-AGAATGTGTATAGGGAGAGGACTCAGACTAGCTAAAGCTTTTCCCTCATAATTAATAGCAAATACCATGTTACCTGAATTTAATTCACAGTAGCATACAAAAGACTCGCTTTGTTCT-------CCCCA---------TTGATGTCATCAGAGG--------------------GCTGTGGG--------------CAGGCCTAATCTTGGCTCAGGAGGCCCTCCAGCCTGGATCTAAAGAGCAGCAGATGggccaggctcggtggctcatgcctgtaatcccagcattttgggaggccgaggcgggtggatcacgaggtcaggagtttgagaccagcctggccaagatggtgaagcctcgtctctactaaaaatacaaaaattagccaggtgcggtggtgggcgcctgtatttccagctacccgggaggctgaggaggctgaggcaggagaatcgcttgaacccgggaggcggaggttgcagtgagccgaggtcacgccactgcactctagcctgggcaacagagcaagactccgtcaaaaaaaaaataaaaaaataaaaaaataaaaaaaataaaGAGGAGCACACATCTCTGCCCATCCTAACTCCCACTTTGACATTGAGGTCCCCAGGATGGAGGGTCTGCCTCCATCTGCCTTGTCCCCTG-CAATGGTGGGAAGGTGATGGAGCTCAAGTCTAGAGGCCACC
AGCTTCTTAGGGAGG--TAGGAGGTG---------------GAGGGTGGGGTGC-GGGCCCTGCACACAACTGCCAAGTGAGGATGGGGGTGGG-GTCCACCTGAGGATAAGTAACAGTGAGGCTGGTGCAGAGGACCCAGGTGGAGGTAGACAGCAGAATTTGTGGTGGGGT--GGATGGCAC-ATTATATAAGCCTCTCTTGC------TGCCCTGT---TTACTGAGATTGTTTCAttatcttttttggcttttgtttttaagagatggggtcttgctgtgtcacacaggctggagtgcactgtgtgatcatacctcactgcagcctcgacatcctgggctcaggcaaacctcccaccttggcctcccaagtagctgggaccacaagcgtttgccaccacactcagctatttttatttttattttta--ttttttttagagatggggtcttgctgtgtcgcccaggctggtcttgaactcctgggctcaagcgatcctcctgccttggcctcccaaagccctgggattataggctgagccaccacacccagccACATTTCATCTGTGCAGCTCCAGGGGCTCCACATTCT-ACTCTTCTCATTTCTTCTCCAGGGTACCC----------ATGGCAAGGGATGAGGGT--AGAAGATGGGGCA--GCCAGGCCTTGATTAAAGGAGAAGGAAGGCAGCCTGTGGAGAGG---GCAGCC---C---AGGGAG---TGCAGAGAGAAGTGGGCCATGAGGGAGA---CAGCAGAGTGCAGGCTGCGTCC---CAAATGAGCATACAGCCCACTGTGAGCCCACC--ATCTTCCTAGA-GA--CCCCTCTCCTCTCC-AGGAGCTGCTTCAGTAGCACTCA---------GAGGAAAGAATGATGC--------TGTATCAACATTTCAGCAGCTCATCTTTTAACTCTAAGAAAATGGCAGCTCCTAAATGTTCAA--AACTGCTTTG
GAAACTTCT---GGAGAGAGGTTTTGCAGCTCAGGCAGACAGCTGATCGCGGCCTTTCTTCCACCCCAACCCATGCTCTCCCCATGCT--CTCCTGCCACAGCTGCAGCGGGCCCCTGGGTCCTACATTTGCAG-CCCTTTGTCTCTGAGCT-----CAGACTTCCAATTCCAAGCGGCAGCTGGGCAGGCTCACCAGCATGT---CCAGCCAGTACTAGGACATCAGCAGGAGC----CCAACCACCTCTTTCCAAAATCTCTCCTCATGTCTCTCCTAGTTTCCATCTCCATCCTTCTAGTCAGCCAGGCTGAAAACATT-----------------TGCTCCTCAGGGTGCAGAAGGGAAAGCTTTGCCTCCCTTCCTGGTGCTCACTGCCCCTGCGATTCCAGCCCAAGCCCTCCCCGGCTCCTCACC----------CTGGTGTCAGCTGGAAGCCACCATCTCCTAAACCCACCTGtgttcttccacctctgc--------cagggctgc-cctctcctccaccttcacaaactcaattcctacccattctcaggtcccttatcaaatgccatctcctccatgatgcctccctgattccccTGCTGGAaataatggtgataacagctaag--gcattggggttggctacgtgccaggcaaggagttggcactttacatgctttatctcatttcagccacataacatcgacaggt-ggcattatgattcatatcatccccatctgatagccaggaaaactgagtcccagagaggttagc-cactttcctagggccCTGTGCTCTGACTCAAGCATAGCTCTGAGGAACTCTAGCATTCATCAGTTTAAGCACCATGACTTTCTTTGCTGAGTCACCCAAGGCAT-TTCTTCATTTAAATGTTCTTCCTTGGCCAGGCGCAGTGGCTCAggcccaatgcggtggctcacgcctgtaatctcaacactttgggaggccgaggtggg
cagataatctgaggtcaggagttcaagaccagcctggccaacatggtgaaaccccatctctactaaaaatacaaaaaaatgaggctgggcgtgatgactcacacctgtaatcccagcactttgggaggccaaggcaggtggattacatgaggtcaggagttcgagaccagcctggccaacatggtgaaatcctatctctattaaaaatacaaaaaattagccaggcatggtggcaggcacctgtaatcccagctacttgggaggctgaggcaggaaaatggcttgaacccgggaggtggaggttgcagtgagccaaggttgcaccattgcactccagcctgggcaaaaagagggaaacatcgtctaaaaaagaaaaaaaaaaaattagccaggctgggtggtgcatgcccgtaattccagctactcaggaggatgaagcaagagaattgcttgaacccaggaggcagagattacagtgagctgagatcacaacactgcactccagcctaggtaaagaacaagactccatctcaaaaataaataaataaaaataaaTGTTCTTCCTTGCAATGAAGTTAAATATGTAAATTCTCAAACCAGTTGCTTAAGGGCACAGTTTTGTTCTTTACCTATATTTTTAACAAATATTTTATGTAAGTAGTTGAC-AAAATCAAATACTGT-GTACACTACCGAGGCTTCCCTGGGAAAGCCATCAG-CCTCTGCCCCATCCCTTCCCACTCCTGATT-CCACTTTCCTGTGTTTCCATATCTTTTTCATGTCTGTTTCTGGCCCACAGTGGGCGATCAATACATGTTAGCCACCAACCATCAAACCTATATTGAGTAATTATGGTATGTCAGGCACTATGCTCAATGAAATTGTAttaggcttgtacaaaagtaattgtggtttttaagagtaatggcaaaaacggcagttactttcgcaccaacTATTTGCTGCCTTGAATTATTCCTCCTCTC-C
TCATCCCTAAACCCTGCTCCTCCCAGCCATTCTTCCTCCCCTTCTTGGGCCATGGCCAGGCCCCACCCAGGTACTAAGACTCAGGTGAACCAAGGAAGACTTAATGCCCACTCTTTTCTGATGCCCATGTT--GGCATGTGTTAAGtcggttagcattaagtttggctgcatttagcagagacccaaaagaacagtgccttttaaaaggcagaggttatgtctctcacacacacccagcacaagtccaag-------------------------accagcatggcatctcagctccatcaa--cctcaggaaccgagctcctgcagctccctgccctgcagttgataaggtgaggtctttgtcctcctggttcaagatggtgctagaatgttggctaccatatctatagtccaggcatcagaatggagcaagggatgaaaaaggaagagatgaaggcacacgacaggttcctgagagctggcacaggacacttctgcttatatttcactggccagaacttagtcacatggtcacacctagttgggagactctgagaagtaa----agtatttattctagatggccatatccctacc-taagacttggagttttctatgactggggaagaacggaagacaagatattgggaaagactagcagcctctactaAAAGGGTGATCtgtgttgatgtgcgtgtgtgtgtgatgtttgtatg---agcatgtgtgt-tatgtgttgt--gtgtTGGTGGGGCA--GATTCTTGCGAGCACTTTGGTCTCAGATGGACCTGCTACCAGTTCTCTCTGCAGACCCCCATAGGTTTCTCCTAAACCTGGCCT-CTCCTATTAGGCAGCCTTACTCAGCGGCAGCTTCTCAGCTCCATGTTTTCAAGGAACCACAATTTATTTCCAGCATCCACTGAAGCATATTATCAGTGGTGATAGAGGGGGCTTGTAAAACTGTTTTTCCACTTAGGTATTAGA
GGGTGGCCATTATTTGAGAGTGAC-----TATGACCACAGTTAATCTGGTAATAAATTCTCTTGGGTAGGAGGGGGAAAGGAAAGGATGCTTTAAGGAAGCATCTTGCCAGGAGACACAAAGCTAACAAGAGTGGAGCCTGCAG----------------------------CTGGAGCCGCAGAGCCTAATCACTACACCCGCCCATCTCTGCTAGGGTTTCATGACTTCGTATCGGGGATTAGCAGTATTTAACTCTGTTGCACAAACATTTGGTGTA-----TTATTCAGGTAACAAGTAGCTAATAGAGGAAGTTTTACTTTTTTAAGACATAA--------------------ATTTGCCTTTTCCCAAATTACTTGGTACATAGTAC-TTTTCATGTTTGAAGTTGAGATGTGGGTACAATACCATAGCTTTATTCCAGAGCAGGGTATTTGTTTCCAAATGCCATGTTCCCAGCAGCTGCCCTTGACTGGGAATTGGGGTG-----TGATTTGGGCTTTTCCTTAAATCCTTGA-----GGAGCTGGA---GGGGTGGGTGGCTCGCACTCCTGCTTTctgg---------atctgaatc--------------ctgactctgtcatggacctgtt-tgactttgggcaagttgactcctattcctgagccccatat-ttttctcttctgtgaaattcagattaaaaA-AACATGGCTTTGATCAAACATTATAAATAATATATAGACAGACTGCTTGTTTTTATTGTATTGCCAG-AAATGAATCCTACTAATATTGCCATCTATGGACAGAAAATGTATTACCTGTCTTCATCAAGACCCAGACGAGGAAGAACACGAAAAGCGGAGATTAATTTTACTGCCATCTCCAGAACCGTCATCCTAATATTTACTTACAT-TTTATTATTATTTCAGGCTCATGCACATATACTTAGCATGGATCATTGGCCACAGACTCGCA
TACATTTAACTTTATTACCTTT-TGCCTCATGTATCTCATTAAAATTTTGCTGCTTAATCAAGGATCTGCATATTATTTTAATTTTAGAATTCACAGTTCCAAGACTTTGAAAGTTTCAAGCGTTCTGGGTGaatgtgttatgc--tctctcccgccaccatgtctttataccccctgatttctcagccact-atggcaaccactttctactcttagtagcccatatttag--tccaatccccagctcaggagacacttcttccaggg--agccccctgtgccttccagtagtatcttgtacctgccctttttgcaaagctctttcctcctggcttagaatggcccattgacctgtttgtttctcctattaaactgtaagccactcgagggtagagagcatctgttgttcaccattgcatcctcggtgctgagcactgcgtctgacatattatttagaaggtcagtaagtgctagtgggatTCAGGCTCCCAGTGGGTGGGAGAGAAAGGACGTAAGGAAGCAAGTGGTAAAGGCCCTCACAGA-GTATCAGCAGGCTGGTGTGA-GGGAGAAATGCAGAGGATGGGTGAGTAGCA-----TAATCGCTAATGAT-AGGGTAATGATAGAGCACATTTCACAACACCTTt-aagccctttcacgtgcatcagataatttgatcctcataaaagcctagagatagatatattacagg-gatgaaggtggagtattttgtggttatgtgatatg-tttaaaattatgcagtgagtaaatgactgggttcaaaccagaccttaaaagtctgttatctttccCTCG-AGCATGCAATGAAGTCTACATCATCCCTACCATGTCCATTTGATCACACCCTGGCCTCACAGCTCTGTGGTCTACAGGATACCTCATGGTGGTTTTATTGACCAGACAATAATCCTCTTTCTAAGGGGATGCATTTCATTAATACATATGTAGATCATGAATTGTC
TTTGACTTTGAGGGGATGGTAGC----CAGAGCAGAAAGCAAAGCTGATTTTCATCCCCGTCTGGTAATGTGGTTGGTAATGTGAAGA-TGGGTGTATTCTGAGATACCGGCTCCTTGCAGTGTGTGGTTCCTTCTGTTTTCAGGCCC------AAGAAGCCCATCCTGGGAAGGAAAATGCATTGGGGAACCCTGTGCGGATTCTTGTGGCTTTGGCCCTATCTTTTCTATGTCCAAGCTGTGCCCATCCAAAAAGTCCAAGATGACACCAAAACCCTCATCAAGACAATTGTCACCAGGATCAATGACATTTCACACACGGTAAGGAG---AGTATGCGGGGACAAA---GTAGAACTGCAGCCAGCCCAGCACTGGCTCCTAGTGGCACT-GGACC-CAGATAGtccaagaaacatttattgaacgcctcctgaatgccaggcacctactggaagctgagaaGGATTTGAAAGCACAGGGC-TCCACTCTTTCTGGTTGTTTCTTTTGGCCCCTCTGCCTGCTGAGATTCCAGGGGTTAGTGG--------------------------------------------------------TTCTAATTCTAAACCACTCCAAGAACATTTGATTTTGCTACATGTTTCCATTTAAAAATCATAGGATTTGggctgggtgtggtggcttgtacctgtcatcccagcactttgggaggccaaagcaggaggatcattcgagcccaagagttcgagaccagcctgggcagcatagggagaccccatctctacaaaaataataaaaaatgttagctgggcatggtggtgtgtacctgtggtcccagctaggggaggctgagatggaaggatcacctgagcctgggaggttgaggctgcagtgggccctgatcatgccaccgtgctccagcctgggtgacagagtgagaccttgtctcaaaataaataaataaataaataaaAGTCATAGGA
TTTgatcaggcatgatgggtcacatctgtaagcccattgctttaggaggccaaggtaggaggatcagttgaggccaggagttcaagaccagcctgggcaacatggcaagacctctctctctaatttttaaaaaaataaaaaTTAAAGATAAGAAAAAAATCATAGGATTCTCATGAGGCCTCACGTGCTTATTTTCAACCTACCAAGGGGAAACCCAGGCCTCAGCGATTAGCTGAGC----------CACATGCAGGCACAG------------------------CCACTG-----TCTCTTTCCTTCCTGTCCCCTCTGTCCCCACCTTCTGCGCTCGCCTTCCTCCCTGACTTCACTTCCTTGAATCTTAGTGCCTACGACCAGAGGGAGCTGTGAAGTTCCTTG----TGTCCCATTGGCAGGAA-CAAGACCCCCAGAAGCATCTCCTCAGGGC------CTCTA-----TCCCATCTC-TAGATGTGCTTGTCATTAGG-Gttct-------------tgtagttccagctgatctctggccctgccgctcaaagatacccaaaagagcgagtctaccctttttcacattcaaccctctactgatttgcaaatagcagtcagtgcccaccctggtcttttctctggggtccagcaggcctagaccttcagccattttcctgatgaGGTCTGTAtttgaaattaggaagattaagtttgaatcttcacacttctgat----gtctgtgagatcttcagcaagttccttact--gtctttaagccttgt-tttcatcatctggataatggggatatcacacacta-ttcacaaggttgttatgaggcctaaattagctaaagcaATTGAATCCTCCTTACCCCCTGCATGGAGCTCTCTGGAGACTTCCACGTCTCCTGGTCATTGTGGGTGTCTTATGGTA-GTCTTGGGCAGTTAGGGAGAAGTTAGGTGTCTGGAAGCAAA
GATGGCTCAGAACTAGATAGAGTC-TTGGGCATTTTATA-GATAAAAACTCTT--GTCTCCtttaaaaataataaaaaaaaattaGCTGGGCATATTAGCCACTCAGCAAGACTGCACGTGATAGATCCCGAGTGCCCCACCTTGGGTGGTGTAATACACAATATCACGGGAGCCCCGGGTAGTAACCACGGAGGTGTCAGCCTCAGTGCTGTGGGCAGATG-GATGGGGAGAGCC--TCCCGG-AACTGGAGTCACTGGAGCA----------------------------GGGTTGGGGGGCCTCACTGAGGGTACGGCCTTGATCTCTAAGGAGGAGGGACTGCCTGGAAAAGC-TGACTGGGAGGGAGGACTCGGCTGGGGGTAGAAGGGA----------CTAGGGAAGGCTGGGGGTGGGGGTGCTTATGGAGGACCTCAGATGCCTGGGGAACAGACTCCACTAAATAAAACATATGAAACCATGGCTGGTTCTTCAGCAGAGGCCATGTAGAGAAAGGAATGACCTAGGAAAGTTGGCCTGGAAGTGGAGGGAAGGATGGTGTGGGAAAAGCAGGAA--------TCTCGGAGACCAGCTTAGAGGCTTGGCAGTCACCTGGGTGCAGG-ATACAAGGGCCTGAGCCAAAGTGGTGAGGGAGGGTGGAAGGAGGCAGCCCAGAGAATGACCCTCCATGCCCACGGGGAAGGCAGAGGGCTCT-GAGAGCGA--TTCCTCCCACATG-CT-GAGCACTTGTTCTCCCTCTTCCTCCTGCATAGCAGTCAGTCTCCTCCAAACAGAAAGTCACCGGTTTGGACTTCATTCCTGGGCTCCACCCCATCCTGACCTTATCCAAGATGGACCAGACACTGGCAGTCTACCAACAGATCCTCACCAGTATGCCTTCCAGAAACGTGATCCAAATATCCAACGACCTGGAGAACCTCCGGGATCTTCTTCACGTGCTGGCCTTCTCTA
AGAGCTGCCACTTGCCCTGGGCCAGTGGCCTGGAGACCTTGGACAGCCTGGGGGGTGTCCTGGAAGCTTCAGGCTACTCCACAGAGGTGGTGGCCCTGAGCAGGCTGCAGGGGTCTCTGCAGGACATGCTGTGGCAGCTGGACCTCAGCCCTGGGTGCTGAGGCCTTGAAGGTCACTCTTCCTGCAAGGACTACGTTAAGGGAAGGAACTCTGGCTTCCAGGTATCTCCAGGATTGAAGAGCATTGCATGGACACCCCTTATCCAGGACTCTGTCAATT--TCCCTGACTCCTCTAAGCCACTCTTCCAAAGGCATAAGACCCTAAGCCTCCTTTTGCTTGAAACCAAAGATATATACACAGGATCCTATTCTCACCAGGAAGGGGG-TCCACCC-AGCAAAGAGTGGGCTGCATCTGGGATTCCCACCAAGGTCTTCAGCCATCA---ACAAGAGTTGTCTTGTCCCCTCT-TGACCCATCT-----------------CCCCCTCACTGAATGCCTCAATGTGACCAGGGGTGATTTCAGAGAGGGCAGAGGGGTAGGCAGAGCCTTTGGATGACCA--GAACAAGGTTCCCTCTGAGAATTCCAAGGAGTTCCATGAAGACCACATCCACACACG--CAGGAACTCCC--AGCAACACAAGCTGGAA---GCACATGTTTATTTATTCTGCATTTTATTCTGGATGGATTTGAAGCAAAGCACCAGCTTCTCCAGGCTCTTTGGGGTCAGCCAGGGCCAGGGGTCTCCCTGGAGTGCAGTTTCCAATCCCATAGATGGGTC-TGGCTGAGCTGAACCCA---TTTTGAGTGACT----CGAGGGTTGGG-TTCATCTGAGCAAGAGCTGGCAAAGGTGGCTCTCCAGTTAGTTCTCTCGTAACTGGTTTCATTTCTACTGTGACTGATGTTACATCACAGTGTTTGCAATGGTGTTGCCCTGAGTGGATCTCCAAGGACCAGGTTA
TTTTAAAA---AGATTTGTTTTGTCAAGTGTCATATGTAGGTGTCTGCACCCAGGGGTGGG-GAATGTTTGGGCAGAAGGGAGAAGGATCTAGAATGTGTTTTCTGAATAACATTTGTGTGGTGGGTTCTTTGGAAGGAGTGAGA-TCATTTTCTTATCTTCTGCAATTGCTTAGGATGTTTTTCATGAAAA------------TAGCTCTTTCAG-GGGGGTTGTGAGGCCTGGCCAGGCACCCCCTGGAGAGAAGTTTCTGGCCCTGGCTGACCCCAAAGAGCCTGGAGAAGCTGATGCTTTGCTTCAAATCCATCCAGAATAAAACGCAAAGGGCTGAAAGCCATTTGTTGGGGCAGTGGTAAGCTCTGGCTTTCTCCGACTGCTAGGGAGTGGTCTTTCCTATCATGGAGTGACGGTCCCACACTGGTGACTGCGATCTTCAGAGCAGGGGTCCTTGGTGT-GACCCTCTGAATGGTCCAGGGTTGATCACACTCTGGGTTTATTACATGGCAG-----TGTTCCTATTTGGGGCTTGCATGCCAAATTGTAGTTCTTGTCTGATTGGCTCACCC-AAGCAAGGCCAAAATTACCAAAAATCTTGGGGGG--TTTTTACTC-CAGTGGTGAAGAAAACTCCTTTAGCAGG-TGGTCCTGAGACCT-GACAAGCACTGCTAGGCGAGTGCCAGGACTCCCCAGGCCAGGCCACCAGGATGGCCCTTCCCACTGGAGGTCACATTCAGGAAGATGAAAGAGGAGGTTTGGGGTCTGCCACCATCCTGCTGCTGTGTTTTTGCTATCACACAGTGGGTGGTGGATCTGTCCAAGGAAACTTGAATCAAAGCAGTTAAC-TTTAAGactgagcacctgcttcatgctcagccctgactggtgctataggctggagaagctcacccaataaacattaagatt-gaggcctgccctcagggatcttgcattcccagtggTCAAACC-GCAC
TCACCCATGTGCCAAGGTGGGGTA-TTTACCACAGCAG--CTGAACAGCCAAATGCATGGTGCAGTTGACAGCAGGTGGGAAATGGTATGAGCTGAGGGGGGCCGTGCCCAGGGGCCCACAGG-GAACCCTGCTTGCACTTTGTAACATGTTTA-----CTTTTCagggcatcttagctt---ctatta-----tagccacatccctttga---aacaagataactgagaatttaaaaataagaa-----aata--TGACCCCAAAGAGCCTGGAGAAGCTGATGCTTTGCTTCAAATCCATCCAGAATAAAACGCAGACCCCAAAGAGCCTGGAGAAGCTGATGCTTTGCTTCAAATCCATCCAGAATAAAACGCAGATGCTTTGCTTCAAATCCATCCAGAATAAAACGCAAATGACCCCAAAGAGCCTGGAGAAGCTGATGCTTTGCTTCAAATCCATCCAGAATAAAACGCATGACCCCAAAGAGCCTGGAGAAGCTGATGCTTTGCTTCAAATCCATCCAGAATAAAACGCAGACCCCAAAGAGCCTGGAGAAGCTGATGCTTTGCTTCAAATCCATCCAGAATAAAACGCAGATGCTTTGCTTCAAATCCATCCAGAATAAAACGCAGACCCCAAAGAGCCTGGAGAAGCTGATGCTTTGCTTCAAATCCATCCAGAATAAAACGCAGACCCCAAAGAGCCTGGAGAAGCTGATGCTTTGCTTCAAATCCATCCAGAATAAAACGCAGACCCCAAAGAGCCTGGAGAAGCTGATGCTTTGCTTCAAATCCATCCAGAATAAAACGCAGATGCTTTGCTTCAAATCCATCCAGAATAAAACGCA
diff -r c44567359a03 -r e899101e63d1 test-data/users/test3(a)bx.psu.edu/run1/2.fasta
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/users/test3(a)bx.psu.edu/run1/2.fasta Tue Jun 16 09:55:15 2009 -0400
@@ -0,0 +1,11 @@
+>Sequence 561 BP; 135 A; 106 C; 98 G; 222 T; 0 other;
+gttcgatgcc taaaatacct tcttttgtcc ctacacagac cacagttttc ctaatggctt
+tacaccgact agaaattctt gtgcaagcac taattgaaag cggttggcct agagtgttac
+cggtttgtat agctgagcgc gtctcttgcc ctgatcaaag gttcattttc tctactttgg
+aagacgttgt ggaagaatac aacaagtacg agtctctccc ccctggtttg ctgattactg
+gatacagttg taataccctt cgcaacaccg cgtaactatc tatatgaatt attttccctt
+tattatatgt agtaggttcg tctttaatct tcctttagca agtcttttac tgttttcgac
+ctcaatgttc atgttcttag gttgttttgg ataatatgcg gtcagtttaa tcttcgttgt
+ttcttcttaa aatatttatt catggtttaa tttttggttt gtacttgttc aggggccagt
+tcattattta ctctgtttgt atacagcagt tcttttattt ttagtatgat tttaatttaa
+aacaattcta atggtcaaaa a
\ No newline at end of file
diff -r c44567359a03 -r e899101e63d1 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Fri Jun 12 09:56:23 2009 -0400
+++ b/test/base/twilltestcase.py Tue Jun 16 09:55:15 2009 -0400
@@ -1150,7 +1150,6 @@
tc.fv( "1", "file_format", file_format )
tc.fv( "1", "dbkey", dbkey )
tc.fv( "1", "message", message.replace( '+', ' ' ) )
- library_dir = "%s" % self.file_dir
tc.fv( "1", "server_dir", "library" )
tc.submit( "new_dataset_button" )
check_str = "Added 3 dataset versions to the library dataset '%s' in the folder '%s'." % ( ldda_name, folder_name )
@@ -1167,34 +1166,55 @@
check_str = "Added 1 datasets to the folder '%s' ( each is selected )." % folder_name
self.check_page_for_string( check_str )
self.home()
- def add_datasets_from_library_dir( self, library_id, folder_id, folder_name, file_format='auto', dbkey='hg18', roles_tuple=[],
- message='', root=False, check_template_str1='', check_template_str2='', check_template_str3='' ):
+ def add_dir_of_files_from_admin_view( self, library_id, folder_id, file_format='auto', dbkey='hg18', roles_tuple=[],
+ message='', check_str_after_submit='', check_str1='', check_str2='', check_str3='' ):
"""Add a directory of datasets to a folder"""
# roles is a list of tuples: [ ( role_id, role_description ) ]
self.home()
self.visit_url( "%s/admin/library_dataset_dataset_association?upload_option=upload_directory&library_id=%s&folder_id=%s" % ( self.url, library_id, folder_id ) )
self.check_page_for_string( 'Upload a directory of files' )
# If we've been sent some template labels, make sure they are included in the upload form
- if check_template_str1:
- self.check_page_for_string( check_template_str1 )
- if check_template_str2:
- self.check_page_for_string( check_template_str2 )
- if check_template_str3:
- self.check_page_for_string( check_template_str3 )
+ if check_str1:
+ self.check_page_for_string( check_str1 )
+ if check_str2:
+ self.check_page_for_string( check_str2 )
+ if check_str3:
+ self.check_page_for_string( check_str3 )
tc.fv( "1", "folder_id", folder_id )
tc.fv( "1", "file_format", file_format )
tc.fv( "1", "dbkey", dbkey )
tc.fv( "1", "message", message.replace( '+', ' ' ) )
- library_dir = "%s" % self.file_dir
tc.fv( "1", "server_dir", "library" )
for role_tuple in roles_tuple:
tc.fv( "1", "roles", role_tuple[1] ) # role_tuple[1] is the role name
tc.submit( "new_dataset_button" )
- if root:
- check_str = "Added 3 datasets to the library '%s' ( each is selected )." % folder_name
- else:
- check_str = "Added 3 datasets to the folder '%s' ( each is selected )." % folder_name
- self.check_page_for_string( check_str )
+ if check_str_after_submit:
+ self.check_page_for_string( check_str_after_submit )
+ self.home()
+ def add_dir_of_files_from_libraries_view( self, library_id, folder_id, selected_dir, file_format='auto', dbkey='hg18', roles_tuple=[],
+ message='', check_str_after_submit='', check_str1='', check_str2='', check_str3='' ):
+ """Add a directory of datasets to a folder"""
+ # roles is a list of tuples: [ ( role_id, role_description ) ]
+ self.home()
+ self.visit_url( "%s/library/library_dataset_dataset_association?upload_option=upload_directory&library_id=%s&folder_id=%s" % ( self.url, library_id, folder_id ) )
+ self.check_page_for_string( 'Upload a directory of files' )
+ # If we've been sent some template labels, make sure they are included in the upload form
+ if check_str1:
+ self.check_page_for_string( check_str1 )
+ if check_str2:
+ self.check_page_for_string( check_str2 )
+ if check_str3:
+ self.check_page_for_string( check_str3 )
+ tc.fv( "1", "folder_id", folder_id )
+ tc.fv( "1", "file_format", file_format )
+ tc.fv( "1", "dbkey", dbkey )
+ tc.fv( "1", "message", message.replace( '+', ' ' ) )
+ tc.fv( "1", "server_dir", selected_dir )
+ for role_tuple in roles_tuple:
+ tc.fv( "1", "roles", role_tuple[1] ) # role_tuple[1] is the role name
+ tc.submit( "new_dataset_button" )
+ if check_str_after_submit:
+ self.check_page_for_string( check_str_after_submit )
self.home()
def delete_library_item( self, library_id, library_item_id, library_item_name, library_item_type='library_dataset' ):
"""Mark a library item as deleted"""
diff -r c44567359a03 -r e899101e63d1 test/functional/__init__.py
--- a/test/functional/__init__.py Fri Jun 12 09:56:23 2009 -0400
+++ b/test/functional/__init__.py Tue Jun 16 09:55:15 2009 -0400
@@ -80,6 +80,7 @@
allow_user_deletion = True,
admin_users = 'test(a)bx.psu.edu',
library_import_dir = galaxy_test_file_dir,
+ user_library_import_dir = os.path.join( galaxy_test_file_dir, 'users' ),
global_conf = { "__file__": "universe_wsgi.ini.sample" } )
log.info( "Embedded Universe application started" )
diff -r c44567359a03 -r e899101e63d1 test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Fri Jun 12 09:56:23 2009 -0400
+++ b/test/functional/test_security_and_libraries.py Tue Jun 16 09:55:15 2009 -0400
@@ -1396,28 +1396,26 @@
( self.url, str( library_one.id ), str( subfolder_one.id ), str( ldda_six_version_two.id ) ) )
self.check_page_for_string( 'This is an expired version of this library dataset' )
self.home()
- def test_185_upload_datasets_from_library_dir( self ):
- """Testing uploading 3 datasets from a library directory to a root folder"""
+ def test_185_upload_directory_of_files_from_admin_view( self ):
+ """Testing uploading a directory of files to a root folder from the Admin view"""
message = 'This is a test for uploading a directory of files'
roles_tuple = [ ( str( role_one.id ), role_one.name ) ]
+ check_str = "Added 3 datasets to the library '%s' ( each is selected )." % library_one.root_folder.name
## TODO: temporarily eliminating templates until we have the new forms features done
"""
- self.add_datasets_from_library_dir( str( library_one.id ),
+ self.add_dir_of_files_from_admin_view( str( library_one.id ),
str( library_one.root_folder.id ),
- library_one.root_folder.name,
roles_tuple=roles_tuple,
message=message.replace( '+', ' ' ),
- root=True,
+ check_str=check_str,
check_template_str1='wind',
check_template_str2='bag',
check_template_str3='Fubar' )
"""
- self.add_datasets_from_library_dir( str( library_one.id ),
- str( library_one.root_folder.id ),
- library_one.root_folder.name,
- roles_tuple=roles_tuple,
- message=message.replace( '+', ' ' ),
- root=True )
+ self.add_dir_of_files_from_admin_view( str( library_one.id ),
+ str( library_one.root_folder.id ),
+ roles_tuple=roles_tuple,
+ message=message.replace( '+', ' ' ) )
self.home()
self.visit_page( 'admin/browse_library?id=%s' % ( str( library_one.id ) ) )
self.check_page_for_string( admin_user.email )
@@ -1531,7 +1529,48 @@
pass
check_edit_page2( latest_3_lddas )
self.home()
- def test_195_mark_group_deleted( self ):
+ def test_195_upload_directory_of_files_from_libraries_view( self ):
+ """Testing uploading a directory of files to a root folder from the Libraries view"""
+ # admin_user will not have the option to upload a directory of files from the
+ # Libraries view since a sub-directory named the same as their email is not contained
+ # in the configured user_library_import_dir. However, since members of role_one have
+ # the LIBRARY_ADD permission, we can test this feature as regular_user1 or regular_user3
+ self.logout()
+ self.login( email=regular_user1.email )
+ message = 'Uploaded all files in test-data/users/test1...'
+ # Since regular_user1 does not have any sub-directories contained within her configured
+ # user_library_import_dir, the only option in her server_dir select list will be the
+ # directory named the same as her email
+ check_str_after_submit = "Added 1 datasets to the library '%s' ( each is selected )." % library_one.root_folder.name
+ self.add_dir_of_files_from_libraries_view( str( library_one.id ),
+ str( library_one.root_folder.id ),
+ regular_user1.email,
+ check_str_after_submit=check_str_after_submit,
+ message=message.replace( '+', ' ' ) )
+ self.home()
+ self.visit_page( 'library/browse_library?id=%s' % ( str( library_one.id ) ) )
+ self.check_page_for_string( regular_user1.email )
+ self.check_page_for_string( message )
+ self.logout()
+ self.login( regular_user3.email )
+ message = 'Uploaded all files in test-data/users/test3.../run1'
+ # Since regular_user3 has a subdirectory contained within her configured user_library_import_dir,
+ # she will have a "None" option in her server_dir select list
+ check_str1 = '<option>None</option>'
+ self.add_dir_of_files_from_libraries_view( str( library_one.id ),
+ str( library_one.root_folder.id ),
+ 'run1',
+ check_str_after_submit=check_str_after_submit,
+ check_str1=check_str1,
+ message=message.replace( '+', ' ' ) )
+ self.home()
+ self.visit_page( 'library/browse_library?id=%s' % ( str( library_one.id ) ) )
+ self.check_page_for_string( regular_user3.email )
+ self.check_page_for_string( message )
+ self.home()
+ self.logout()
+ self.login( email=admin_user.email )
+ def test_200_mark_group_deleted( self ):
"""Testing marking a group as deleted"""
self.home()
self.visit_url( '%s/admin/groups' % self.url )
@@ -1545,13 +1584,13 @@
raise AssertionError( '%s incorrectly lost all members when it was marked as deleted.' % group_two.name )
if not group_two.roles:
raise AssertionError( '%s incorrectly lost all role associations when it was marked as deleted.' % group_two.name )
- def test_200_undelete_group( self ):
+ def test_205_undelete_group( self ):
"""Testing undeleting a deleted group"""
self.undelete_group( str( group_two.id ), group_two.name )
group_two.refresh()
if group_two.deleted:
raise AssertionError( '%s was not correctly marked as not deleted.' % group_two.name )
- def test_205_mark_role_deleted( self ):
+ def test_210_mark_role_deleted( self ):
"""Testing marking a role as deleted"""
self.home()
self.visit_url( '%s/admin/roles' % self.url )
@@ -1565,10 +1604,10 @@
raise AssertionError( '%s incorrectly lost all user associations when it was marked as deleted.' % role_two.name )
if not role_two.groups:
raise AssertionError( '%s incorrectly lost all group associations when it was marked as deleted.' % role_two.name )
- def test_210_undelete_role( self ):
+ def test_215_undelete_role( self ):
"""Testing undeleting a deleted role"""
self.undelete_role( str( role_two.id ), role_two.name )
- def test_215_mark_dataset_deleted( self ):
+ def test_220_mark_dataset_deleted( self ):
"""Testing marking a library dataset as deleted"""
self.home()
self.delete_library_item( str( library_one.id ), str( ldda_two.library_dataset.id ), ldda_two.name, library_item_type='library_dataset' )
@@ -1581,13 +1620,13 @@
except:
pass
self.home()
- def test_220_display_deleted_dataset( self ):
+ def test_225_display_deleted_dataset( self ):
"""Testing displaying deleted dataset"""
self.home()
self.visit_url( "%s/admin/browse_library?id=%s&show_deleted=True" % ( self.url, str( library_one.id ) ) )
self.check_page_for_string( ldda_two.name )
self.home()
- def test_225_hide_deleted_dataset( self ):
+ def test_230_hide_deleted_dataset( self ):
"""Testing hiding deleted dataset"""
self.home()
self.visit_url( "%s/admin/browse_library?id=%s&show_deleted=False" % ( self.url, str( library_one.id ) ) )
@@ -1597,7 +1636,7 @@
except:
pass
self.home()
- def test_230_mark_folder_deleted( self ):
+ def test_235_mark_folder_deleted( self ):
"""Testing marking a library folder as deleted"""
self.home()
self.delete_library_item( str( library_one.id ), str( folder_two.id ), folder_two.name, library_item_type='folder' )
@@ -1609,7 +1648,7 @@
except:
pass
self.home()
- def test_230_mark_folder_undeleted( self ):
+ def test_240_mark_folder_undeleted( self ):
"""Testing marking a library folder as undeleted"""
self.home()
self.undelete_library_item( str( library_one.id ), str( folder_two.id ), folder_two.name, library_item_type='folder' )
@@ -1624,7 +1663,7 @@
except:
pass
self.home()
- def test_235_mark_library_deleted( self ):
+ def test_245_mark_library_deleted( self ):
"""Testing marking a library as deleted"""
self.home()
# First mark folder_two as deleted to further test state saving when we undelete the library
@@ -1648,7 +1687,7 @@
except:
pass
self.home()
- def test_245_purge_user( self ):
+ def test_250_purge_user( self ):
"""Testing purging a user account"""
self.mark_user_deleted( user_id=regular_user3.id, email=regular_user3.email )
regular_user3.refresh()
@@ -1680,7 +1719,7 @@
role = galaxy.model.Role.get( ura.role_id )
if role.type != 'private':
raise AssertionError( 'UserRoleAssociations for user %s are not related with the private role.' % regular_user3.email )
- def test_250_manually_unpurge_user( self ):
+ def test_255_manually_unpurge_user( self ):
"""Testing manually un-purging a user account"""
# Reset the user for later test runs. The user's private Role and DefaultUserPermissions for that role
# should have been preserved, so all we need to do is reset purged and deleted.
@@ -1688,7 +1727,7 @@
regular_user3.purged = False
regular_user3.deleted = False
regular_user3.flush()
- def test_255_purge_group( self ):
+ def test_260_purge_group( self ):
"""Testing purging a group"""
group_id = str( group_two.id )
self.mark_group_deleted( group_id, group_two.name )
@@ -1703,7 +1742,7 @@
raise AssertionError( "Purging the group did not delete the GroupRoleAssociations for group_id '%s'" % group_id )
# Undelete the group for later test runs
self.undelete_group( group_id, group_two.name )
- def test_260_purge_role( self ):
+ def test_265_purge_role( self ):
"""Testing purging a role"""
role_id = str( role_two.id )
self.mark_role_deleted( role_id, role_two.name )
@@ -1728,14 +1767,14 @@
dp = galaxy.model.DatasetPermissions.filter( galaxy.model.DatasetPermissions.table.c.role_id == role_id ).all()
if dp:
raise AssertionError( "Purging the role did not delete the DatasetPermissionss for role_id '%s'" % role_id )
- def test_265_manually_unpurge_role( self ):
+ def test_270_manually_unpurge_role( self ):
"""Testing manually un-purging a role"""
# Manually unpurge, then undelete the role for later test runs
# TODO: If we decide to implement the GUI feature for un-purging a role, replace this with a method call
role_two.purged = False
role_two.flush()
self.undelete_role( str( role_two.id ), role_two.name )
- def test_270_purge_library( self ):
+ def test_275_purge_library( self ):
"""Testing purging a library"""
self.home()
self.delete_library_item( str( library_one.id ), str( library_one.id ), library_one.name, library_item_type='library' )
@@ -1771,7 +1810,7 @@
raise AssertionError( 'The library_dataset id %s named "%s" has not been marked as deleted.' % \
( str( library_dataset.id ), library_dataset.name ) )
check_folder( library_one.root_folder )
- def test_275_reset_data_for_later_test_runs( self ):
+ def test_280_reset_data_for_later_test_runs( self ):
"""Reseting data to enable later test runs to pass"""
##################
# Eliminate all non-private roles
diff -r c44567359a03 -r e899101e63d1 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample Fri Jun 12 09:56:23 2009 -0400
+++ b/universe_wsgi.ini.sample Tue Jun 16 09:55:15 2009 -0400
@@ -55,9 +55,13 @@
# Galaxy session security
id_secret = changethisinproductiontoo
-# Files in directories under this directory can be directly imported through
-# the library admin's "add dataset" tool
+# Directories of files contained in the following directory can be uploaded to a library from the Admin view
#library_import_dir = /var/opt/galaxy/import
+# The following can be configured to allow non-admin users to upload a directory of files. The
+# configured directory must contain sub-directories named the same as the non-admin user's Galaxy
+# login ( email ). The non-admin user is restricted to uploading files or sub-directories of files
+# contained in their directory.
+# user_library_import_dir = /var/opt/galaxy/import/users
# path to sendmail
sendmail_path = /usr/sbin/sendmail
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/c6031c4e6546
changeset: 2448:c6031c4e6546
user: ianschenck(a)Thugunit.local
date: Thu Apr 23 15:11:29 2009 -0400
description:
Merge with main.
0 file(s) affected in this change:
diffs (1069 lines):
diff -r 0cf5c25d1d2b -r c6031c4e6546 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Apr 23 14:42:35 2009 -0400
+++ b/lib/galaxy/model/__init__.py Thu Apr 23 15:11:29 2009 -0400
@@ -229,7 +229,7 @@
return des
@property
def activatable_datasets( self ):
- return [ hda for hda in self.datasets if not hda.dataset.purged ] #this needs to be a list
+ return [ hda for hda in self.datasets if not hda.dataset.deleted ] #this needs to be a list
class UserRoleAssociation( object ):
def __init__( self, user, role ):
@@ -707,6 +707,12 @@
@property
def active_components( self ):
return list( self.active_folders ) + list( self.active_datasets )
+ @property
+ def activatable_datasets( self ):
+ return [ ld for ld in self.datasets if not ld.library_dataset_dataset_association.dataset.deleted ] #this needs to be a list
+ @property #make this a relation
+ def activatable_folders( self ):
+ return [ folder for folder in self.folders if not folder.purged ] #this needs to be a list
class LibraryDataset( object ):
# This class acts as a proxy to the currently selected LDDA
@@ -743,6 +749,14 @@
name = property( get_name, set_name )
def display_name( self ):
self.library_dataset_dataset_association.display_name()
+ def get_purged( self ):
+ return self.library_dataset_dataset_association.dataset.purged
+ def set_purged( self, purged ):
+ if purged:
+ raise Exception( "Not implemented" )
+ if not purged and self.purged:
+ raise Exception( "Cannot unpurge once purged" )
+ purged = property( get_purged, set_purged )
def get_library_item_info_templates( self, template_list=[], restrict=False ):
# If restrict is True, we'll return only those templates directly associated with this LibraryDataset
if self.library_dataset_info_template_associations:
@@ -750,7 +764,7 @@
if restrict not in [ 'True', True ]:
self.folder.get_library_item_info_templates( template_list, restrict )
return template_list
-
+
class LibraryDatasetDatasetAssociation( DatasetInstance ):
def __init__( self,
copied_from_history_dataset_association=None,
diff -r 0cf5c25d1d2b -r c6031c4e6546 lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,74 @@
+import sys, logging, os, time
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+from migrate import migrate_engine
+from sqlalchemy import and_
+
+# load existing galaxy model, we are only changing data
+import galaxy.model
+from galaxy.model import mapping
+model = mapping.init( galaxy.model.Dataset.file_path, str( migrate_engine.url ) )
+
+def __guess_dataset_by_filename( filename ):
+ """Return a guessed dataset by filename"""
+ try:
+ fields = os.path.split( filename )
+ if fields:
+ if fields[-1].startswith( 'dataset_' ) and fields[-1].endswith( '.dat' ): #dataset_%d.dat
+ return model.Dataset.get( int( fields[-1][ len( 'dataset_' ): -len( '.dat' ) ] ) )
+ except:
+ pass #some parsing error, we can't guess Dataset
+ return None
+
+def upgrade():
+ log.debug( "Fixing a discrepancy concerning deleted shared history items." )
+ affected_items = 0
+ start_time = time.time()
+ for dataset in model.Dataset.filter( and_( model.Dataset.c.deleted == True, model.Dataset.c.purged == False ) ).all():
+ for dataset_instance in dataset.history_associations + dataset.library_associations:
+ if not dataset_instance.deleted:
+ dataset.deleted = False
+ if dataset.file_size in [ None, 0 ]:
+ dataset.set_size() #Restore filesize
+ affected_items += 1
+ break
+ galaxy.model.mapping.Session.flush()
+ log.debug( "%i items affected, and restored." % ( affected_items ) )
+ log.debug( "Time elapsed: %s" % ( time.time() - start_time ) )
+
+ #fix share before hda
+ log.debug( "Fixing a discrepancy concerning cleaning up deleted history items shared before HDAs." )
+ dataset_by_filename = {}
+ changed_associations = 0
+ start_time = time.time()
+ for dataset in model.Dataset.filter( model.Dataset.external_filename.like( '%dataset_%.dat' ) ).all():
+ if dataset.file_name in dataset_by_filename:
+ guessed_dataset = dataset_by_filename[ dataset.file_name ]
+ else:
+ guessed_dataset = __guess_dataset_by_filename( dataset.file_name )
+ if guessed_dataset and dataset.file_name != guessed_dataset.file_name:#not os.path.samefile( dataset.file_name, guessed_dataset.file_name ):
+ guessed_dataset = None
+ dataset_by_filename[ dataset.file_name ] = guessed_dataset
+
+ if guessed_dataset is not None and guessed_dataset.id != dataset.id: #could we have a self referential dataset?
+ for dataset_instance in dataset.history_associations + dataset.library_associations:
+ dataset_instance.dataset = guessed_dataset
+ changed_associations += 1
+ #mark original Dataset as deleted and purged, it is no longer in use, but do not delete file_name contents
+ dataset.deleted = True
+ dataset.external_filename = "Dataset was result of share before HDA, and has been replaced: %s mapped to Dataset %s" % ( dataset.external_filename, guessed_dataset.id )
+ dataset.purged = True #we don't really purge the file here, but we mark it as purged, since this dataset is now defunct
+ galaxy.model.mapping.Session.flush()
+ log.debug( "%i items affected, and restored." % ( changed_associations ) )
+ log.debug( "Time elapsed: %s" % ( time.time() - start_time ) )
+
+def downgrade():
+ log.debug( "Downgrade is not possible." )
+
diff -r 0cf5c25d1d2b -r c6031c4e6546 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py Thu Apr 23 14:42:35 2009 -0400
+++ b/lib/galaxy/util/__init__.py Thu Apr 23 15:11:29 2009 -0400
@@ -146,7 +146,7 @@
elif isinstance( value, list ):
return map(sanitize_text, value)
else:
- raise Exception, 'Unknown parameter type'
+ raise Exception, 'Unknown parameter type (%s)' % ( type( value ) )
class Params:
"""
diff -r 0cf5c25d1d2b -r c6031c4e6546 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py Thu Apr 23 14:42:35 2009 -0400
+++ b/lib/galaxy/web/controllers/admin.py Thu Apr 23 15:11:29 2009 -0400
@@ -822,11 +822,13 @@
msg=util.sanitize_text( msg ),
messagetype='error' ) )
created_ldda_ids = params.get( 'created_ldda_ids', '' )
+ show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
return trans.fill_template( '/admin/library/browse_library.mako',
library=trans.app.model.Library.get( id ),
created_ldda_ids=created_ldda_ids,
msg=msg,
- messagetype=messagetype )
+ messagetype=messagetype,
+ show_deleted=show_deleted )
@web.expose
@web.require_admin
def library( self, trans, id=None, **kwd ):
@@ -960,7 +962,8 @@
libraries=libraries,
deleted=True,
msg=msg,
- messagetype=messagetype )
+ messagetype=messagetype,
+ show_deleted = True )
@web.expose
@web.require_admin
def undelete_library( self, trans, **kwd ):
@@ -2040,6 +2043,50 @@
id=library_id,
msg=util.sanitize_text( msg ),
messagetype=messagetype ) )
+
+ @web.expose
+ @web.require_admin
+ def delete_library_item( self, trans, library_id = None, library_item_id = None, library_item_type = None ):
+ #this action will handle deleting all types of library items in library browsing mode
+ library_item_types = { 'library': trans.app.model.Library, 'folder': trans.app.model.LibraryFolder, 'dataset': trans.app.model.LibraryDataset, }
+ if library_item_type not in library_item_types:
+ raise ValueError( 'Bad library_item_type specified: %s' % library_item_types )
+ if library_item_id is None:
+ raise ValueError( 'library_item_id not specified' )
+ library_item = library_item_types[ library_item_type ].get( int( library_item_id ) )
+ library_item.deleted = True
+ library_item.flush()
+ #need to str because unicode isn't accepted...
+ msg = str( "%s '%s' has been marked deleted" % ( library_item_type, library_item.name ) )
+ messagetype = str( "done" )
+ if library_item_type == 'library' or library_id is None:
+ return self.browse_libraries( trans, msg = msg, messagetype = messagetype )
+ else:
+ return self.browse_library( trans, id = library_id , msg = msg, messagetype = messagetype )
+
+ @web.expose
+ @web.require_admin
+ def undelete_library_item( self, trans, library_id = None, library_item_id = None, library_item_type = None ):
+ #this action will handle deleting all types of library items in library browsing mode
+ library_item_types = { 'library': trans.app.model.Library, 'folder': trans.app.model.LibraryFolder, 'dataset': trans.app.model.LibraryDataset, }
+ if library_item_type not in library_item_types:
+ raise ValueError( 'Bad library_item_type specified: %s' % library_item_types )
+ if library_item_id is None:
+ raise ValueError( 'library_item_id not specified' )
+ library_item = library_item_types[ library_item_type ].get( int( library_item_id ) )
+ if library_item.purged:
+ raise ValueError( '%s %s cannot be undeleted' % ( library_item_type, library_item.name ) )
+ library_item.deleted = False
+ library_item.flush()
+ msg = str( "%s '%s' has been undeleted" % ( library_item_type, library_item.name ) )
+ messagetype = str( "done" )
+ if library_item_type == 'library' or library_id is None:
+ return self.browse_libraries( trans, msg = msg, messagetype = messagetype )
+ else:
+ return self.browse_library( trans, id = library_id , msg = msg, messagetype = messagetype )
+
+
+
#(a)web.expose
#(a)web.require_admin
#def delete_dataset( self, trans, id=None, **kwd):
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/cleanup_datasets.py
--- a/scripts/cleanup_datasets/cleanup_datasets.py Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/cleanup_datasets.py Thu Apr 23 15:11:29 2009 -0400
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-import sys, os, time, ConfigParser
+import sys, os, time, ConfigParser, shutil
from datetime import datetime, timedelta
from time import strftime
from optparse import OptionParser
@@ -15,7 +15,7 @@
pkg_resources.require( "SQLAlchemy >= 0.4" )
-from galaxy.model.orm import *
+from galaxy.model.orm import and_, eagerload
assert sys.version_info[:2] >= ( 2, 4 )
@@ -23,271 +23,172 @@
parser = OptionParser()
parser.add_option( "-d", "--days", dest="days", action="store", type="int", help="number of days (60)", default=60 )
parser.add_option( "-r", "--remove_from_disk", action="store_true", dest="remove_from_disk", help="remove datasets from disk when purged", default=False )
- parser.add_option( "-1", "--info_delete_userless_histories", action="store_true", dest="info_delete_userless_histories", default=False, help="info about the histories and datasets that will be affected by delete_userless_histories()" )
- parser.add_option( "-2", "--delete_userless_histories", action="store_true", dest="delete_userless_histories", default=False, help="delete userless histories and datasets" )
- parser.add_option( "-3", "--info_purge_histories", action="store_true", dest="info_purge_histories", default=False, help="info about histories and datasets that will be affected by purge_histories()" )
- parser.add_option( "-4", "--purge_histories", action="store_true", dest="purge_histories", default=False, help="purge deleted histories" )
- parser.add_option( "-5", "--info_purge_datasets", action="store_true", dest="info_purge_datasets", default=False, help="info about the datasets that will be affected by purge_datasets()" )
- parser.add_option( "-6", "--purge_datasets", action="store_true", dest="purge_datasets", default=False, help="purge deleted datasets" )
+ parser.add_option( "-i", "--info_only", action="store_true", dest="info_only", help="info about the requested action", default=False )
+
+ parser.add_option( "-1", "--delete_userless_histories", action="store_true", dest="delete_userless_histories", default=False, help="delete userless histories and datasets" )
+
+ parser.add_option( "-2", "--purge_histories", action="store_true", dest="purge_histories", default=False, help="purge deleted histories" )
+
+ parser.add_option( "-3", "--purge_datasets", action="store_true", dest="purge_datasets", default=False, help="purge deleted datasets" )
+
+ parser.add_option( "-4", "--purge_libraries", action="store_true", dest="purge_libraries", default=False, help="purge deleted libraries" )
+
+ parser.add_option( "-5", "--purge_folders", action="store_true", dest="purge_folders", default=False, help="purge deleted library folders" )
+
+
( options, args ) = parser.parse_args()
ini_file = args[0]
- if not ( options.info_delete_userless_histories ^ options.delete_userless_histories ^ \
- options.info_purge_histories ^ options.purge_histories ^ \
- options.info_purge_datasets ^ options.purge_datasets ):
+ if not ( options.purge_folders ^ options.delete_userless_histories ^ \
+ options.purge_libraries ^ options.purge_histories ^ \
+ options.purge_datasets ):
parser.print_help()
sys.exit(0)
+
+ if options.remove_from_disk and options.info_only:
+ parser.error( "remove_from_disk and info_only are mutually exclusive" )
conf_parser = ConfigParser.ConfigParser( {'here':os.getcwd()} )
conf_parser.read( ini_file )
configuration = {}
for key, value in conf_parser.items( "app:main" ):
configuration[key] = value
- database_connection = configuration['database_connection']
+
+ if 'database_connection' in configuration:
+ database_connection = configuration['database_connection']
+ else:
+ database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % configuration["database_file"]
file_path = configuration['file_path']
app = CleanupDatasetsApplication( database_connection=database_connection, file_path=file_path )
- h = app.model.History
- d = app.model.Dataset
- m = app.model.MetadataFile
cutoff_time = datetime.utcnow() - timedelta( days=options.days )
now = strftime( "%Y-%m-%d %H:%M:%S" )
-
+
print "\n# %s - Handling stuff older than %i days\n" % ( now, options.days )
-
- if options.info_delete_userless_histories:
- info_delete_userless_histories( h, cutoff_time )
- elif options.delete_userless_histories:
- delete_userless_histories( h, d, cutoff_time )
- if options.info_purge_histories:
- info_purge_histories( h, d, cutoff_time )
+
+ if options.info_only:
+ print "# Displaying info only ( --info_only )\n"
+ elif options.remove_from_disk:
+ print "# Datasets will be removed from disk.\n"
+ else:
+ print "# Datasets will NOT be removed from disk.\n"
+
+ if options.delete_userless_histories:
+ delete_userless_histories( app, cutoff_time, info_only = options.info_only )
elif options.purge_histories:
- if options.remove_from_disk:
- print "# Datasets will be removed from disk...\n"
- else:
- print "# Datasets will NOT be removed from disk...\n"
- purge_histories( h, d, m, cutoff_time, options.remove_from_disk )
- elif options.info_purge_datasets:
- info_purge_datasets( d, cutoff_time )
+ purge_histories( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
elif options.purge_datasets:
- if options.remove_from_disk:
- print "# Datasets will be removed from disk...\n"
- else:
- print "# Datasets will NOT be removed from disk...\n"
- purge_datasets( d, m, cutoff_time, options.remove_from_disk )
+ purge_datasets( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+ elif options.purge_libraries:
+ purge_libraries( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+ elif options.purge_folders:
+ purge_folders( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+
sys.exit(0)
-def info_delete_userless_histories( h, cutoff_time ):
- # Provide info about the histories and datasets that will be affected if the delete_userless_histories function is executed.
+def delete_userless_histories( app, cutoff_time, info_only = False ):
+ # Deletes userless histories whose update_time value is older than the cutoff_time.
+ # The purge history script will handle marking DatasetInstances as deleted.
+ # Nothing is removed from disk yet.
history_count = 0
- dataset_count = 0
- histories = h.filter( and_( h.table.c.user_id==None,
- h.table.c.deleted==False,
- h.table.c.update_time < cutoff_time ) ) \
- .options( eagerload( 'active_datasets' ) ).all()
-
- print '# The following datasets and associated userless histories will be deleted'
+ print '# The following datasets and associated userless histories have been deleted'
start = time.clock()
+ histories = app.model.History.filter( and_( app.model.History.table.c.user_id==None,
+ app.model.History.table.c.deleted==False,
+ app.model.History.table.c.update_time < cutoff_time ) ).all()# \
for history in histories:
- for dataset_assoc in history.active_datasets:
- if not dataset_assoc.deleted:
- # This check is not necessary since 'active_datasets' are not
- # deleted, but just being cautious
- print "dataset_%d" %dataset_assoc.dataset_id
- dataset_count += 1
+ if not info_only:
+ history.deleted = True
print "%d" % history.id
history_count += 1
+ app.model.flush()
stop = time.clock()
- print "# %d histories ( including a total of %d datasets ) will be deleted\n" %( history_count, dataset_count )
- print "Elapsed time: ", stop - start, "\n"
-
-def delete_userless_histories( h, d, cutoff_time ):
- # Deletes userless histories whose update_time value is older than the cutoff_time.
- # The datasets associated with each history are also deleted. Nothing is removed from disk.
- history_count = 0
- dataset_count = 0
-
- print '# The following datasets and associated userless histories have been deleted'
- start = time.clock()
- histories = h.filter( and_( h.table.c.user_id==None,
- h.table.c.deleted==False,
- h.table.c.update_time < cutoff_time ) ) \
- .options( eagerload( 'active_datasets' ) ).all()
- for history in histories:
- for dataset_assoc in history.active_datasets:
- if not dataset_assoc.deleted:
- # Mark all datasets as deleted
- datasets = d.filter( d.table.c.id==dataset_assoc.dataset_id ).all()
- for dataset in datasets:
- if not dataset.deleted:
- dataset.deleted = True
- dataset.flush()
- # Mark the history_dataset_association as deleted
- dataset_assoc.deleted = True
- dataset_assoc.clear_associated_files()
- dataset_assoc.flush()
- print "dataset_%d" % dataset_assoc.dataset_id
- dataset_count += 1
- history.deleted = True
- history.flush()
- print "%d" % history.id
- history_count += 1
- stop = time.clock()
- print "# Deleted %d histories ( including a total of %d datasets )\n" %( history_count, dataset_count )
+ print "# Deleted %d histories.\n" % ( history_count )
print "Elapsed time: ", stop - start, "\n"
-def info_purge_histories( h, d, cutoff_time ):
- # Provide info about the histories and datasets that will be affected if the purge_histories function is executed.
+
+def purge_histories( app, cutoff_time, remove_from_disk, info_only = False ):
+ # Purges deleted histories whose update_time is older than the cutoff_time.
+ # The dataset associations of each history are also marked as deleted.
+ # The Purge Dataset method will purge each Dataset as necessary
+ # history.purged == True simply means that it can no longer be undeleted
+ # i.e. all associated datasets are marked as deleted
history_count = 0
- dataset_count = 0
- disk_space = 0
- print '# The following datasets and associated deleted histories will be purged'
+ print '# The following datasets and associated deleted histories have been purged'
start = time.clock()
- histories = h.filter( and_( h.table.c.deleted==True,
- h.table.c.purged==False,
- h.table.c.update_time < cutoff_time ) ) \
+ histories = app.model.History.filter( and_( app.model.History.table.c.deleted==True,
+ app.model.History.table.c.purged==False,
+ app.model.History.table.c.update_time < cutoff_time ) ) \
.options( eagerload( 'datasets' ) ).all()
for history in histories:
for dataset_assoc in history.datasets:
- # Datasets can only be purged if their HistoryDatasetAssociation has been deleted.
- if dataset_assoc.deleted:
- datasets = d.filter( d.table.c.id==dataset_assoc.dataset_id ).all()
- for dataset in datasets:
- if dataset.purgable and not dataset.purged:
- print "%s" % dataset.file_name
- dataset_count += 1
- try:
- disk_space += dataset.file_size
- except:
- pass
+ _purge_dataset_instance( dataset_assoc, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
+ if not info_only:
+ history.purged = True
print "%d" % history.id
history_count += 1
+ app.model.flush()
stop = time.clock()
- print '# %d histories ( including a total of %d datasets ) will be purged. Freed disk space: ' %( history_count, dataset_count ), disk_space, '\n'
+ print '# Purged %d histories.' % ( history_count ), '\n'
print "Elapsed time: ", stop - start, "\n"
-def purge_histories( h, d, m, cutoff_time, remove_from_disk ):
- # Purges deleted histories whose update_time is older than the cutoff_time.
- # The datasets associated with each history are also purged.
- history_count = 0
- dataset_count = 0
- disk_space = 0
- file_size = 0
- errors = False
- print '# The following datasets and associated deleted histories have been purged'
+def purge_libraries( app, cutoff_time, remove_from_disk, info_only = False ):
+ # Purges deleted libraries whose update_time is older than the cutoff_time.
+ # The dataset associations of each library are also marked as deleted.
+ # The Purge Dataset method will purge each Dataset as necessary
+ # library.purged == True simply means that it can no longer be undeleted
+ # i.e. all associated LibraryDatasets/folders are marked as deleted
+ library_count = 0
+ print '# The following libraries and associated folders have been purged'
start = time.clock()
- histories = h.filter( and_( h.table.c.deleted==True,
- h.table.c.purged==False,
- h.table.c.update_time < cutoff_time ) ) \
- .options( eagerload( 'datasets' ) ).all()
- for history in histories:
- errors = False
- for dataset_assoc in history.datasets:
- if dataset_assoc.deleted:
- datasets = d.filter( d.table.c.id==dataset_assoc.dataset_id ).all()
- for dataset in datasets:
- if dataset.purgable and not dataset.purged:
- file_size = dataset.file_size
- dataset.deleted = True
- dataset.file_size = 0
- if remove_from_disk:
- dataset.flush()
- errmsg = purge_dataset( dataset, d, m )
- if errmsg:
- errors = True
- print errmsg
- else:
- dataset.purged = True
- dataset.flush()
- print "%s" % dataset.file_name
- # Mark all associated MetadataFiles as deleted and purged
- print "The following metadata files associated with dataset '%s' have been marked purged" % dataset.file_name
- for hda in dataset.history_associations:
- for metadata_file in m.filter( m.table.c.hda_id==hda.id ).all():
- metadata_file.deleted = True
- metadata_file.purged = True
- metadata_file.flush()
- print "%s" % metadata_file.file_name()
- for lda in dataset.library_associations:
- for metadata_file in m.filter( m.table.c.lda_id==lda.id ).all():
- metadata_file.deleted = True
- metadata_file.purged = True
- metadata_file.flush()
- print "%s" % metadata_file.file_name()
- dataset_count += 1
- try:
- disk_space += file_size
- except:
- pass
- if not errors:
- history.purged = True
- history.flush()
- print "%d" % history.id
- history_count += 1
+ libraries = app.model.Library.filter( and_( app.model.Library.table.c.deleted==True,
+ app.model.Library.table.c.purged==False,
+ app.model.Library.table.c.update_time < cutoff_time ) ).all()
+ for library in libraries:
+ _purge_folder( library.root_folder, app, remove_from_disk, info_only = info_only )
+ if not info_only:
+ library.purged = True
+ print "%d" % library.id
+ library_count += 1
+ app.model.flush()
stop = time.clock()
- print '# Purged %d histories ( including a total of %d datasets ). Freed disk space: ' %( history_count, dataset_count ), disk_space, '\n'
+ print '# Purged %d libraries.' % ( library_count ), '\n'
print "Elapsed time: ", stop - start, "\n"
-def info_purge_datasets( d, cutoff_time ):
- # Provide info about the datasets that will be affected if the purge_datasets function is executed.
- dataset_count = 0
- disk_space = 0
- print '# The following deleted datasets will be purged'
+def purge_folders( app, cutoff_time, remove_from_disk, info_only = False ):
+ # Purges deleted folders whose update_time is older than the cutoff_time.
+ # The dataset associations of each folder are also marked as deleted.
+ # The Purge Dataset method will purge each Dataset as necessary
+ # libraryFolder.purged == True simply means that it can no longer be undeleted
+ # i.e. all associated LibraryDatasets/folders are marked as deleted
+ folder_count = 0
+ print '# The following folders have been purged'
start = time.clock()
- datasets = d.filter( and_( d.table.c.deleted==True,
- d.table.c.purgable==True,
- d.table.c.purged==False,
- d.table.c.update_time < cutoff_time ) ).all()
- for dataset in datasets:
- print "%s" % dataset.file_name
- dataset_count += 1
- try:
- disk_space += dataset.file_size
- except:
- pass
+ folders = app.model.LibraryFolder.filter( and_( app.model.LibraryFolder.table.c.deleted==True,
+ app.model.LibraryFolder.table.c.purged==False,
+ app.model.LibraryFolder.table.c.update_time < cutoff_time ) ).all()
+ for folder in folders:
+ _purge_folder( folder, app, remove_from_disk, info_only = info_only )
+ print "%d" % folder.id
+ folder_count += 1
stop = time.clock()
- print '# %d datasets will be purged. Freed disk space: ' %dataset_count, disk_space, '\n'
+ print '# Purged %d folders.' % ( folder_count ), '\n'
print "Elapsed time: ", stop - start, "\n"
-def purge_datasets( d, m, cutoff_time, remove_from_disk ):
+def purge_datasets( app, cutoff_time, remove_from_disk, info_only = False ):
# Purges deleted datasets whose update_time is older than cutoff_time. Files may or may
# not be removed from disk.
dataset_count = 0
disk_space = 0
- file_size = 0
print '# The following deleted datasets have been purged'
start = time.clock()
- datasets = d.filter( and_( d.table.c.deleted==True,
- d.table.c.purgable==True,
- d.table.c.purged==False,
- d.table.c.update_time < cutoff_time ) ).all()
+ datasets = app.model.Dataset.filter( and_( app.model.Dataset.table.c.deleted==True,
+ app.model.Dataset.table.c.purgable==True,
+ app.model.Dataset.table.c.purged==False,
+ app.model.Dataset.table.c.update_time < cutoff_time ) ).all()
for dataset in datasets:
file_size = dataset.file_size
- if remove_from_disk:
- errmsg = purge_dataset( dataset, d, m )
- if errmsg:
- print errmsg
- else:
- dataset_count += 1
- else:
- dataset.purged = True
- dataset.file_size = 0
- dataset.flush()
- print "%s" % dataset.file_name
- # Mark all associated MetadataFiles as deleted and purged
- print "The following metadata files associated with dataset '%s' have been marked purged" % dataset.file_name
- for hda in dataset.history_associations:
- for metadata_file in m.filter( m.table.c.hda_id==hda.id ).all():
- metadata_file.deleted = True
- metadata_file.purged = True
- metadata_file.flush()
- print "%s" % metadata_file.file_name()
- for lda in dataset.library_associations:
- for metadata_file in m.filter( m.table.c.lda_id==lda.id ).all():
- metadata_file.deleted = True
- metadata_file.purged = True
- metadata_file.flush()
- print "%s" % metadata_file.file_name()
- dataset_count += 1
+ _purge_dataset( dataset, remove_from_disk, info_only = info_only )
+ dataset_count += 1
try:
disk_space += file_size
except:
@@ -298,68 +199,90 @@
print '# Freed disk space: ', disk_space, '\n'
print "Elapsed time: ", stop - start, "\n"
-def purge_dataset( dataset, d, m ):
- # Removes the file from disk and updates the database accordingly.
+
+def _purge_dataset_instance( dataset_instance, app, remove_from_disk, include_children = True, info_only = False ):
+ #purging a dataset instance marks the instance as deleted,
+ #and marks the dataset as deleted if it is not associated with another DatasetInstance that is not deleted
+ if not info_only:
+ dataset_instance.mark_deleted( include_children = include_children )
+ dataset_instance.clear_associated_files()
+ dataset_instance.flush()
+ dataset_instance.dataset.refresh()
+ if _dataset_is_deletable( dataset_instance.dataset ):
+ _delete_dataset( dataset_instance.dataset, app, remove_from_disk, info_only = info_only )
+ #need to purge children here
+ if include_children:
+ for child in dataset_instance.children:
+ _purge_dataset_instance( child, app, remove_from_disk, include_children = include_children, info_only = info_only )
+
+def _dataset_is_deletable( dataset ):
+ #a dataset is deletable when it no longer has any non-deleted associations
+ return not bool( dataset.active_history_associations or dataset.active_library_associations )
+
+def _delete_dataset( dataset, app, remove_from_disk, info_only = False ):
+ #marks a base dataset as deleted, hdas/ldas associated with dataset can no longer be undeleted
+ #metadata files attached to associated dataset Instances is removed now
+ if not _dataset_is_deletable( dataset ):
+ print "# This Dataset (%i) is not deletable, associated Metadata Files will not be removed.\n" % ( dataset.id )
+ else:
+ # Mark all associated MetadataFiles as deleted and purged and remove them from disk
+ print "The following metadata files attached to associations of Dataset '%s' have been purged:" % dataset.id
+ metadata_files = []
+ #lets create a list of metadata files, then perform actions on them
+ for hda in dataset.history_associations:
+ for metadata_file in app.model.MetadataFile.filter( app.model.MetadataFile.table.c.hda_id==hda.id ).all():
+ metadata_files.append( metadata_file )
+ for lda in dataset.library_associations:
+ for metadata_file in app.model.MetadataFile.filter( app.model.MetadataFile.table.c.lda_id==lda.id ).all():
+ metadata_files.append( metadata_file )
+ for metadata_file in metadata_files:
+ if not info_only:
+ if remove_from_disk:
+ try:
+ os.unlink( metadata_file.file_name )
+ except Exception, e:
+ print "# Error, exception: %s caught attempting to purge metadata file %s\n" %( str( e ), metadata_file.file_name )
+ metadata_file.purged = True
+ metadata_file.deleted = True
+ #metadata_file.flush()
+ print "%s" % metadata_file.file_name
+ print
+ dataset.deleted = True
+ #dataset.flush()
+ app.model.flush()
+
+def _purge_dataset( dataset, remove_from_disk, info_only = False ):
if dataset.deleted:
- purgable = True
- # Remove files from disk and update the database
try:
- # See if the dataset has been shared
- if dataset.external_filename:
- # This check handles the pre-history_dataset_association approach to sharing.
- shared_data = d.filter( and_( d.table.c.external_filename==dataset.external_filename,
- d.table.c.deleted==False ) ).all()
- if shared_data:
- purgable = False
- if purgable:
- # This check handles the history_dataset_association approach to sharing.
- for shared_data in dataset.history_associations:
- # Check to see if another dataset is using this file. This happens when a user shares
- # their history with another user. In this case, a new record is created in the dataset
- # table for each dataset, but the dataset records point to the same data file on disk. So
- # if 1 of the 2 users deletes the dataset from their history but the other doesn't, we need
- # to keep the dataset on disk for the 2nd user.
- if not shared_data.deleted:
- purgable = False
- break
- if purgable:
- # This check handles the library_dataset_dataset_association approach to sharing.
- for shared_data in dataset.library_associations:
- if not shared_data.deleted:
- purgable = False
- break
- if purgable:
- dataset.purged = True
- dataset.file_size = 0
- dataset.flush()
- # Remove dataset file from disk
- os.unlink( dataset.file_name )
+ if dataset.purgable and _dataset_is_deletable( dataset ):
print "%s" % dataset.file_name
- # Mark all associated MetadataFiles as deleted and purged and remove them from disk
- print "The following metadata files associated with dataset '%s' have been purged" % dataset.file_name
- for hda in dataset.history_associations:
- for metadata_file in m.filter( m.table.c.hda_id==hda.id ).all():
- os.unlink( metadata_file.file_name() )
- metadata_file.deleted = True
- metadata_file.purged = True
- metadata_file.flush()
- print "%s" % metadata_file.file_name()
- for lda in dataset.library_associations:
- for metadata_file in m.filter( m.table.c.lda_id==lda.id ).all():
- metadata_file.deleted = True
- metadata_file.purged = True
- metadata_file.flush()
- print "%s" % metadata_file.file_name()
- try:
- # Remove associated extra files from disk if they exist
- os.unlink( dataset.extra_files_path )
- except:
- pass
+ if not info_only:
+ # Remove files from disk and update the database
+ if remove_from_disk:
+ os.unlink( dataset.file_name )
+ # Remove associated extra files from disk if they exist
+ if dataset.extra_files_path and os.path.exists( dataset.extra_files_path ):
+ shutil.rmtree( dataset.extra_files_path ) #we need to delete the directory and its contents; os.unlink would always fail on a directory
+ dataset.purged = True
+ else:
+ print "# This dataset (%i) is not purgable, the file (%s) will not be removed.\n" % ( dataset.id, dataset.file_name )
except Exception, exc:
- return "# Error, exception: %s caught attempting to purge %s\n" %( str( exc ), dataset.file_name )
+ print "# Error, exception: %s caught attempting to purge %s\n" %( str( exc ), dataset.file_name )
else:
- return "# Error: '%s' has not previously been deleted, so it cannot be purged\n" %dataset.file_name
- return ""
+ print "# Error: '%s' has not previously been deleted, so it cannot be purged\n" % dataset.file_name
+ print ""
+
+def _purge_folder( folder, app, remove_from_disk, info_only = False ):
+ """Purges a folder and its contents, recursively"""
+ for ld in folder.datasets:
+ ld.deleted = True
+ for ldda in [ld.library_dataset_dataset_association] + ld.expired_datasets:
+ _purge_dataset_instance( ldda, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
+ for sub_folder in folder.folders:
+ _purge_folder( sub_folder, app, remove_from_disk, info_only = info_only )
+ if not info_only:
+ folder.purged = True
+ folder.flush()
class CleanupDatasetsApplication( object ):
"""Encapsulates the state of a Universe application"""
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/delete_userless_histories.sh
--- a/scripts/cleanup_datasets/delete_userless_histories.sh Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/delete_userless_histories.sh Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -2 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -1 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/delete_userless_histories_main.sh
--- a/scripts/cleanup_datasets/delete_userless_histories_main.sh Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/delete_userless_histories_main.sh Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -2 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -1 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_datasets.sh
--- a/scripts/cleanup_datasets/purge_datasets.sh Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/purge_datasets.sh Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -6 -r $@ >> ./scripts/cleanup_datasets/purge_datasets.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -3 -r $@ >> ./scripts/cleanup_datasets/purge_datasets.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_datasets_main.sh
--- a/scripts/cleanup_datasets/purge_datasets_main.sh Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/purge_datasets_main.sh Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -6 -r $@ >> ./scripts/cleanup_datasets/purge_datasets.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -3 -r $@ >> ./scripts/cleanup_datasets/purge_datasets.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_folders.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/cleanup_datasets/purge_folders.sh Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -5 -r $@ >> ./scripts/cleanup_datasets/purge_folders.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_folders_main.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/cleanup_datasets/purge_folders_main.sh Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -5 -r $@ >> ./scripts/cleanup_datasets/purge_folders.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_histories.sh
--- a/scripts/cleanup_datasets/purge_histories.sh Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/purge_histories.sh Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -4 -r $@ >> ./scripts/cleanup_datasets/purge_histories.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -2 -r $@ >> ./scripts/cleanup_datasets/purge_histories.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_histories_main.sh
--- a/scripts/cleanup_datasets/purge_histories_main.sh Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/purge_histories_main.sh Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
#!/bin/sh
cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -4 -r $@ >> ./scripts/cleanup_datasets/purge_histories.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -2 -r $@ >> ./scripts/cleanup_datasets/purge_histories.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_libraries.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/cleanup_datasets/purge_libraries.sh Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -4 -r $@ >> ./scripts/cleanup_datasets/purge_libraries.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_libraries_main.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/cleanup_datasets/purge_libraries_main.sh Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -4 -r $@ >> ./scripts/cleanup_datasets/purge_libraries.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 static/june_2007_style/base.css.tmpl
--- a/static/june_2007_style/base.css.tmpl Thu Apr 23 14:42:35 2009 -0400
+++ b/static/june_2007_style/base.css.tmpl Thu Apr 23 15:11:29 2009 -0400
@@ -563,6 +563,7 @@
color: #333;
font-size: 110%;
font-weight: bold;
+ font-style: normal;
white-space: nowrap;
position: absolute;
z-index: 20000;
diff -r 0cf5c25d1d2b -r c6031c4e6546 static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css Thu Apr 23 14:42:35 2009 -0400
+++ b/static/june_2007_style/blue/base.css Thu Apr 23 15:11:29 2009 -0400
@@ -559,6 +559,7 @@
color: #333;
font-size: 110%;
font-weight: bold;
+ font-style: normal;
white-space: nowrap;
position: absolute;
z-index: 20000;
diff -r 0cf5c25d1d2b -r c6031c4e6546 static/june_2007_style/blue/library.css
--- a/static/june_2007_style/blue/library.css Thu Apr 23 14:42:35 2009 -0400
+++ b/static/june_2007_style/blue/library.css Thu Apr 23 15:11:29 2009 -0400
@@ -4,6 +4,10 @@
.datasetHighlighted {
background-color: #C1C9E5;
+}
+
+.libraryItemDeleted-True {
+ font-style: italic;
}
div.historyItemBody {
diff -r 0cf5c25d1d2b -r c6031c4e6546 static/june_2007_style/library.css.tmpl
--- a/static/june_2007_style/library.css.tmpl Thu Apr 23 14:42:35 2009 -0400
+++ b/static/june_2007_style/library.css.tmpl Thu Apr 23 15:11:29 2009 -0400
@@ -4,6 +4,10 @@
.datasetHighlighted {
background-color: $menu_bg_over;
+}
+
+.libraryItemDeleted-True {
+ font-style: italic;
}
div.historyItemBody {
diff -r 0cf5c25d1d2b -r c6031c4e6546 templates/admin/library/browse_library.mako
--- a/templates/admin/library/browse_library.mako Thu Apr 23 14:42:35 2009 -0400
+++ b/templates/admin/library/browse_library.mako Thu Apr 23 15:11:29 2009 -0400
@@ -93,7 +93,7 @@
%>
%if not root_folder:
<li class="folderRow libraryOrFolderRow" style="padding-left: ${pad}px;">
- <div class="rowTitle">
+ <div class="rowTitle libraryItemDeleted-${parent.deleted}">
<img src="${h.url_for( expander )}" class="expanderIcon"/><img src="${h.url_for( folder )}" class="rowIcon"/>
${parent.name}
%if parent.description:
@@ -101,7 +101,7 @@
%endif
<a id="folder-${parent.id}-popup" class="popup-arrow" style="display: none;">▼</a>
</div>
- %if not deleted:
+ %if not parent.deleted:
<%
library_item_ids = {}
library_item_ids[ 'folder' ] = parent.id
@@ -117,10 +117,11 @@
<a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, folder_id=parent.id, new_template=True )}">Add an information template to this folder</a>
%endif
<a class="action-button" href="${h.url_for( controller='admin', action='folder', permissions=True, id=parent.id, library_id=library_id )}">Edit this folder's permissions</a>
- ## TODO: need to revamp the way folders and contained LibraryDatasets are deleted
- ##%if subfolder:
- ## <a class="action-button" confirm="Click OK to delete the folder '${parent.name}'" href="${h.url_for( action='folder', delete=True, id=parent.id, library_id=library_id )}">Remove this folder and its contents from the library</a>
- ##%endif
+ <a class="action-button" confirm="Click OK to delete the folder '${parent.name}'" href="${h.url_for( controller='admin', action='delete_library_item', library_id=library_id, library_item_id=parent.id, library_item_type='folder' )}">Remove this folder and its contents from the library</a>
+ </div>
+ %else:
+ <div popupmenu="folder-${parent.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_id=library_id, library_item_id=parent.id, library_item_type='folder' )}">Undelete this folder</a>
</div>
%endif
</li>
@@ -130,10 +131,10 @@
%else:
<ul>
%endif
- %if library.deleted:
+ %if deleted:
<%
- parent_folders = parent.folders
- parent_datasets = parent.datasets
+ parent_folders = parent.activatable_folders
+ parent_datasets = parent.activatable_datasets
%>
%else:
<%
@@ -142,7 +143,7 @@
%>
%endif
%for folder in name_sorted( parent_folders ):
- ${render_folder( folder, pad, library.deleted, created_ldda_ids, library.id )}
+ ${render_folder( folder, pad, deleted, created_ldda_ids, library.id )}
%endfor
%for library_dataset in name_sorted( parent_datasets ):
<%
@@ -182,17 +183,20 @@
<table cellspacing="0" cellpadding="0" border="0" width="100%" class="libraryTitle">
<th width="*">
<img src="${h.url_for( '/static/images/silk/resultset_bottom.png' )}" class="expanderIcon"/><img src="${h.url_for( '/static/images/silk/book_open.png' )}" class="rowIcon"/>
- ${library.name}
- %if library.description:
- <i>- ${library.description}</i>
- %endif
+ <span class="libraryItemDeleted-${library.deleted}">
+ ${library.name}
+ %if library.description:
+ <i>- ${library.description}</i>
+ %endif
+ </span>
<a id="library-${library.id}-popup" class="popup-arrow" style="display: none;">▼</a>
+ <div popupmenu="library-${library.id}-popup">
%if not library.deleted:
<%
library_item_ids = {}
library_item_ids[ 'library' ] = library.id
%>
- <div popupmenu="library-${library.id}-popup">
+
<a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, information=True )}">Edit this library's information</a>
%if library.library_info_template_associations:
<% template = library.get_library_item_info_templates( template_list=[], restrict=False )[0] %>
@@ -201,15 +205,16 @@
<a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, new_template=True )}">Add an information template to this library</a>
%endif
<a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, permissions=True )}">Edit this library's permissions</a>
- ## TODO: need to revamp the way libraries, folders, and contained LibraryDatasets are deleted
- ##<a class="action-button" confirm="Current state will not be saved, so undeleting the library will restore all of its contents. Click OK to delete the library named '${library.name}'?" href="${h.url_for( controller='admin', action='library', delete=True, id=library.id )}">Delete this library and its contents</a>
- </div>
- ##%else:
- ## <div popupmenu="library-${library.id}-popup">
- ## <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library', id=library.id )}">Undelete this library and its contents</a>
- ## <a class="action-button" href="${h.url_for( controller='admin', action='purge_library', id=library.id )}">Purge this library and its contents</a>
- ## </div>
+ <a class="action-button" confirm="Current state will not be saved, so undeleting the library will restore all of its contents. Click OK to delete the library named '${library.name}'?" href="${h.url_for( controller='admin', action='delete_library_item', library_item_type='library', library_item_id=library.id )}">Delete this library and its contents</a>
+ %if show_deleted:
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=False )}">Hide deleted library items</a>
+ %else:
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=True )}">Show deleted library items</a>
+ %endif
+ %elif not library.purged:
+ <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_item_type='library', library_item_id=library.id )}">Undelete this library</a>
%endif
+ </div>
</th>
<th width="300">Information</th>
<th width="150">Uploaded By</th>
@@ -218,7 +223,7 @@
</div>
</li>
<ul>
- ${render_folder( library.root_folder, 0, library.deleted, created_ldda_ids, library.id )}
+ ${render_folder( library.root_folder, 0, library.deleted or show_deleted, created_ldda_ids, library.id )}
</ul>
<br/>
</ul>
diff -r 0cf5c25d1d2b -r c6031c4e6546 templates/admin/library/common.mako
--- a/templates/admin/library/common.mako Thu Apr 23 14:42:35 2009 -0400
+++ b/templates/admin/library/common.mako Thu Apr 23 15:11:29 2009 -0400
@@ -1,6 +1,6 @@
<% from time import strftime %>
-<%def name="render_dataset( library_dataset, selected, library )">
+<%def name="render_dataset( library_dataset, selected, library, show_deleted = False )">
<%
## The received data must always be a LibraryDataset object, but the object id passed to methods from the drop down menu
## should be the underlying ldda id to prevent id collision ( which could happen when displaying children, which are always
@@ -27,13 +27,15 @@
%else:
<input type="checkbox" name="ldda_ids" value="${ldda.id}"/>
%endif
- <a href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, info=True )}"><b>${ldda.name[:50]}</b></a>
- %if not library.deleted:
+ <span class="libraryItemDeleted-${library_dataset.deleted}">
+ <a href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, info=True )}"><b>${ldda.name[:50]}</b></a>
+ </span>
+ <a id="dataset-${ldda.id}-popup" class="popup-arrow" style="display: none;">▼</a>
+ %if not library_dataset.deleted:
<%
library_item_ids = {}
library_item_ids[ 'ldda' ] = ldda.id
%>
- <a id="dataset-${ldda.id}-popup" class="popup-arrow" style="display: none;">▼</a>
<div popupmenu="dataset-${ldda.id}-popup">
<a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, edit_info=True )}">Edit this dataset's information</a>
## We're disabling the ability to add templates at the LDDA and LibraryDataset level, but will leave this here for possible future use
@@ -46,7 +48,11 @@
<a class="action-button" href="${h.url_for( controller='admin', action='download_dataset_from_folder', id=ldda.id, library_id=library.id )}">Download this dataset</a>
%endif
##TODO: need to revamp the way we remove datasets from disk.
- ##<a class="action-button" confirm="Click OK to remove dataset '${ldda.name}'?" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, delete=True )}">Remove this dataset from the library</a>
+ <a class="action-button" confirm="Click OK to remove dataset '${ldda.name}'?" href="${h.url_for( controller='admin', action='delete_library_item', library_id=library.id, library_item_id=library_dataset.id, library_item_type='dataset' )}">Remove this dataset from the library</a>
+ </div>
+ %else:
+ <div popupmenu="dataset-${ldda.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_id=library.id, library_item_id=library_dataset.id, library_item_type='dataset' )}">Undelete this dataset</a>
</div>
%endif
</td>
1
0
16 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/6125f71c838a
changeset: 2449:6125f71c838a
user: James Taylor <james(a)jamestaylor.org>
date: Thu Jun 11 12:20:03 2009 -0400
description:
Merging Ian's trackster update with current head
11 file(s) affected in this change:
datatypes_conf.xml.sample
lib/galaxy/datatypes/data.py
lib/galaxy/datatypes/interval.py
lib/galaxy/datatypes/registry.py
lib/galaxy/tools/actions/__init__.py
lib/galaxy/tools/actions/upload.py
lib/galaxy/tools/parameters/basic.py
lib/galaxy/web/controllers/root.py
lib/galaxy/web/framework/__init__.py
templates/dataset/edit_attributes.mako
tools/data_source/upload.xml
diffs (671 lines):
diff -r c69e55c91036 -r 6125f71c838a datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Thu Jun 11 12:06:29 2009 -0400
+++ b/datatypes_conf.xml.sample Thu Jun 11 12:20:03 2009 -0400
@@ -8,6 +8,9 @@
<converter file="interval_to_coverage.xml" target_datatype="coverage"/>
</datatype>
<datatype extension="binseq.zip" type="galaxy.datatypes.images:Binseq" mimetype="application/zip" display_in_upload="true"/>
+ <datatype extension="len" type="galaxy.datatypes.chrominfo:ChromInfo" display_in_upload="true">
+ <!-- no converters yet -->
+ </datatype>
<datatype extension="coverage" type="galaxy.datatypes.coverage:LastzCoverage" display_in_upload="true">
<indexer file="coverage.xml" />
</datatype>
@@ -31,7 +34,7 @@
<datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/>
<datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true">
<converter file="interval_to_bed_converter.xml" target_datatype="bed"/>
- <indexer file="interval.xml" />
+ <indexer file="interval_awk.xml" />
</datatype>
<datatype extension="jpg" type="galaxy.datatypes.images:Image" mimetype="image/jpeg"/>
<datatype extension="laj" type="galaxy.datatypes.images:Laj"/>
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/chrominfo.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/chrominfo.py Thu Jun 11 12:20:03 2009 -0400
@@ -0,0 +1,14 @@
+import data
+from galaxy import util
+from galaxy.datatypes.sniff import *
+from galaxy.web import url_for
+from tabular import Tabular
+from galaxy.datatypes import metadata
+from galaxy.datatypes.metadata import MetadataElement
+
+
+class ChromInfo( Tabular ):
+ file_ext = "len"
+ MetadataElement( name="chrom", default=1, desc="Chrom column", param=metadata.ColumnParameter )
+ MetadataElement( name="length", default=2, desc="Length column", param=metadata.ColumnParameter )
+
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Thu Jun 11 12:20:03 2009 -0400
@@ -42,7 +42,7 @@
__metaclass__ = DataMeta
"""Add metadata elements"""
- MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.SelectParameter, multiple=False, values=util.dbnames, no_value="?" )
+ MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.DBKeyParameter, multiple=False, no_value="?" )
"""Stores the set of display applications, and viewing methods, supported by this datatype """
supported_display_apps = {}
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/indexers/coverage.py
--- a/lib/galaxy/datatypes/indexers/coverage.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/indexers/coverage.py Thu Jun 11 12:20:03 2009 -0400
@@ -37,7 +37,7 @@
os.rename( fname+".npy", fname )
# Write average
- for window in 10, 100, 1000, 10000:
+ for window in 10, 100, 1000, 10000, 100000:
input = scores.copy()
size = len( input )
input.resize( ( ( size / window ), window ) )
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/indexers/interval.awk
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/indexers/interval.awk Thu Jun 11 12:20:03 2009 -0400
@@ -0,0 +1,43 @@
+BEGIN {
+ # from galaxy.utils
+ mapped_chars[">"] = "__gt__"
+ mapped_chars["<"] = "__lt__"
+ mapped_chars["'"] = "__sq__"
+ mapped_chars["\""] = "__dq__"
+ mapped_chars["\\["] = "__ob__"
+ mapped_chars["\\]"] = "__cb__"
+ mapped_chars["\\{"] = "__oc__"
+ mapped_chars["\\}"] = "__cc__"
+ mapped_chars["@"] = "__at__"
+ # additional, not in galaxy.utils
+ mapped_chars["/"] = "__fs__"
+ mapped_chars["^manifest\.tab$"] = "__manifest.tab__"
+}
+function escape_filename( name )
+{
+ for( char in mapped_chars ) {
+ gsub( char, mapped_chars[char], name )
+ }
+ return name
+}
+!_[$chrom]++ {
+ # close files only when we switch to a new one.
+ fn && close(fn)
+ fn = storepath "/" escape_filename($1) }
+{
+ print $0 >> fn;
+ # the || part is needed to catch 0 length chromosomes, which
+ # should never happen but...
+ if ($end > chroms[$chrom] || !chroms[$chrom])
+ chroms[$chrom] = $end }
+END {
+ fn = storepath "/manifest.tab"
+ for( x in chroms ) {
+ # add line to manifest
+ print x "\t" chroms[x] >> fn
+ chromfile = storepath "/" escape_filename(x)
+ # sort in-place
+ system( "sort -f -n -k " chrom " -k " start " -k " end " -o " chromfile " " chromfile )
+ close(chromfile)
+ }
+}
\ No newline at end of file
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/indexers/interval.py
--- a/lib/galaxy/datatypes/indexers/interval.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/indexers/interval.py Thu Jun 11 12:20:03 2009 -0400
@@ -29,13 +29,11 @@
manifest[chrom] = max(manifest.get(chrom,0),line.end)
if not lastchrom == chrom:
if current_file:
- current_file.flush()
current_file.close()
current_file = open( os.path.join( out_path, "%s" % chrom), "a" )
print >> current_file, "\t".join(line)
lastchrom = chrom
if current_file:
- current_file.flush()
current_file.close()
return manifest
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/indexers/interval_awk.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/indexers/interval_awk.xml Thu Jun 11 12:20:03 2009 -0400
@@ -0,0 +1,16 @@
+<tool id="INDEXER_Interval_0" name="Index Interval for Track Viewer">
+ <!-- Used internally to generate track indexes -->
+ <command interpreter="awk -f">interval.awk
+ chrom=${input_dataset.metadata.chromCol} start=${input_dataset.metadata.startCol}
+ end=${input_dataset.metadata.endCol} strand=${input_dataset.metadata.strandCol}
+ storepath=${store_path}
+ $input_dataset 2>&1
+ </command>
+ <inputs>
+ <page>
+ <param format="interval" name="input_dataset" type="data" label="Choose intervals"/>
+ </page>
+ </inputs>
+ <help>
+ </help>
+</tool>
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/indexers/wiggle.py
--- a/lib/galaxy/datatypes/indexers/wiggle.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/indexers/wiggle.py Thu Jun 11 12:20:03 2009 -0400
@@ -18,6 +18,8 @@
from numpy import *
import tempfile
import os
+from galaxy.tracks.store import sanitize_name
+
def write_chrom(max, out_base, instream):
@@ -35,7 +37,7 @@
os.rename( fname+".npy", fname )
# Write average
- for window in 10, 100, 1000, 10000:
+ for window in 10, 100, 1000, 10000, 100000:
input = scores.copy()
size = len( input )
input.resize( ( ( size / window ), window ) )
@@ -60,7 +62,7 @@
LEN[chrom] = max2( LEN.get(chrom,0), pos+1 )
for chrom, stream in chroms.items():
stream.seek(0)
- prefix = os.path.join(sys.argv[2], chrom)
+ prefix = os.path.join(sys.argv[2], sanitize_name(chrom))
write_chrom( LEN[chrom], prefix, stream )
manifest_file = open( os.path.join( sys.argv[2], "manifest.tab" ),"w" )
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/interval.py Thu Jun 11 12:20:03 2009 -0400
@@ -798,7 +798,7 @@
# Determine appropriate resolution to plot ~1000 points
resolution = ( 10 ** math.ceil( math.log10( range / 1000 ) ) )
# Restrict to valid range
- resolution = min( resolution, 10000 )
+ resolution = min( resolution, 100000 )
resolution = max( resolution, 1 )
# Memory map the array (don't load all the data)
data = numpy.load( data )
@@ -815,7 +815,7 @@
# Determine appropriate resolution to plot ~1000 points
resolution = math.ceil( 10 ** math.ceil( math.log10( range / 1000 ) ) )
# Restrict to valid range
- resolution = min( resolution, 10000 )
+ resolution = min( resolution, 100000 )
resolution = max( resolution, 1 )
return resolution
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/metadata.py Thu Jun 11 12:20:03 2009 -0400
@@ -289,7 +289,22 @@
if value is None: return []
if not isinstance( value, list ): return [value]
return value
-
+
+
+class DBKeyParameter( SelectParameter ):
+ def get_html_field( self, value=None, context={}, other_values={}, values=None, **kwd):
+ try:
+ values = kwd['trans'].db_builds
+ except AttributeError: pass
+ return super(DBKeyParameter, self).get_html_field( value, context, other_values, values, **kwd)
+
+ def get_html( self, value=None, context={}, other_values={}, values=None, **kwd):
+ try:
+ values = kwd['trans'].db_builds
+ except AttributeError: pass
+ return super(DBKeyParameter, self).get_html( value, context, other_values, values, **kwd)
+
+
class RangeParameter( SelectParameter ):
def __init__( self, spec ):
SelectParameter.__init__( self, spec )
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/datatypes/registry.py Thu Jun 11 12:20:03 2009 -0400
@@ -3,7 +3,7 @@
"""
import os
import logging
-import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks
+import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo
import galaxy.util
from galaxy.util.odict import odict
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/tools/actions/__init__.py Thu Jun 11 12:20:03 2009 -0400
@@ -107,6 +107,7 @@
out_data = {}
# Collect any input datasets from the incoming parameters
inp_data = self.collect_input_datasets( tool, incoming, trans )
+
# Deal with input dataset names, 'dbkey' and types
input_names = []
input_ext = 'data'
@@ -119,6 +120,16 @@
data = NoneDataset( datatypes_registry = trans.app.datatypes_registry )
if data.dbkey not in [None, '?']:
input_dbkey = data.dbkey
+
+ # Collect chromInfo dataset and add as parameters to incoming
+ db_datasets = {}
+ db_dataset = trans.db_dataset_for( input_dbkey )
+ if db_dataset:
+ db_datasets[ "chromInfo" ] = db_dataset
+ incoming[ "chromInfo" ] = db_dataset.file_name
+ else:
+ incoming[ "chromInfo" ] = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % input_dbkey )
+ inp_data.update( db_datasets )
# Determine output dataset permission/roles list
existing_datasets = [ inp for inp in inp_data.values() if inp ]
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Thu Jun 11 12:20:03 2009 -0400
@@ -668,7 +668,7 @@
>>> # Create a mock transcation with 'hg17' as the current build
>>> from galaxy.util.bunch import Bunch
- >>> trans = Bunch( history=Bunch( genome_build='hg17' ) )
+ >>> trans = Bunch( history=Bunch( genome_build='hg17' ), db_builds=util.dbnames )
>>> p = GenomeBuildParameter( None, XML(
... '''
@@ -703,10 +703,10 @@
"""
def get_options( self, trans, other_values ):
last_used_build = trans.history.genome_build
- for dbkey, build_name in util.dbnames:
+ for dbkey, build_name in trans.db_builds:
yield build_name, dbkey, ( dbkey == last_used_build )
def get_legal_values( self, trans, other_values ):
- return set( dbkey for dbkey, _ in util.dbnames )
+ return set( dbkey for dbkey, _ in trans.db_builds )
class ColumnListParameter( SelectToolParameter ):
"""
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/tracks/store.py
--- a/lib/galaxy/tracks/store.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/tracks/store.py Thu Jun 11 12:20:03 2009 -0400
@@ -1,5 +1,17 @@
import os
+import re
from string import Template
+from galaxy.util import sanitize_text
+
+# extra mappings/escape to keep users from traversing around the
+# filesystem and wreaking havoc
+extra_mappings = { r"/": "__fs__", r"^manifest.tab$": "__manifest.tab__" }
+
+def sanitize_name( name ):
+ name = sanitize_text( name )
+ for key, value in extra_mappings.items():
+ name = re.sub( key, value, name )
+ return name
class TemplateSubber( object ):
def __init__(self, obj):
@@ -56,7 +68,7 @@
fd.close()
def _get_object_path( self, chrom, resolution ):
- object_name = chrom
+ object_name = sanitize_name(chrom)
if resolution: object_name += "_%d" % resolution
return os.path.join( self.path, object_name )
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/web/controllers/root.py Thu Jun 11 12:20:03 2009 -0400
@@ -236,11 +236,15 @@
if spec.get("readonly"):
continue
optional = params.get("is_"+name, None)
+ other = params.get("or_"+name, None)
if optional and optional == 'true':
# optional element... == 'true' actually means it is NOT checked (and therefore omitted)
setattr(data.metadata, name, None)
else:
- setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
+ if other:
+ setattr( data.metadata, name, other )
+ else:
+ setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
data.datatype.after_edit( data )
trans.app.model.flush()
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Thu Jun 11 12:20:03 2009 -0400
@@ -1,13 +1,11 @@
-from mako import exceptions
-from mako.template import Template
-from mako.lookup import TemplateLookup
+import math
+
+import mimeparse
+from galaxy.tracks import messages
+from galaxy.util.json import to_json_string
from galaxy.web.base.controller import *
from galaxy.web.framework import simplejson
-from galaxy import web
-from galaxy.tracks import messages
-import mimeparse
-from galaxy.util.json import to_json_string
-import math
+
class MultiResponse(object):
"""
@@ -82,18 +80,19 @@
def build( self, trans, **kwargs ):
trans.session["track_sets"] = list(kwargs.keys())
trans.session.save()
- waiting = False
- for id, value in kwargs.items():
- status = self.data_handler( trans, id )
- if status == messages.PENDING:
- waiting = True
- if not waiting:
- return trans.response.send_redirect( web.url_for( controller='tracks', action='chroms', dbkey=trans.session["track_dbkey"]) )
- return trans.fill_template( 'tracks/build.mako' )
+ #waiting = False
+ #for id, value in kwargs.items():
+ # status = self.data_handler( trans, id )
+ # if status == messages.PENDING:
+ # waiting = True
+ #if not waiting:
+ return trans.response.send_redirect( web.url_for( controller='tracks/', action='index', chrom="" ) )
+ #return trans.fill_template( 'tracks/build.mako' )
@web.expose
def index(self, trans, **kwargs):
tracks = []
+ dbkey = ""
for track in trans.session["track_sets"]:
dataset = trans.app.model.HistoryDatasetAssociation.get( track )
tracks.append({
@@ -101,17 +100,23 @@
"name": dataset.name,
"id": dataset.id
})
+ dbkey = dataset.dbkey
chrom = kwargs.get("chrom","")
LEN = self.chroms_handler(trans, trans.session["track_dbkey"]).get(chrom,0)
return trans.fill_template( 'tracks/index.mako',
- tracks=tracks, chrom=chrom,
+ tracks=tracks, chrom=chrom, dbkey=dbkey,
LEN=LEN )
def chroms_handler(self, trans, dbkey ):
- db_manifest = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % dbkey )
+ db_manifest = trans.db_dataset_for( dbkey )
+ if not db_manifest:
+ db_manifest = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % dbkey )
+ else:
+ db_manifest = db_manifest.file_name
manifest = {}
if os.path.exists( db_manifest ):
for line in open( db_manifest ):
+ if line.startswith("#"): continue
line = line.rstrip("\r\n")
fields = line.split("\t")
manifest[fields[0]] = int(fields[1])
diff -r c69e55c91036 -r 6125f71c838a lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Thu Jun 11 12:06:29 2009 -0400
+++ b/lib/galaxy/web/framework/__init__.py Thu Jun 11 12:20:03 2009 -0400
@@ -549,6 +549,31 @@
template = Template( source=template_string,
searchList=[context or kwargs, dict(caller=self)] )
return str(template)
+
+ @property
+ def db_builds( self ):
+ """
+ Returns the builds defined by galaxy and the builds defined by
+ the user (chromInfo in history).
+ """
+ dbnames = list()
+ datasets = self.app.model.HistoryDatasetAssociation.filter_by(deleted=False, history_id=self.history.id, extension="len").all()
+ if len(datasets) > 0:
+ dbnames.append( (util.dbnames.default_value, '--------- User Defined Builds ----------') )
+ for dataset in datasets:
+ dbnames.append( (dataset.dbkey, dataset.name) )
+ dbnames.extend( util.dbnames )
+ return dbnames
+
+ def db_dataset_for( self, dbkey ):
+ """
+ Returns the db_file dataset associated/needed by `dataset`, or `None`.
+ """
+ datasets = self.app.model.HistoryDatasetAssociation.filter_by(deleted=False, history_id=self.history.id, extension="len").all()
+ for ds in datasets:
+ if dbkey == ds.dbkey:
+ return ds
+ return None
class FormBuilder( object ):
"""
diff -r c69e55c91036 -r 6125f71c838a static/scripts/trackster.js
--- a/static/scripts/trackster.js Thu Jun 11 12:06:29 2009 -0400
+++ b/static/scripts/trackster.js Thu Jun 11 12:20:03 2009 -0400
@@ -85,7 +85,7 @@
var resolution = Math.pow( 10, Math.ceil( Math.log( range / DENSITY ) / Math.log( 10 ) ) );
resolution = Math.max( resolution, 1 );
- resolution = Math.min( resolution, 10000 );
+ resolution = Math.min( resolution, 100000 );
var parent_element = $("<div style='position: relative;'></div>");
this.content_div.children( ":first" ).remove();
@@ -152,10 +152,20 @@
var low = position * DENSITY * resolution;
var high = ( position + 1 ) * DENSITY * resolution;
cache[resolution][position] = { state: "loading" };
- $.getJSON( "data" + this.type, { chr: this.view.chr, low: low, high: high, dataset_id: this.track.dataset_id }, function ( data ) {
- cache[resolution][position] = { state: "loaded", values: data };
- $(document).trigger( "redraw" );
- });
+ // use closure to preserve this and parameters for getJSON
+ var fetcher = function (ref) {
+ return function () {
+ $.getJSON( "data" + ref.type, { chr: ref.view.chr, low: low, high: high, dataset_id: ref.track.dataset_id }, function ( data ) {
+ if( data == "pending" ) {
+ setTimeout( fetcher, 5000 );
+ } else {
+ cache[resolution][position] = { state: "loaded", values: data };
+ }
+ $(document).trigger( "redraw" );
+ });
+ };
+ }(this);
+ fetcher();
}
return cache[resolution][position];
}
@@ -288,8 +298,11 @@
var chunk = this.cache.get( resolution, tile_index );
if ( chunk.state == "loading" ) {
- return null;
- }
+ parent_element.addClass("loading");
+ return null;
+ } else {
+ parent_element.removeClass("loading");
+ }
var values = chunk.values;
for ( var index in values ) {
diff -r c69e55c91036 -r 6125f71c838a static/trackster.css
--- a/static/trackster.css Thu Jun 11 12:06:29 2009 -0400
+++ b/static/trackster.css Thu Jun 11 12:20:03 2009 -0400
@@ -85,7 +85,10 @@
}
.loading {
- background: #DDDDDD;
+ background-image: url("/static/images/loading_large_white_bg.gif");
+ background-position: center center;
+ background-repeat: no-repeat;
+ min-height: 100px;
}
.label-track .label {
diff -r c69e55c91036 -r 6125f71c838a templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako Thu Jun 11 12:06:29 2009 -0400
+++ b/templates/dataset/edit_attributes.mako Thu Jun 11 12:20:03 2009 -0400
@@ -46,7 +46,7 @@
${spec.desc}:
</label>
<div style="float: left; width: 250px; margin-right: 10px;">
- ${data.metadata.get_html_by_name( name )}
+ ${data.metadata.get_html_by_name( name, trans=trans )}
</div>
<div style="clear: both"></div>
</div>
diff -r c69e55c91036 -r 6125f71c838a templates/tracks/index.mako
--- a/templates/tracks/index.mako Thu Jun 11 12:06:29 2009 -0400
+++ b/templates/tracks/index.mako Thu Jun 11 12:20:03 2009 -0400
@@ -17,18 +17,19 @@
${parent.late_javascripts()}
<script type="text/javascript" src="/static/scripts/jquery.event.drag.js"></script>
<script type="text/javascript" src="/static/scripts/trackster.js"></script>
-<script>
+<script type="text/javascript">
- var view = new View( "${chrom}", ${LEN}, 0, ${LEN} );
+ var view = new View( "${chrom}", ${LEN}, 0, ${max(LEN,1)} );
var tracks = new TrackLayout( view );
-
+ var dbkey = "${dbkey}";
+
$(function() {
tracks.add( new LabelTrack( view, $("#viewport" ) ) );
%for track in tracks:
tracks.add( new ${track["type"]}( "${track["name"]}", view, $("#viewport" ), ${track["id"]} ) );
%endfor
-
+
$(document).bind( "redraw", function( e ) {
tracks.redraw();
});
@@ -56,9 +57,43 @@
view.high = new_high;
tracks.redraw();
});
+ tracks.redraw();
+ load_chroms();
+ });
- tracks.redraw();
- });
+ var load_chroms = function () {
+ var fetcher = function (ref) {
+ return function () {
+ $.getJSON( "chroms", { dbkey: dbkey }, function ( data ) {
+ // Hacky - check length of "object"
+ var chrom_length = 0;
+ for (key in data) chrom_length++;
+ if( chrom_length == 0 ) {
+ setTimeout( fetcher, 5000 );
+ } else {
+ var chrom_options = '';
+ for (key in data) {
+ if( key == view.chr ) {
+ chrom_options += '<option value="' + key + '" selected="true">' + key + '</option>';
+ } else {
+ chrom_options += '<option value="' + key + '">' + key + '</option>';
+ }
+ }
+ $("#chrom").html(chrom_options);
+ $("#chrom").bind( "change", function ( e ) {
+ $("#chr").submit();
+ });
+ if( view.chr == "" ) {
+ $("#chrom option:first").attr("selected", true);
+ $("#chrom").trigger( "change" );
+ }
+ }
+ });
+ };
+ }(this);
+ fetcher();
+ };
+
</script>
</%def>
@@ -79,11 +114,14 @@
<div id="nav">
<div id="nav-controls">
+ <form name="chr" id="chr" method="GET">
<a href="#" onclick="javascript:view.left(5);tracks.redraw();"><<</a>
<a href="#" onclick="javascript:view.left(2);tracks.redraw();"><</a>
-
- <span style="display: inline-block; width: 30em; text-align: center;">Viewing ${chrom}:<span id="low">0</span>-<span id="high">180857866</span></span>
-
+ <span style="display: inline-block; width: 30em; text-align: center;">Viewing
+ <select id="chrom" name="chrom">
+ <option value="">loading</option>
+ </select>
+ <span id="low">0</span>-<span id="high">180857866</span></span>
<span style="display: inline-block; width: 10em;">
<a href="#" onclick="javascript:view.zoom_in(2);tracks.redraw();">+</a>
<a href="#" onclick="javascript:view.zoom_out(2);tracks.redraw();">-</a>
@@ -91,6 +129,7 @@
<a href="#" onclick="javascript:view.right(2);tracks.redraw();">></a>
<a href="#" onclick="javascript:view.right(5);tracks.redraw();">>></a>
+ </form>
</div>
</div>
diff -r c69e55c91036 -r 6125f71c838a tools/annotation_profiler/annotation_profiler.xml
--- a/tools/annotation_profiler/annotation_profiler.xml Thu Jun 11 12:06:29 2009 -0400
+++ b/tools/annotation_profiler/annotation_profiler.xml Thu Jun 11 12:20:03 2009 -0400
@@ -1,6 +1,6 @@
<tool id="Annotation_Profiler_0" name="Profile Annotations" Version="1.0.0">
<description>for a set of genomic intervals</description>
- <command interpreter="python">annotation_profiler_for_interval.py -i $input1 -c ${input1.metadata.chromCol} -s ${input1.metadata.startCol} -e ${input1.metadata.endCol} -o $out_file1 $keep_empty -p /depot/data2/galaxy/annotation_profiler/$dbkey $summary -l ${GALAXY_DATA_INDEX_DIR}/shared/ucsc/chrom/${dbkey}.len -b 3 -t $table_names</command>
+ <command interpreter="python">annotation_profiler_for_interval.py -i $input1 -c ${input1.metadata.chromCol} -s ${input1.metadata.startCol} -e ${input1.metadata.endCol} -o $out_file1 $keep_empty -p /depot/data2/galaxy/annotation_profiler/$dbkey $summary -l ${chromInfo} -b 3 -t $table_names</command>
<inputs>
<param format="interval" name="input1" type="data" label="Choose Intervals">
<validator type="dataset_metadata_in_file" filename="annotation_profiler_valid_builds.txt" metadata_name="dbkey" metadata_column="0" message="Profiling is not currently available for this species."/>
diff -r c69e55c91036 -r 6125f71c838a tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Jun 11 12:06:29 2009 -0400
+++ b/tools/data_source/upload.xml Thu Jun 11 12:20:03 2009 -0400
@@ -25,6 +25,7 @@
</param>
</upload_dataset>
<param name="dbkey" type="genomebuild" label="Genome" />
+ <param name="other_dbkey" type="text" label="Or user-defined Genome" />
</inputs>
<help>
diff -r c69e55c91036 -r 6125f71c838a tools/new_operations/complement.xml
--- a/tools/new_operations/complement.xml Thu Jun 11 12:06:29 2009 -0400
+++ b/tools/new_operations/complement.xml Thu Jun 11 12:20:03 2009 -0400
@@ -1,6 +1,6 @@
<tool id="gops_complement_1" name="Complement">
<description>intervals of a query</description>
- <command interpreter="python">gops_complement.py $input1 $output -1 ${input1.metadata.chromCol},${input1.metadata.startCol},${input1.metadata.endCol},${input1.metadata.strandCol} -l ${GALAXY_DATA_INDEX_DIR}/shared/ucsc/chrom/${dbkey}.len $allchroms</command>
+ <command interpreter="python">gops_complement.py $input1 $output -1 ${input1.metadata.chromCol},${input1.metadata.startCol},${input1.metadata.endCol},${input1.metadata.strandCol} -l ${chromInfo} $allchroms</command>
<inputs>
<param format="interval" name="input1" type="data">
<label>Complement regions of</label>
@@ -58,4 +58,4 @@
.. image:: ../static/operation_icons/gops_complement.gif
</help>
-</tool>
\ No newline at end of file
+</tool>
1
0
16 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/143dde05e1df
changeset: 2450:143dde05e1df
user: James Taylor <james(a)jamestaylor.org>
date: Thu Jun 11 15:37:19 2009 -0400
description:
Fixing up track browser UI. No longer uses session, cleaned up a bunch of unnecessary stuff. Still pretty limited -- coverage indexing doesn't seem to work right
13 file(s) affected in this change:
lib/galaxy/web/controllers/tracks.py
static/scripts/trackster.js
static/trackster.css
templates/base_panels.mako
templates/tracks/browser.mako
templates/tracks/build.mako
templates/tracks/chroms.mako
templates/tracks/datasets.mako
templates/tracks/dbkeys.mako
templates/tracks/debug.mako
templates/tracks/index.mako
templates/tracks/new_browser.mako
templates/tracks/view.mako
diffs (839 lines):
diff -r 6125f71c838a -r 143dde05e1df lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py Thu Jun 11 12:20:03 2009 -0400
+++ b/lib/galaxy/web/controllers/tracks.py Thu Jun 11 15:37:19 2009 -0400
@@ -1,113 +1,96 @@
+"""
+Support for constructing and viewing custom "track" browsers within Galaxy.
+
+Track browsers are currently transient -- nothing is stored to the database
+when a browser is created. Building a browser consists of selecting a set
+of datasets associated with the same dbkey to display. Once selected, jobs
+are started to create any neccesary indexes in the background, and the user
+is redirected to the browser interface, which loads the appropriate datasets.
+
+Problems
+--------
+ - Assumes that the only indexing type in Galaxy is for this particular
+ application. Thus, datatypes can only have one indexer, and the presence
+ of an indexer results in assuming that datatype can be displayed as a track.
+
+"""
+
import math
-import mimeparse
from galaxy.tracks import messages
from galaxy.util.json import to_json_string
from galaxy.web.base.controller import *
from galaxy.web.framework import simplejson
-
-class MultiResponse(object):
+class TracksController( BaseController ):
"""
- Shamelessly ripped off of a django snippet.
+ Controller for track browser interface. Handles building a new browser from
+ datasets in the current history, and display of the resulting browser.
"""
- def __init__(self, handlers):
- self.handlers = handlers
-
- def __call__(self, view_func):
- def wrapper(that, trans, *args, **kwargs):
- data_resource = view_func(that, trans, *args, **kwargs)
- content_type = mimeparse.best_match(self.handlers.keys(),
- trans.request.environ['HTTP_ACCEPT'])
- response = self.handlers[content_type](data_resource, trans)
- trans.response.headers['Content-Type'] = "%s" % content_type
- return response
- return wrapper
-
- @classmethod
- def JSON( cls, data_resource, trans ):
- return simplejson.dumps( data_resource )
- class XML( object ):
- def __call__(self, data_resource, trans ):
- raise NotImplementedError( "XML MultiResponse handler is not implemented." )
-
- class AMF( object ):
- def __call__(self, data_resource, trans ):
- raise NotImplementedError( "XML MultiResponse handler is not implemented." )
-
- class HTML( object ):
- def __init__(self, template ):
- self.template = template
-
- def __call__(self, data_resource, trans ):
- return trans.fill_template( self.template, data_resource=data_resource, trans=trans )
-
-class WebRoot( BaseController ):
+ @web.expose
+ def index( self, trans ):
+ return trans.fill_template( "tracks/index.mako" )
+
+ @web.expose
+ def new_browser( self, trans, dbkey=None, dataset_ids=None, browse=None ):
+ """
+ Build a new browser from datasets in the current history. Redirects
+ to 'index' once datasets to browse have been selected.
+ """
+ session = trans.sa_session
+ # If the user clicked the submit button explicately, try to build the browser
+ if browse and dataset_ids:
+ dataset_ids = ",".join( map( str, dataset_ids ) )
+ trans.response.send_redirect( web.url_for( controller='tracks', action='browser', chrom="", dataset_ids=dataset_ids ) )
+ return
+ # Determine the set of all dbkeys that are used in the current history
+ dbkeys = [ d.metadata.dbkey for d in trans.get_history().datasets if not d.deleted ]
+ dbkey_set = set( dbkeys )
+ # If a dbkey argument was not provided, or is no longer valid, default
+ # to the first one
+ if dbkey is None or dbkey not in dbkey_set:
+ dbkey = dbkeys[0]
+ # Find all datasets in the current history that are of that dbkey and
+ # have an indexer.
+ datasets = {}
+ for dataset in session.query( model.HistoryDatasetAssociation ).filter_by( deleted=False, history_id=trans.history.id ):
+ if dataset.metadata.dbkey == dbkey and trans.app.datatypes_registry.get_indexers_by_datatype( dataset.extension ):
+ datasets[dataset.id] = dataset.name
+ # Render the template
+ return trans.fill_template( "tracks/new_browser.mako", dbkey=dbkey, dbkey_set=dbkey_set, datasets=datasets )
@web.expose
- @MultiResponse( {'text/html': MultiResponse.HTML( "tracks/dbkeys.mako"),
- 'text/javascript':MultiResponse.JSON} )
- def dbkeys(self, trans ):
- return list(set([x.metadata.dbkey for x in trans.get_history().datasets if not x.deleted]))
-
- @web.expose
- @MultiResponse( {'text/html':MultiResponse.HTML( "tracks/chroms.mako" ),
- 'text/javascript':MultiResponse.JSON} )
- def chroms(self, trans, dbkey=None):
- return self.chroms_handler( trans, dbkey )
-
- @web.expose
- @MultiResponse( {'text/html':MultiResponse.HTML( "tracks/datasets.mako" ),
- 'text/javascript':MultiResponse.JSON} )
- def list(self, trans, dbkey=None ):
- trans.session["track_dbkey"] = dbkey
- trans.session.save()
- datasets = trans.app.model.HistoryDatasetAssociation.filter_by(deleted=False, history_id=trans.history.id).all()
- dataset_list = {}
- for dataset in datasets:
- if dataset.metadata.dbkey == dbkey and trans.app.datatypes_registry.get_indexers_by_datatype( dataset.extension ):
- dataset_list[dataset.id] = dataset.name
- return dataset_list
-
- @web.expose
- @MultiResponse( {'text/html':MultiResponse.JSON,
- 'text/javascript':MultiResponse.JSON} )
- def data(self, trans, dataset_id=None, chr="", low="", high=""):
- return self.data_handler( trans, dataset_id, chrom=chr, low=low, high=high )
-
- @web.expose
- def build( self, trans, **kwargs ):
- trans.session["track_sets"] = list(kwargs.keys())
- trans.session.save()
- #waiting = False
- #for id, value in kwargs.items():
- # status = self.data_handler( trans, id )
- # if status == messages.PENDING:
- # waiting = True
- #if not waiting:
- return trans.response.send_redirect( web.url_for( controller='tracks/', action='index', chrom="" ) )
- #return trans.fill_template( 'tracks/build.mako' )
-
- @web.expose
- def index(self, trans, **kwargs):
+ def browser(self, trans, dataset_ids, chrom=""):
+ """
+ Display browser for the datasets listed in `dataset_ids`.
+ """
tracks = []
dbkey = ""
- for track in trans.session["track_sets"]:
- dataset = trans.app.model.HistoryDatasetAssociation.get( track )
- tracks.append({
- "type": dataset.datatype.get_track_type(),
- "name": dataset.name,
- "id": dataset.id
- })
+ for dataset_id in dataset_ids.split( "," ):
+ dataset = trans.app.model.HistoryDatasetAssociation.get( dataset_id )
+ tracks.append( {
+ "type": dataset.datatype.get_track_type(),
+ "name": dataset.name,
+ "id": dataset.id
+ } )
dbkey = dataset.dbkey
- chrom = kwargs.get("chrom","")
- LEN = self.chroms_handler(trans, trans.session["track_dbkey"]).get(chrom,0)
- return trans.fill_template( 'tracks/index.mako',
- tracks=tracks, chrom=chrom, dbkey=dbkey,
+ LEN = self._chroms(trans, dbkey ).get(chrom,0)
+ return trans.fill_template( 'tracks/browser.mako',
+ dataset_ids=dataset_ids,
+ tracks=tracks,
+ chrom=chrom,
+ dbkey=dbkey,
LEN=LEN )
-
- def chroms_handler(self, trans, dbkey ):
+
+ @web.json
+ def chroms(self, trans, dbkey=None ):
+ return self._chroms( trans, dbkey )
+
+ def _chroms( self, trans, dbkey ):
+ """
+ Called by the browser to get a list of valid chromosomes and lengths
+ """
db_manifest = trans.db_dataset_for( dbkey )
if not db_manifest:
db_manifest = os.path.join( trans.app.config.tool_data_path, 'shared','ucsc','chrom', "%s.len" % dbkey )
@@ -134,7 +117,11 @@
pass
return manifest
- def data_handler( self, trans, dataset_id, chrom="", low="", high="" ):
+ @web.json
+ def data( self, trans, dataset_id, chrom="", low="", high="" ):
+ """
+ Called by the browser to request a block of data
+ """
dataset = trans.app.model.HistoryDatasetAssociation.get( dataset_id )
if not dataset: return messages.NO_DATA
if dataset.state == trans.app.model.Job.states.ERROR:
diff -r 6125f71c838a -r 143dde05e1df static/scripts/trackster.js
--- a/static/scripts/trackster.js Thu Jun 11 12:20:03 2009 -0400
+++ b/static/scripts/trackster.js Thu Jun 11 15:37:19 2009 -0400
@@ -155,7 +155,7 @@
// use closure to preserve this and parameters for getJSON
var fetcher = function (ref) {
return function () {
- $.getJSON( "data" + ref.type, { chr: ref.view.chr, low: low, high: high, dataset_id: ref.track.dataset_id }, function ( data ) {
+ $.getJSON( TRACKSTER_DATA_URL + ref.type, { chrom: ref.view.chr, low: low, high: high, dataset_id: ref.track.dataset_id }, function ( data ) {
if( data == "pending" ) {
setTimeout( fetcher, 5000 );
} else {
@@ -218,6 +218,7 @@
var y1 = data[i][1];
var x2 = data[i+1][0] - tile_low;
var y2 = data[i+1][1];
+ console.log( x1, y1, x2, y2 );
// Missing data causes us to stop drawing
if ( isNaN( y1 ) || isNaN( y2 ) ) {
in_path = false;
diff -r 6125f71c838a -r 143dde05e1df static/trackster.css
--- a/static/trackster.css Thu Jun 11 12:20:03 2009 -0400
+++ b/static/trackster.css Thu Jun 11 15:37:19 2009 -0400
@@ -1,5 +1,5 @@
body {
- margin: 4em 0;
+ margin: 0 0;
padding: 0;
font-family: verdana;
font-size: 75%;
diff -r 6125f71c838a -r 143dde05e1df templates/base_panels.mako
--- a/templates/base_panels.mako Thu Jun 11 12:20:03 2009 -0400
+++ b/templates/base_panels.mako Thu Jun 11 15:37:19 2009 -0400
@@ -148,13 +148,22 @@
<span class="${cls}" style="${style}"><a target="${target}" href="${href}">${display}</a></span>
</%def>
- ## ${tab( "tracks", "View Data", h.url_for( controller='tracks', action='dbkeys' ), target="galaxy_main")}
-
${tab( "analysis", "Analyze Data", h.url_for( controller='root', action='index' ))}
${tab( "workflow", "Workflow", h.url_for( controller='workflow', action='index' ))}
- ${tab( "libraries", "Libraries", h.url_for( controller='library', action='index' ))}
+ ${tab( "libraries", "Libraries", h.url_for( controller='library', action='index' ))}
+
+ %if app.config.get_bool( 'enable_tracks', False ):
+ <span class="tab">
+ Visualization
+ <div class="submenu">
+ <ul>
+ <li><a href="${h.url_for( controller='tracks', action='index' )}">Build track browser</a></li>
+ </ul>
+ </div>
+ </span>
+ %endif
${tab( "admin", "Admin", h.url_for( controller='admin', action='index' ), extra_class="admin-only", visible=( trans.user and app.config.is_admin_user( trans.user ) ) )}
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/browser.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/tracks/browser.mako Thu Jun 11 15:37:19 2009 -0400
@@ -0,0 +1,132 @@
+<%inherit file="/base.mako"/>
+
+<%def name="stylesheets()">
+${parent.stylesheets()}
+<link rel="stylesheet" type="text/css" href="/static/trackster.css" />
+</%def>
+
+<%def name="javascripts()">
+${parent.javascripts()}
+<script type="text/javascript" src="/static/scripts/jquery.event.drag.js"></script>
+<script type="text/javascript" src="/static/scripts/trackster.js"></script>
+<script type="text/javascript">
+
+ ## HACK
+ TRACKSTER_DATA_URL = "${h.url_for( action='data' )}";
+
+ var view = new View( "${chrom}", ${LEN}, 0, ${max(LEN,1)} );
+ var tracks = new TrackLayout( view );
+ var dbkey = "${dbkey}";
+
+ $(function() {
+
+ tracks.add( new LabelTrack( view, $("#viewport" ) ) );
+ %for track in tracks:
+ tracks.add( new ${track["type"]}( "${track["name"]}", view, $("#viewport" ), ${track["id"]} ) );
+ %endfor
+
+ $(document).bind( "redraw", function( e ) {
+ tracks.redraw();
+ });
+
+ $(window).resize( function( e ) {
+ tracks.redraw();
+ });
+
+ $("#viewport").bind( "dragstart", function ( e ) {
+ this.original_low = view.low;
+ }).bind( "drag", function( e ) {
+ var move_amount = ( e.offsetX - this.offsetLeft ) / this.offsetWidth;
+ var range = view.high - view.low;
+ var move_bases = Math.round( range * move_amount );
+ var new_low = this.original_low - move_bases;
+ if ( new_low < 0 ) {
+ new_low = 0;
+ }
+ var new_high = new_low + range;
+ if ( new_high > view.length ) {
+ new_high = view.length;
+ new_low = new_high - range;
+ }
+ view.low = new_low;
+ view.high = new_high;
+ tracks.redraw();
+ });
+ tracks.redraw();
+ load_chroms();
+ });
+
+ var load_chroms = function () {
+ var fetcher = function (ref) {
+ return function () {
+ $.getJSON( "${h.url_for( action='chroms' )}", { dbkey: dbkey }, function ( data ) {
+ // Hacky - check length of "object"
+ var chrom_length = 0;
+ for (key in data) chrom_length++;
+ if( chrom_length == 0 ) {
+ setTimeout( fetcher, 5000 );
+ } else {
+ var chrom_options = '';
+ for (key in data) {
+ if( key == view.chr ) {
+ chrom_options += '<option value="' + key + '" selected="true">' + key + '</option>';
+ } else {
+ chrom_options += '<option value="' + key + '">' + key + '</option>';
+ }
+ }
+ $("#chrom").html(chrom_options);
+ $("#chrom").bind( "change", function ( e ) {
+ $("#chr").submit();
+ });
+ if( view.chr == "" ) {
+ $("#chrom option:first").attr("selected", true);
+ $("#chrom").trigger( "change" );
+ }
+ }
+ });
+ };
+ }(this);
+ fetcher();
+ };
+
+</script>
+</%def>
+
+
+<div id="content">
+
+ <div id="overview">
+ <div id="overview-viewport">
+ <div id="overview-box"></div>
+ </div>
+ </div>
+
+
+ <div id="viewport">
+ </div>
+
+</div>
+ <div id="nav">
+
+ <div id="nav-controls">
+ <form name="chr" id="chr" method="GET">
+ <input type="hidden" name="dataset_ids" value="${dataset_ids}" />
+ <a href="#" onclick="javascript:view.left(5);tracks.redraw();"><<</a>
+ <a href="#" onclick="javascript:view.left(2);tracks.redraw();"><</a>
+ <span style="display: inline-block; width: 30em; text-align: center;">Viewing
+ <select id="chrom" name="chrom">
+ <option value="">loading</option>
+ </select>
+ <span id="low">0</span>-<span id="high">180857866</span></span>
+ <span style="display: inline-block; width: 10em;">
+ <a href="#" onclick="javascript:view.zoom_in(2);tracks.redraw();">+</a>
+ <a href="#" onclick="javascript:view.zoom_out(2);tracks.redraw();">-</a>
+ </span>
+
+ <a href="#" onclick="javascript:view.right(2);tracks.redraw();">></a>
+ <a href="#" onclick="javascript:view.right(5);tracks.redraw();">>></a>
+ </form>
+ </div>
+
+ </div>
+
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/build.mako
--- a/templates/tracks/build.mako Thu Jun 11 12:20:03 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,22 +0,0 @@
-<%inherit file="/base.mako"/>
-
-<%def name="init()">
-<%
- self.active_view="view"
- self.has_left_panel=False
-%>
-</%def>
-
-<script type="text/javascript">
- setTimeout(function () {
- window.location.reload();
- }, 5000 );
-</script>
-
-<div class="donemessage">
-<p>
-Please wait while we index your tracks for viewing. You will be
-automatically redirected to choose a chromosome to view after indices
-are built.
-</p>
-</div>
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/chroms.mako
--- a/templates/tracks/chroms.mako Thu Jun 11 12:20:03 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-<%inherit file="/base.mako"/>
-
-<%def name="init()">
-<%
- self.active_view="view"
- self.has_left_panel=False
-%>
-</%def>
-
-<div class="form">
- <div class="form-title">Select Chromosome/Contig/Scaffold/etc.</div>
- <div id="dbkey" class="form-body">
- <form action="/tracks/index" method="GET" target="_parent">
- <div class="form-row">
- <label for="dbkey">Chrom: </label>
- <div class="form-row-input">
- <select name="chrom" id="chrom">
- %for chrom in data_resource:
- <option value="${chrom}">${chrom}</option>
- %endfor
- </select>
- </div>
- <div style="clear: both;"></div>
- </div>
- <div class="form-row">
- <input type="submit" value="View" />
- </div>
- </form>
- </div>
-</div>
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/datasets.mako
--- a/templates/tracks/datasets.mako Thu Jun 11 12:20:03 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,28 +0,0 @@
-<%inherit file="/base.mako"/>
-
-<%def name="init()">
-<%
- self.active_view="view"
- self.has_left_panel=False
-%>
-</%def>
-
-<div class="form">
- <div class="form-title">Select Datasets to View</div>
- <div id="dbkey" class="form-body">
- <form action="/tracks/build" method="GET">
- %for key,value in data_resource.items():
- <div class="form-row">
- <label for="${key}">${value}</label>
- <div class="form-row-input">
- <input type="checkbox" name="${key}" />
- </div>
- <div style="clear: both;"></div>
- </div>
- %endfor
- <div class="form-row">
- <input type="submit" value="Build..." />
- </div>
- </form>
- </div>
-</div>
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/dbkeys.mako
--- a/templates/tracks/dbkeys.mako Thu Jun 11 12:20:03 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-<%inherit file="/base.mako"/>
-
-<%def name="init()">
-<%
- self.active_view="view"
- self.has_left_panel=False
-%>
-</%def>
-
-<div class="form">
- <div class="form-title">Select DBKey</div>
- <div id="dbkey" class="form-body">
- <form action="/tracks/list" method="GET">
- <div class="form-row">
- <label for="dbkey">DBKey: </label>
- <div class="form-row-input">
- <select name="dbkey" id="dbkey">
- %for dbkey in data_resource:
- <option value="${dbkey}">${dbkey}</option>
- %endfor
- </select>
- </div>
- <div style="clear: both;"></div>
- </div>
- <div class="form-row">
- <input type="submit" value="Select Datasets..."/>
- </div>
- </form>
- </div>
-</div>
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/debug.mako
--- a/templates/tracks/debug.mako Thu Jun 11 12:20:03 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-${data_resource}
\ No newline at end of file
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/index.mako
--- a/templates/tracks/index.mako Thu Jun 11 12:20:03 2009 -0400
+++ b/templates/tracks/index.mako Thu Jun 11 15:37:19 2009 -0400
@@ -2,149 +2,15 @@
<%def name="init()">
<%
- self.active_view="tracks"
self.has_left_panel=False
self.has_right_panel=False
+ self.active_view="visualization"
+ self.message_box_visible=False
%>
</%def>
-<%def name="stylesheets()">
-${parent.stylesheets()}
-<link rel="stylesheet" type="text/css" href="/static/trackster.css" />
-</%def>
+<%def name="center_panel()">
-<%def name="late_javascripts()">
-${parent.late_javascripts()}
-<script type="text/javascript" src="/static/scripts/jquery.event.drag.js"></script>
-<script type="text/javascript" src="/static/scripts/trackster.js"></script>
-<script type="text/javascript">
+ <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${h.url_for( controller="tracks", action="new_browser" )}"> </iframe>
- var view = new View( "${chrom}", ${LEN}, 0, ${max(LEN,1)} );
- var tracks = new TrackLayout( view );
- var dbkey = "${dbkey}";
-
- $(function() {
-
- tracks.add( new LabelTrack( view, $("#viewport" ) ) );
- %for track in tracks:
- tracks.add( new ${track["type"]}( "${track["name"]}", view, $("#viewport" ), ${track["id"]} ) );
- %endfor
-
- $(document).bind( "redraw", function( e ) {
- tracks.redraw();
- });
-
- $(window).resize( function( e ) {
- tracks.redraw();
- });
-
- $("#viewport").bind( "dragstart", function ( e ) {
- this.original_low = view.low;
- }).bind( "drag", function( e ) {
- var move_amount = ( e.offsetX - this.offsetLeft ) / this.offsetWidth;
- var range = view.high - view.low;
- var move_bases = Math.round( range * move_amount );
- var new_low = this.original_low - move_bases;
- if ( new_low < 0 ) {
- new_low = 0;
- }
- var new_high = new_low + range;
- if ( new_high > view.length ) {
- new_high = view.length;
- new_low = new_high - range;
- }
- view.low = new_low;
- view.high = new_high;
- tracks.redraw();
- });
- tracks.redraw();
- load_chroms();
- });
-
- var load_chroms = function () {
- var fetcher = function (ref) {
- return function () {
- $.getJSON( "chroms", { dbkey: dbkey }, function ( data ) {
- // Hacky - check length of "object"
- var chrom_length = 0;
- for (key in data) chrom_length++;
- if( chrom_length == 0 ) {
- setTimeout( fetcher, 5000 );
- } else {
- var chrom_options = '';
- for (key in data) {
- if( key == view.chr ) {
- chrom_options += '<option value="' + key + '" selected="true">' + key + '</option>';
- } else {
- chrom_options += '<option value="' + key + '">' + key + '</option>';
- }
- }
- $("#chrom").html(chrom_options);
- $("#chrom").bind( "change", function ( e ) {
- $("#chr").submit();
- });
- if( view.chr == "" ) {
- $("#chrom option:first").attr("selected", true);
- $("#chrom").trigger( "change" );
- }
- }
- });
- };
- }(this);
- fetcher();
- };
-
-</script>
-</%def>
-
-<%def name="center_panel()">
-<div id="content">
-
- <div id="overview">
- <div id="overview-viewport">
- <div id="overview-box"></div>
- </div>
- </div>
-
-
- <div id="viewport">
- </div>
-
-</div>
- <div id="nav">
-
- <div id="nav-controls">
- <form name="chr" id="chr" method="GET">
- <a href="#" onclick="javascript:view.left(5);tracks.redraw();"><<</a>
- <a href="#" onclick="javascript:view.left(2);tracks.redraw();"><</a>
- <span style="display: inline-block; width: 30em; text-align: center;">Viewing
- <select id="chrom" name="chrom">
- <option value="">loading</option>
- </select>
- <span id="low">0</span>-<span id="high">180857866</span></span>
- <span style="display: inline-block; width: 10em;">
- <a href="#" onclick="javascript:view.zoom_in(2);tracks.redraw();">+</a>
- <a href="#" onclick="javascript:view.zoom_out(2);tracks.redraw();">-</a>
- </span>
-
- <a href="#" onclick="javascript:view.right(2);tracks.redraw();">></a>
- <a href="#" onclick="javascript:view.right(5);tracks.redraw();">>></a>
- </form>
- </div>
-
- </div>
-</%def>
-
-<%def name="right_panel()">
- <div class="unified-panel-header" unselectable="on">
- <div class="unified-panel-header-inner">
- <div style="float: right">
- <a class='panel-header-button' href="${h.url_for( controller='root', action='history_options' )}" target="galaxy_main"><span>Options</span></a>
- </div>
- <div class="panel-header-text">History</div>
- </div>
- </div>
- <div class="unified-panel-body" style="overflow: hidden;">
- <iframe name="galaxy_history" width="100%" height="100%" frameborder="0" style="position: absolute; margin: 0; border: 0 none; height: 100%;" src="${h.url_for( controller='root', action='history' )}"></iframe>
- </div>
-</%def>
+</%def>
\ No newline at end of file
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/new_browser.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/tracks/new_browser.mako Thu Jun 11 15:37:19 2009 -0400
@@ -0,0 +1,50 @@
+<%inherit file="/base.mako"/>
+
+<%def name="javascripts()">
+${parent.javascripts()}
+<script type="text/javascript">
+$( function() {
+ $( "select[refresh_on_change='true']").change( function() {
+ $("#form").submit();
+ });
+});
+</script>
+</%def>
+
+<div class="form">
+ <div class="form-title">Select datasets to include in browser</div>
+ <div id="dbkey" class="form-body">
+ <form id="form" method="POST">
+ <div class="form-row">
+ <label for="dbkey">Reference genome build (dbkey): </label>
+ <div class="form-row-input">
+ <select name="dbkey" id="dbkey" refresh_on_change="true">
+ %for tmp_dbkey in dbkey_set:
+ <option value="${tmp_dbkey}"
+ %if tmp_dbkey == dbkey:
+ selected="true"
+ %endif
+ >${tmp_dbkey}</option>
+ %endfor
+ </select>
+ </div>
+ <div style="clear: both;"></div>
+ </div>
+ <div class="form-row">
+ <label for="dataset_ids">Datasets to include: </label>
+ %for key,value in datasets.items():
+ <div>
+ <input type="checkbox" name="dataset_ids" value="${key}" />
+ ${value}
+ </div>
+ %endfor
+
+ <div style="clear: both;"></div>
+ </div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="browse" value="Browse"/>
+ </div>
+ </form>
+ </div>
+</div>
diff -r 6125f71c838a -r 143dde05e1df templates/tracks/view.mako
--- a/templates/tracks/view.mako Thu Jun 11 12:20:03 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,88 +0,0 @@
-<html>
-
-<head>
-
-<link rel="stylesheet" type="text/css" href="/s/css/trackster.css" />
-
-<script type="text/javascript" src="/static/scripts/jquery.js"></script>
-<script type="text/javascript" src="/static/scripts/jquery.event.drag.js"></script>
-<script type="text/javascript" src="/static/scripts/trackster.js"></script>
-<script>
-
- var view = new View( "chr5", 180857866, 0, 180857866 );
- var tracks = new TrackLayout( view );
-
- $(function() {
-
- tracks.add( new LabelTrack( view, $("#viewport" ) ) );
- tracks.add( new LineTrack( "phastCons44way", view, $("#viewport" ) ) );
- tracks.add( new FeatureTrack( "knownGene", view, $("#viewport" ) ) );
-
- $(document).bind( "redraw", function( e ) {
- tracks.redraw();
- });
-
- $(window).resize( function( e ) {
- tracks.redraw();
- });
-
- $("#viewport").bind( "dragstart", function ( e ) {
- this.original_low = view.low;
- }).bind( "drag", function( e ) {
- var move_amount = ( e.offsetX - this.offsetLeft ) / this.offsetWidth;
- var range = view.high - view.low;
- var move_bases = Math.round( range * move_amount );
- var new_low = this.original_low - move_bases;
- if ( new_low < 0 ) {
- new_low = 0;
- }
- var new_high = new_low + range;
- if ( new_high > view.length ) {
- new_high = view.length;
- new_low = new_high - range;
- }
- view.low = new_low;
- view.high = new_high;
- tracks.redraw();
- });
-
- tracks.redraw();
- });
-</script>
-<body>
-
-<div id="content">
-
- <div id="overview">
- <div id="overview-viewport">
- <div id="overview-box"></div>
- </div>
- </div>
-
- <div id="nav">
-
- <div id="nav-controls">
- <a href="#" onclick="javascript:view.left(5);tracks.redraw();"><<</a>
- <a href="#" onclick="javascript:view.left(2);tracks.redraw();"><</a>
-
- <span style="display: inline-block; width: 30em; text-align: center;">Viewing chr5:<span id="low">0</span>-<span id="high">180857866</span></span>
-
- <span style="display: inline-block; width: 10em;">
- <a href="#" onclick="javascript:view.zoom_in(2);tracks.redraw();">+</a>
- <a href="#" onclick="javascript:view.zoom_out(2);tracks.redraw();">-</a>
- </span>
-
- <a href="#" onclick="javascript:view.right(2);tracks.redraw();">></a>
- <a href="#" onclick="javascript:view.right(5);tracks.redraw();">>></a>
- </div>
-
- </div>
-
- <div id="viewport">
- </div>
-
-</div>
-
-</body>
-
-</html>
1
0