galaxy-dev
December 2009
- 30 participants
- 108 discussions
11 Dec '09
details: http://www.bx.psu.edu/hg/galaxy/rev/a1c6f236ab02
changeset: 3165:a1c6f236ab02
user: Dan Blankenberg <dan@bx.psu.edu>
date: Wed Dec 09 15:44:10 2009 -0500
description:
Update BAM set_meta() and groom_dataset_content
diffstat:
lib/galaxy/datatypes/binary.py | 85 +++++++++++++++++++++---------------------
1 files changed, 42 insertions(+), 43 deletions(-)
diffs (103 lines):
diff -r b6252802e0b6 -r a1c6f236ab02 lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py Wed Dec 09 15:21:41 2009 -0500
+++ b/lib/galaxy/datatypes/binary.py Wed Dec 09 15:44:10 2009 -0500
@@ -58,26 +58,32 @@
on an output dataset after the content is initially generated.
"""
# Use samtools to sort the Bam file
- tmp_dir = tempfile.gettempdir()
- # Create a symlink from the temporary directory to the dataset file so that samtools can mess with it.
- tmp_dataset_file_name = os.path.join( tmp_dir, os.path.basename( file_name ) )
- # Here tmp_dataset_file_name looks something like /tmp/dataset_XX.dat
- os.symlink( file_name, tmp_dataset_file_name )
- # Sort alignments by leftmost coordinates. File <out.prefix>.bam will be created.
- # TODO: This command may also create temporary files <out.prefix>.%d.bam when the
- # whole alignment cannot be fitted into memory ( controlled by option -m ). We're
- # not handling this case here.
- tmp_sorted_dataset_file = tempfile.NamedTemporaryFile( prefix=tmp_dataset_file_name )
- tmp_sorted_dataset_file_name = tmp_sorted_dataset_file.name
- tmp_sorted_dataset_file.close()
- command = "samtools sort %s %s 2>/dev/null" % ( tmp_dataset_file_name, tmp_sorted_dataset_file_name )
- proc = subprocess.Popen( args=command, shell=True )
+ ##$ samtools sort
+ ##Usage: samtools sort [-on] [-m <maxMem>] <in.bam> <out.prefix>
+ ## Sort alignments by leftmost coordinates. File <out.prefix>.bam will be created.
+ ## This command may also create temporary files <out.prefix>.%d.bam when the
+ ## whole alignment cannot be fitted into memory ( controlled by option -m ).
+
+ #do this in a unique temp directory, because of possible <out.prefix>.%d.bam temp files
+ tmp_dir = tempfile.mkdtemp()
+ tmp_sorted_dataset_file_name_prefix = os.path.join( tmp_dir, 'sorted' )
+ stderr_name = tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "bam_sort_stderr" ).name
+ samtools_created_sorted_file_name = "%s.bam" % tmp_sorted_dataset_file_name_prefix #samtools accepts a prefix, not a filename, it always adds .bam to the prefix
+ command = "samtools sort %s %s" % ( file_name, tmp_sorted_dataset_file_name_prefix )
+ proc = subprocess.Popen( args=command, shell=True, cwd=tmp_dir, stderr=open( stderr_name, 'wb' ) )
proc.wait()
- tmp_sorted_bam_file_name = '%s.bam' % tmp_sorted_dataset_file_name
- # Move tmp_sorted_bam_file_name to our output dataset location
- shutil.move( tmp_sorted_bam_file_name, file_name )
- # Remove all remaining temporary files
- os.unlink( tmp_dataset_file_name )
+
+ #Did sort succeed?
+ stderr = open( stderr_name ).read().strip()
+ if stderr:
+ raise Exception, "Error Grooming BAM file contents: %s" % stderr
+
+ # Move samtools_created_sorted_file_name to our output dataset location
+ shutil.move( samtools_created_sorted_file_name, file_name )
+
+ # Remove temp file and empty temporary directory
+ os.unlink( stderr_name )
+ os.rmdir( tmp_dir )
def init_meta( self, dataset, copy_from=None ):
Binary.init_meta( self, dataset, copy_from=copy_from )
def set_meta( self, dataset, overwrite = True, **kwd ):
@@ -86,30 +92,23 @@
index_file = dataset.metadata.bam_index
if not index_file:
index_file = dataset.metadata.spec['bam_index'].param.new_file( dataset = dataset )
- tmp_dir = tempfile.gettempdir()
- # Create a symlink from the temporary directory to the dataset file so that samtools can mess with it.
- tmp_dataset_file_name = os.path.join( tmp_dir, os.path.basename( dataset.file_name ) )
- # Here tmp_dataset_file_name looks something like /tmp/dataset_XX.dat
- os.symlink( dataset.file_name, tmp_dataset_file_name )
- errors = False
- try:
- # Create the Bam index
- command = 'samtools index %s' % tmp_dataset_file_name
- proc = subprocess.Popen( args=command, shell=True )
- proc.wait()
- except Exception, e:
- errors = True
- err_msg = 'Error creating index for BAM file (%s)' % str( tmp_dataset_file_name )
- log.exception( err_msg )
- sys.stderr.write( err_msg + str( e ) )
- if not errors:
- # Move the temporary index file ~/tmp/dataset_XX.dat.bai to our metadata file
- # storage location ~/database/files/_metadata_files/dataset_XX.dat
- shutil.move( '%s.bai' % ( tmp_dataset_file_name ), index_file.file_name )
- # Remove all remaining temporary files
- os.unlink( tmp_dataset_file_name )
- # Set the metadata
- dataset.metadata.bam_index = index_file
+
+ # Create the Bam index
+ ##$ samtools index
+ ##Usage: samtools index <in.bam> [<out.index>]
+ stderr_name = tempfile.NamedTemporaryFile( prefix = "bam_index_stderr" ).name
+ command = 'samtools index %s %s' % ( dataset.file_name, index_file.file_name )
+ proc = subprocess.Popen( args=command, shell=True, stderr=open( stderr_name, 'wb' ) )
+
+ #Did index succeed?
+ stderr = open( stderr_name ).read().strip()
+ if stderr:
+ raise Exception, "Error Setting BAM Metadata: %s" % stderr
+
+ dataset.metadata.bam_index = index_file
+
+ # Remove temp file
+ os.unlink( stderr_name )
def sniff( self, filename ):
# BAM is compressed in the BGZF format, and must not be uncompressed in Galaxy.
# The first 4 bytes of any bam file is 'BAM\1', and the file is binary.
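The pattern in this patch (run samtools inside a freshly created temporary directory so its <out.prefix>.%d.bam spill files stay contained, redirect stderr to a file, and treat any stderr output as failure) works as a standalone recipe. A minimal sketch, assuming samtools 0.1.x (which takes an output prefix and appends ".bam") is on PATH; groom_bam is a hypothetical name:

    import os
    import shutil
    import subprocess
    import tempfile

    def groom_bam(file_name):
        # Unique temp dir: samtools sort may spill <prefix>.%d.bam files here.
        tmp_dir = tempfile.mkdtemp()
        try:
            prefix = os.path.join(tmp_dir, 'sorted')
            stderr_path = os.path.join(tmp_dir, 'bam_sort_stderr')
            with open(stderr_path, 'wb') as stderr_fh:
                proc = subprocess.Popen(['samtools', 'sort', file_name, prefix],
                                        cwd=tmp_dir, stderr=stderr_fh)
                proc.wait()
            stderr = open(stderr_path).read().strip()
            if stderr:
                raise Exception("Error grooming BAM file contents: %s" % stderr)
            # samtools wrote <prefix>.bam; move it over the original dataset.
            shutil.move('%s.bam' % prefix, file_name)
        finally:
            shutil.rmtree(tmp_dir, ignore_errors=True)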
11 Dec '09
details: http://www.bx.psu.edu/hg/galaxy/rev/fdb34b7858df
changeset: 3158:fdb34b7858df
user: Kanwei Li <kanwei@gmail.com>
date: Tue Dec 08 13:25:33 2009 -0500
description:
Remove "c" prepended to column numbers from Column parameters when entered in workflow
diffstat:
lib/galaxy/tools/parameters/basic.py | 8 ++++++++
1 files changed, 8 insertions(+), 0 deletions(-)
diffs (18 lines):
diff -r 022a8c94883f -r fdb34b7858df lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Tue Dec 08 11:46:13 2009 -0500
+++ b/lib/galaxy/tools/parameters/basic.py Tue Dec 08 13:25:33 2009 -0500
@@ -764,6 +764,14 @@
self.accept_default = string_as_bool( elem.get( "accept_default", False ))
self.data_ref = elem.get( "data_ref", None )
self.is_dynamic = True
+ def from_html( self, value, trans=None, context={} ):
+ """
+ Label convention prepends column number with a 'c', but tool uses the integer. This
+ removes the 'c' when entered into a workflow.
+ """
+ if value.startswith("c"):
+ value = value[1:]
+ return super( ColumnListParameter, self ).from_html( value, trans, context )
def get_column_list( self, trans, other_values ):
"""
Generate a select list containing the columns of the associated
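The conversion this patch adds is small enough to test in isolation. A minimal sketch of the same idea, under the hypothetical name strip_column_prefix:

    def strip_column_prefix(value):
        # Labels use 'c1', 'c2', ...; the tool itself expects the bare integer.
        if value.startswith("c"):
            value = value[1:]
        return value

    assert strip_column_prefix("c4") == "4"
    assert strip_column_prefix("4") == "4"

As changeset 3162 later in this thread shows, this scalar-only version still needs extending for multi-select lists.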
11 Dec '09
details: http://www.bx.psu.edu/hg/galaxy/rev/d95a9c843c53
changeset: 3159:d95a9c843c53
user: Dan Blankenberg <dan@bx.psu.edu>
date: Tue Dec 08 17:05:35 2009 -0500
description:
Change the way the check for whether dataset hashed directories exist is handled.
diffstat:
lib/galaxy/model/__init__.py | 6 +-----
1 files changed, 1 insertions(+), 5 deletions(-)
diffs (17 lines):
diff -r fdb34b7858df -r d95a9c843c53 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Tue Dec 08 13:25:33 2009 -0500
+++ b/lib/galaxy/model/__init__.py Tue Dec 08 17:05:35 2009 -0500
@@ -374,12 +374,8 @@
if not os.path.exists( filename ):
dir = os.path.join( self.file_path, *directory_hash_id( self.id ) )
# Create directory if it does not exist
- try:
+ if not os.path.exists( dir ):
os.makedirs( dir )
- except OSError, e:
- # File Exists is okay, otherwise reraise
- if e.errno != errno.EEXIST:
- raise
# Return filename inside hashed directory
return os.path.abspath( os.path.join( dir, "dataset_%d.dat" % self.id ) )
else:
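Worth noting: the try/except form this patch removes and the exists-then-makedirs form it adds are not quite equivalent. With concurrent job runners, the directory can be created between the os.path.exists() check and os.makedirs(), so the exception-based idiom is the race-free one. A minimal sketch of that idiom, under the hypothetical name ensure_dir:

    import errno
    import os

    def ensure_dir(path):
        # Attempt creation unconditionally; tolerate "already exists",
        # re-raise anything else (permissions, bad path, ...).
        try:
            os.makedirs(path)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise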
11 Dec '09
details: http://www.bx.psu.edu/hg/galaxy/rev/ba9ee5828e7b
changeset: 3162:ba9ee5828e7b
user: Greg Von Kuster <greg@bx.psu.edu>
date: Wed Dec 09 12:18:27 2009 -0500
description:
Fix the recently introduced ColumnListParameter.from_html() method to handle multi-select lists; this should fix at least the broken tabular2fasta functional test.
diffstat:
lib/galaxy/tools/parameters/basic.py | 10 +++++++++-
1 files changed, 9 insertions(+), 1 deletions(-)
diffs (20 lines):
diff -r b400212305b6 -r ba9ee5828e7b lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Wed Dec 09 10:15:15 2009 -0500
+++ b/lib/galaxy/tools/parameters/basic.py Wed Dec 09 12:18:27 2009 -0500
@@ -769,7 +769,15 @@
Label convention prepends column number with a 'c', but tool uses the integer. This
removes the 'c' when entered into a workflow.
"""
- if value.startswith("c"):
+ if type( value ) == list:
+ # We have a multi-select list
+ new_value = []
+ for item in value:
+ if item.startswith( "c" ):
+ item = item[1:]
+ new_value.append( item )
+ value = new_value
+ elif value and value.startswith( "c" ):
value = value[1:]
return super( ColumnListParameter, self ).from_html( value, trans, context )
def get_column_list( self, trans, other_values ):
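Extending the earlier strip_column_prefix sketch the same way this patch extends from_html(), so one hypothetical helper covers both a single value and a multi-select list:

    def strip_column_prefix(value):
        # Multi-select lists arrive as a list of 'cN' strings.
        if isinstance(value, list):
            return [item[1:] if item.startswith("c") else item
                    for item in value]
        # Single values may be empty, so guard before startswith().
        if value and value.startswith("c"):
            value = value[1:]
        return value

    assert strip_column_prefix(["c1", "c3"]) == ["1", "3"]
    assert strip_column_prefix("c2") == "2"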
11 Dec '09
details: http://www.bx.psu.edu/hg/galaxy/rev/47b702c583a3
changeset: 3160:47b702c583a3
user: jeremy goecks <jeremy.goecks@emory.edu>
date: Tue Dec 08 18:49:06 2009 -0500
description:
Added 'annotate history' functionality to pages. To accomplish this, refactored tagging from pure JavaScript to progressive JavaScript. Also fixed unicode and grid-filtering bugs.
diffstat:
lib/galaxy/web/base/controller.py | 17 +
lib/galaxy/web/controllers/history.py | 50 +-
lib/galaxy/web/controllers/page.py | 33 +-
lib/galaxy/web/controllers/tag.py | 3 +-
lib/galaxy/web/framework/helpers/grids.py | 2 +-
static/scripts/autocomplete_tagging.js | 868 +++++++++++++++----------------
static/scripts/jquery.wymeditor.js | 5 +-
static/scripts/packed/autocomplete_tagging.js | 2 +-
static/wymeditor/lang/en.js | 1 +
templates/dataset/edit_attributes.mako | 10 +-
templates/history/sharing.mako | 2 +-
templates/history/view.mako | 7 +-
templates/page/display.mako | 169 ++++++
templates/page/editor.mako | 105 ++-
templates/page/history_annotation_table.mako | 59 ++
templates/page/wymiframe.mako | 27 +
templates/root/history.mako | 12 +-
templates/tagging_common.mako | 98 +++-
18 files changed, 935 insertions(+), 535 deletions(-)
diffs (1867 lines):
diff -r d95a9c843c53 -r 47b702c583a3 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py Tue Dec 08 17:05:35 2009 -0500
+++ b/lib/galaxy/web/base/controller.py Tue Dec 08 18:49:06 2009 -0500
@@ -7,6 +7,7 @@
# Pieces of Galaxy to make global in every controller
from galaxy import config, tools, web, model, util
from galaxy.web import error, form, url_for
+from galaxy.model.orm import *
from Cheetah.Template import Template
@@ -25,6 +26,22 @@
"""Returns the application toolbox"""
return self.app.toolbox
+ def get_history( self, trans, id, check_ownership=True ):
+ """Get a History from the database by id, verifying ownership."""
+ # Load history from database
+ id = trans.security.decode_id( id )
+ history = trans.sa_session.query( model.History ).get( id )
+ if not history:
+ err+msg( "History not found" )
+ if check_ownership:
+ # Verify ownership
+ user = trans.get_user()
+ if not user:
+ error( "Must be logged in to manage histories" )
+ if history.user != user:
+ error( "History is not owned by current user" )
+ return history
+
Root = BaseController
"""
Deprecated: `BaseController` used to be available under the name `Root`
diff -r d95a9c843c53 -r 47b702c583a3 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Tue Dec 08 17:05:35 2009 -0500
+++ b/lib/galaxy/web/controllers/history.py Tue Dec 08 18:49:06 2009 -0500
@@ -44,7 +44,7 @@
return ""
def get_link( self, trans, grid, item ):
if item.users_shared_with or item.importable:
- return dict( operation="sharing" )
+ return dict( operation="sharing", id=item.id )
return None
class DeletedColumn( grids.GridColumn ):
@@ -205,7 +205,7 @@
# Load the histories and ensure they all belong to the current user
histories = []
for history_id in history_ids:
- history = get_history( trans, history_id )
+ history = self.get_history( trans, history_id )
if history:
# Ensure history is owned by current user
if history.user_id != None and trans.user:
@@ -237,7 +237,7 @@
history.importable = True
elif operation == "disable import via link":
if history_ids:
- histories = [ get_history( trans, history_id ) for history_id in history_ids ]
+ histories = [ self.get_history( trans, history_id ) for history_id in history_ids ]
for history in histories:
if history.importable:
history.importable = False
@@ -332,7 +332,7 @@
if not ids:
message = "Select a history to unshare"
return self.shared_list_grid( trans, status='error', message=message, **kwargs )
- histories = [ get_history( trans, history_id ) for history_id in ids ]
+ histories = [ self.get_history( trans, history_id ) for history_id in ids ]
for history in histories:
# Current user is the user with which the histories were shared
association = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ).filter_by( user=trans.user, history=history ).one()
@@ -375,7 +375,7 @@
@web.require_login( "get history name" )
def get_name_async( self, trans, id=None ):
""" Returns the name for a given history. """
- history = get_history( trans, id, False )
+ history = self.get_history( trans, id, False )
# To get name: user must own history, history must be importable.
if history.user == trans.get_user() or history.importable or trans.get_user() in history.users_shared_with:
@@ -386,15 +386,15 @@
@web.require_login( "set history's importable flag" )
def set_importable_async( self, trans, id=None, importable=False ):
""" Set history's importable attribute. """
- history = get_history( trans, id, True )
+ history = self.get_history( trans, id, True )
# Only set if importable value would change; this prevents a change in the update_time unless attribute really changed.
- importable = importable in ['True', 'true', 't'];
+ importable = importable in ['True', 'true', 't', 'T'];
if history and history.importable != importable:
history.importable = importable
trans.sa_session.flush()
- return result
+ return
@web.expose
def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
@@ -416,7 +416,7 @@
user_history = trans.get_history()
if not id:
return trans.show_error_message( "You must specify a history you want to import." )
- import_history = get_history( trans, id, check_ownership=False )
+ import_history = self.get_history( trans, id, check_ownership=False )
if not import_history:
return trans.show_error_message( "The specified history does not exist.")
if not import_history.importable:
@@ -470,7 +470,7 @@
# Get history to view.
if not id:
return trans.show_error_message( "You must specify a history you want to view." )
- history_to_view = get_history( trans, id, False)
+ history_to_view = self.get_history( trans, id, False)
# Integrity checks.
if not history_to_view:
return trans.show_error_message( "The specified history does not exist." )
@@ -512,7 +512,7 @@
send_to_err = err_msg
histories = []
for history_id in id:
- histories.append( get_history( trans, history_id ) )
+ histories.append( self.get_history( trans, history_id ) )
return trans.fill_template( "/history/share.mako",
histories=histories,
email=email,
@@ -618,7 +618,7 @@
send_to_err = ""
histories = []
for history_id in id:
- histories.append( get_history( trans, history_id ) )
+ histories.append( self.get_history( trans, history_id ) )
send_to_users = []
for email_address in util.listify( email ):
email_address = email_address.strip()
@@ -776,7 +776,7 @@
if id:
ids = util.listify( id )
if ids:
- histories = [ get_history( trans, history_id ) for history_id in ids ]
+ histories = [ self.get_history( trans, history_id ) for history_id in ids ]
for history in histories:
trans.sa_session.add( history )
if params.get( 'enable_import_via_link', False ):
@@ -831,7 +831,7 @@
histories = []
cur_names = []
for history_id in id:
- history = get_history( trans, history_id )
+ history = self.get_history( trans, history_id )
if history and history.user_id == user.id:
histories.append( history )
cur_names.append( history.get_display_name() )
@@ -872,7 +872,7 @@
ids = util.listify( id )
histories = []
for history_id in ids:
- history = get_history( trans, history_id, check_ownership=False )
+ history = self.get_history( trans, history_id, check_ownership=False )
histories.append( history )
user = trans.get_user()
for history in histories:
@@ -896,22 +896,4 @@
msg = 'Clone with name "%s" is now included in your previously stored histories.' % new_history.name
else:
msg = '%d cloned histories are now included in your previously stored histories.' % len( histories )
- return trans.show_ok_message( msg )
-
-## ---- Utility methods -------------------------------------------------------
-
-def get_history( trans, id, check_ownership=True ):
- """Get a History from the database by id, verifying ownership."""
- # Load history from database
- id = trans.security.decode_id( id )
- history = trans.sa_session.query( model.History ).get( id )
- if not history:
- err+msg( "History not found" )
- if check_ownership:
- # Verify ownership
- user = trans.get_user()
- if not user:
- error( "Must be logged in to manage histories" )
- if history.user != user:
- error( "History is not owned by current user" )
- return history
+ return trans.show_ok_message( msg )
\ No newline at end of file
diff -r d95a9c843c53 -r 47b702c583a3 lib/galaxy/web/controllers/page.py
--- a/lib/galaxy/web/controllers/page.py Tue Dec 08 17:05:35 2009 -0500
+++ b/lib/galaxy/web/controllers/page.py Tue Dec 08 18:49:06 2009 -0500
@@ -315,5 +315,36 @@
@web.expose
@web.require_login("select a history from saved histories")
def list_histories_for_selection( self, trans, **kwargs ):
+ """ Returns HTML that enables a user to select one or more histories. """
# Render the list view
- return self._history_selection_grid( trans, **kwargs )
\ No newline at end of file
+ return self._history_selection_grid( trans, **kwargs )
+
+ @web.expose
+ @web.require_login("get annotation table for history")
+ def get_history_annotation_table( self, trans, id ):
+ """ Returns HTML for an annotation table for a history. """
+
+ # TODO: users should be able to annotate a history if they own it, it is importable, or it is shared with them. This only
+ # returns a history if a user owns it.
+ history = self.get_history( trans, id, True )
+
+ if history:
+ # TODO: Query taken from root/history; it should be moved either into history or trans object
+ # so that it can reused.
+ query = trans.sa_session.query( model.HistoryDatasetAssociation ) \
+ .filter( model.HistoryDatasetAssociation.history == history ) \
+ .options( eagerload( "children" ) ) \
+ .join( "dataset" ).filter( model.Dataset.purged == False ) \
+ .options( eagerload_all( "dataset.actions" ) ) \
+ .order_by( model.HistoryDatasetAssociation.hid )
+ # For now, do not show deleted datasets.
+ show_deleted = False
+ if not show_deleted:
+ query = query.filter( model.HistoryDatasetAssociation.deleted == False )
+ return trans.fill_template( "page/history_annotation_table.mako", history=history, datasets=query.all(), show_deleted=False )
+
+ @web.expose
+ def get_editor_iframe( self, trans ):
+ """ Returns the document for the page editor's iframe. """
+ return trans.fill_template( "page/wymiframe.mako" )
+
\ No newline at end of file
diff -r d95a9c843c53 -r 47b702c583a3 lib/galaxy/web/controllers/tag.py
--- a/lib/galaxy/web/controllers/tag.py Tue Dec 08 17:05:35 2009 -0500
+++ b/lib/galaxy/web/controllers/tag.py Tue Dec 08 18:49:06 2009 -0500
@@ -178,8 +178,9 @@
# Create and return autocomplete data.
ac_data = "#Header|Your Values for '%s'\n" % (tag_name)
+ tag_uname = self._get_usernames_for_tag(trans.sa_session, trans.get_user(), tag, item_class, item_tag_assoc_class)[0]
for row in result_set:
- ac_data += tag.name + ":" + row[0] + "|" + row[0] + "\n"
+ ac_data += tag_uname + ":" + row[0] + "|" + row[0] + "\n"
return ac_data
def _get_usernames_for_tag(self, db_session, user, tag, item_class, item_tag_assoc_class):
diff -r d95a9c843c53 -r 47b702c583a3 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py Tue Dec 08 17:05:35 2009 -0500
+++ b/lib/galaxy/web/framework/helpers/grids.py Tue Dec 08 18:49:06 2009 -0500
@@ -360,7 +360,7 @@
elt_id="tagging-elt" + str( self.tag_elt_id_gen )
div_elt = "<div id=%s></div>" % elt_id
return div_elt + trans.fill_template( "/tagging_common.mako", trans=trans, tagged_item=item, elt_context=self.grid_name,
- elt_id = elt_id, in_form="true", input_size="20", tag_click_fn="add_tag_to_grid_filter" )
+ elt_id = elt_id, in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter" )
def filter( self, db_session, query, column_filter ):
""" Modify query to filter model_class by tag. Multiple filters are ANDed. """
if column_filter == "All":
diff -r d95a9c843c53 -r 47b702c583a3 static/scripts/autocomplete_tagging.js
--- a/static/scripts/autocomplete_tagging.js Tue Dec 08 17:05:35 2009 -0500
+++ b/static/scripts/autocomplete_tagging.js Tue Dec 08 18:49:06 2009 -0500
@@ -1,486 +1,458 @@
/**
- * JQuery extension for tagging with autocomplete.
- * @author: Jeremy Goecks
- * @require: jquery.autocomplete plugin
- */
-var ac_tag_area_id_gen = 1;
+* JQuery extension for tagging with autocomplete.
+* @author: Jeremy Goecks
+* @require: jquery.autocomplete plugin
+*/
+jQuery.fn.autocomplete_tagging = function(elt_id, options)
+{
-jQuery.fn.autocomplete_tagging = function(options) {
+ //
+ // Set up function defaults.
+ //
+ var defaults =
+ {
+ get_toggle_link_text_fn: function(tags)
+ {
+ var text = "";
+ var num_tags = array_length(tags);
+ if (num_tags != 0)
+ text = num_tags + (num_tags != 0 ? " Tags" : " Tag");
+ else
+ // No tags.
+ text = "Add tags";
+ return text;
+ },
+ tag_click_fn : function (name, value) { },
+ editable: true,
+ input_size: 20,
+ in_form: false,
+ tags : {},
+ use_toggle_link: true,
+ item_id: "",
+ add_tag_img: "",
+ add_tag_img_rollover: "",
+ delete_tag_img: "",
+ ajax_autocomplete_tag_url: "",
+ ajax_retag_url: "",
+ ajax_delete_tag_url: "",
+ ajax_add_tag_url: ""
+ };
- //
- // Set up function defaults.
- //
- var defaults =
- {
- get_toggle_link_text_fn: function(tags)
- {
- var text = "";
- var num_tags = array_length(tags);
- if (num_tags != 0)
- text = num_tags + (num_tags != 0 ? " Tags" : " Tag");
- else
- // No tags.
- text = "Add tags";
- return text;
- },
- tag_click_fn : function (name, value) { },
- editable: true,
- input_size: 20,
- in_form: false,
- tags : {},
- use_toggle_link: true,
- item_id: "",
- add_tag_img: "",
- add_tag_img_rollover: "",
- delete_tag_img: "",
- ajax_autocomplete_tag_url: "",
- ajax_retag_url: "",
- ajax_delete_tag_url: "",
- ajax_add_tag_url: ""
- };
+ //
+ // Extend object.
+ //
+ var settings = jQuery.extend(defaults, options);
- //
- // Extend object.
- //
- var settings = jQuery.extend(defaults, options);
-
- //
- // Create core elements: tag area and TODO.
- //
-
- // Tag area.
- var area_id = "tag-area-" + (ac_tag_area_id_gen)++;
- var tag_area = $("<div>").attr("id", area_id).addClass("tag-area");
- this.append(tag_area);
-
- //
- // Returns the number of keys (elements) in an array/dictionary.
- //
- var array_length = function(an_array)
- {
- if (an_array.length)
- return an_array.length;
+ //
+ // Returns the number of keys (elements) in an array/dictionary.
+ //
+ var array_length = function(an_array)
+ {
+ if (an_array.length)
+ return an_array.length;
- var count = 0;
- for (element in an_array)
- count++;
- return count;
- };
-
- //
- // Function to build toggle link.
- //
- var build_toggle_link = function()
- {
- var link_text = settings.get_toggle_link_text_fn(settings.tags);
- var toggle_link = $("<a href='/history/tags'>").text(link_text).addClass("toggle-link");
- // Link toggles the display state of the tag area.
- toggle_link.click( function()
- {
- // Take special actions depending on whether toggle is showing or hiding link.
- var showing_tag_area = (tag_area.css("display") == "none");
- var after_toggle_fn;
- if (showing_tag_area)
- {
- after_toggle_fn = function()
- {
- // If there are no tags, go right to editing mode by generating a
- // click on the area.
- var num_tags = array_length(settings.tags);
- if (num_tags == 0)
- tag_area.click();
- };
- }
- else // Hiding area.
- {
- after_toggle_fn = function()
- {
- tag_area.blur();
- };
- }
- tag_area.slideToggle("fast", after_toggle_fn);
-
- return false;
- });
-
- return toggle_link;
- };
-
- // Add toggle link.
- var toggle_link = build_toggle_link();
- if (settings.use_toggle_link)
- {
- this.prepend(toggle_link);
- }
-
- //
- // Function to build other elements.
- //
+ var count = 0;
+ for (element in an_array)
+ count++;
+ return count;
+ };
- //
- // Return a string that contains the contents of an associative array. This is
- // a debugging method.
- //
- var assoc_array_to_str = function(an_array)
- {
- // Convert associative array to simple array and then join array elements.
- var array_str_list = new Array();
- for (key in an_array)
- array_str_list[array_str_list.length] = key + "-->" + an_array[key];
-
- return "{" + array_str_list.join(",") + "}"
- };
+ //
+ // Initalize object's elements.
+ //
- //
- // Collapse tag name + value into a single string.
- //
- var build_tag_str = function(tag_name, tag_value)
- {
- return tag_name + ( (tag_value != "" && tag_value) ? ":" + tag_value : "");
- };
-
- //
- // Get tag name and value from a string.
- //
- var get_tag_name_and_value = function(tag_str)
- {
- return tag_str.split(":");
- };
-
- //
- // Add "add tag" button.
- //
- var build_add_tag_button = function(tag_input_field)
- {
- var add_tag_button = $("<img src='" + settings.add_tag_img + "' rollover='" + settings.add_tag_img_rollover + "'/>").addClass("add-tag-button");
-
- add_tag_button.click( function()
- {
- // Hide button.
- $(this).hide();
-
- // Clicking on button is the same as clicking on the tag area.
- tag_area.click();
-
- return false;
+ // Get elements for this object.
+ var this_obj = $('#' + elt_id);
+ var id_parts = $(this).attr('id').split("-");
+ var obj_id = id_parts[ id_parts.length-1 ];
+ var tag_area = this_obj.find('#tag-area-' + obj_id);
+ var toggle_link = this_obj.find('#toggle-link-' + obj_id);
+ var tag_input_field = this_obj.find('#tag-input');
+ var add_tag_button = this_obj.find('.add-tag-button');
+
+ // Initialize toggle link.
+ toggle_link.click( function() {
+ var id = $(this).attr('id').split('-')[2];
+
+ // Take special actions depending on whether toggle is showing or hiding link.
+ var tag_area = $('#tag-area-' + id);
+ var showing_tag_area = (tag_area.css("display") == "none");
+ var after_toggle_fn;
+ if (showing_tag_area)
+ {
+ after_toggle_fn = function()
+ {
+ // If there are no tags, go right to editing mode by generating a
+ // click on the area.
+ var num_tags = $(this).find('.tag-button').length;
+ if (num_tags == 0)
+ tag_area.click();
+ };
+ }
+ else // Hiding area.
+ {
+ after_toggle_fn = function()
+ {
+ tag_area.blur();
+ };
+ }
+ tag_area.slideToggle("fast", after_toggle_fn);
+
+ return $(this);
});
- return add_tag_button;
- };
+ // Initialize tag input field.
+ if (settings.editable)
+ tag_input_field.hide();
+ tag_input_field.keyup( function( e )
+ {
+ if ( e.keyCode == 27 )
+ {
+ // Escape key
+ $(this).trigger( "blur" );
+ }
+ else if (
+ ( e.keyCode == 13 ) || // Return Key
+ ( e.keyCode == 188 ) || // Comma
+ ( e.keyCode == 32 ) // Space
+ )
+ {
+ //
+ // Check input.
+ //
- //
- // Function that builds a tag button.
- //
- var build_tag_button = function(tag_str)
- {
- // Build "delete tag" image and handler.
- var delete_img = $("<img src='" + settings.delete_tag_img + "'/>").addClass("delete-tag-img");
- delete_img.mouseenter( function ()
- {
- $(this).attr("src", settings.delete_tag_img_rollover);
- });
- delete_img.mouseleave( function ()
- {
- $(this).attr("src", settings.delete_tag_img);
- });
- delete_img.click( function ()
- {
- // Tag button is image's parent.
- var tag_button = $(this).parent();
-
- // Get tag name, value.
- var tag_name_elt = tag_button.find(".tag-name").eq(0);
- var tag_str = tag_name_elt.text();
- var tag_name_and_value = get_tag_name_and_value(tag_str);
- var tag_name = tag_name_and_value[0];
- var tag_value = tag_name_and_value[1];
+ new_value = this.value;
- var prev_button = tag_button.prev();
- tag_button.remove();
+ // Do nothing if return key was used to autocomplete.
+ if (return_key_pressed_for_autocomplete == true)
+ {
+ return_key_pressed_for_autocomplete = false;
+ return false;
+ }
- // Remove tag from local list for consistency.
- delete settings.tags[tag_name];
-
- // Update toggle link text.
- var new_text = settings.get_toggle_link_text_fn(settings.tags);
- toggle_link.text(new_text);
+ // Suppress space after a ":"
+ if ( new_value.indexOf(": ", new_value.length - 2) != -1)
+ {
+ this.value = new_value.substring(0, new_value.length-1);
+ return false;
+ }
- // Delete tag.
- $.ajax({
- url: settings.ajax_delete_tag_url,
- data: { tag_name: tag_name },
- error: function()
- {
- // Failed. Roll back changes and show alert.
- settings.tags[tag_name] = tag_value;
- if (prev_button.hasClass("tag-button"))
- prev_button.after(tag_button);
- else
- tag_area.prepend(tag_button);
- var new_text = settings.get_toggle_link_text_fn(settings.tags);
- alert( "Remove tag failed" );
-
- toggle_link.text(new_text);
-
- // TODO: no idea why it's necessary to set this up again.
- delete_img.mouseenter( function ()
- {
- $(this).attr("src", settings.delete_tag_img_rollover);
- });
- delete_img.mouseleave( function ()
- {
- $(this).attr("src", settings.delete_tag_img);
- });
- },
- success: function() {}
- });
+ // Remove trigger keys from input.
+ if ( (e.keyCode == 188) || (e.keyCode == 32) )
+ new_value = new_value.substring( 0 , new_value.length - 1 );
- return true;
- });
+ // Trim whitespace.
+ new_value = new_value.replace(/^\s+|\s+$/g,"");
- // Build tag button.
- var tag_name_elt = $("<span>").text(tag_str).addClass("tag-name");
- tag_name_elt.click( function()
- {
- tag_name_and_value = tag_str.split(":")
- settings.tag_click_fn(tag_name_and_value[0], tag_name_and_value[1]);
- return true;
- });
+ // Too short?
+ if (new_value.length < 2)
+ return false;
- var tag_button = $("<span></span>").addClass("tag-button");
- tag_button.append(tag_name_elt);
- // Allow delete only if element is editable.
- if (settings.editable)
- tag_button.append(delete_img);
+ //
+ // New tag OK - apply it.
+ //
- return tag_button;
- };
+ this.value = "";
- //
- // Build input + autocompete for tag.
- //
- var build_tag_input = function(tag_text)
- {
- // If element is in form, tag input is a textarea; otherwise element is a input type=text.
- var t;
- if (settings.in_form)
- t = $( "<textarea id='history-tag-input' rows='1' cols='" +
- settings.input_size + "' value='" + escape(tag_text) + "'></textarea>" );
- else // element not in form.
- t = $( "<input id='history-tag-input' type='text' size='" +
- settings.input_size + "' value='" + escape(tag_text) + "'></input>" );
- t.keyup( function( e )
- {
- if ( e.keyCode == 27 )
- {
- // Escape key
- $(this).trigger( "blur" );
- } else if (
- ( e.keyCode == 13 ) || // Return Key
- ( e.keyCode == 188 ) || // Comma
- ( e.keyCode == 32 ) // Space
- )
- {
- //
- // Check input.
- //
-
- new_value = this.value;
-
- // Do nothing if return key was used to autocomplete.
- if (return_key_pressed_for_autocomplete == true)
- {
- return_key_pressed_for_autocomplete = false;
- return false;
- }
-
- // Suppress space after a ":"
- if ( new_value.indexOf(": ", new_value.length - 2) != -1)
- {
- this.value = new_value.substring(0, new_value.length-1);
- return false;
- }
-
- // Remove trigger keys from input.
- if ( (e.keyCode == 188) || (e.keyCode == 32) )
- new_value = new_value.substring( 0 , new_value.length - 1 );
-
- // Trim whitespace.
- new_value = new_value.replace(/^\s+|\s+$/g,"");
-
- // Too short?
- if (new_value.length < 2)
- return false;
-
- //
- // New tag OK - apply it.
- //
-
- this.value = "";
-
- // Add button for tag after all other tag buttons.
- var new_tag_button = build_tag_button(new_value);
- var tag_buttons = tag_area.children(".tag-button");
- if (tag_buttons.length != 0)
- {
- var last_tag_button = tag_buttons.slice(tag_buttons.length-1);
- last_tag_button.after(new_tag_button);
- }
- else
- tag_area.prepend(new_tag_button);
+ // Add button for tag after all other tag buttons.
+ var new_tag_button = build_tag_button(new_value);
+ var tag_buttons = tag_area.children(".tag-button");
+ if (tag_buttons.length != 0)
+ {
+ var last_tag_button = tag_buttons.slice(tag_buttons.length-1);
+ last_tag_button.after(new_tag_button);
+ }
+ else
+ tag_area.prepend(new_tag_button);
- // Add tag to internal list.
- var tag_name_and_value = new_value.split(":");
- settings.tags[tag_name_and_value[0]] = tag_name_and_value[1];
-
- // Update toggle link text.
- var new_text = settings.get_toggle_link_text_fn(settings.tags);
- toggle_link.text(new_text);
+ // Add tag to internal list.
+ var tag_name_and_value = new_value.split(":");
+ settings.tags[tag_name_and_value[0]] = tag_name_and_value[1];
- // Commit tag to server.
- var $this = $(this);
- $.ajax({
- url: settings.ajax_add_tag_url,
- data: { new_tag: new_value },
- error: function()
- {
- // Failed. Roll back changes and show alert.
- new_tag_button.remove();
- delete settings.tags[tag_name_and_value[0]];
- var new_text = settings.get_toggle_link_text_fn(settings.tags);
- toggle_link.text(new_text);
- alert( "Add tag failed" );
- },
- success: function()
- {
- // Flush autocomplete cache because it's not out of date.
- // TODO: in the future, we could remove the particular item
- // that was chosen from the cache rather than flush it.
- $this.flushCache();
- }
- });
-
- return false;
- }
+ // Update toggle link text.
+ var new_text = settings.get_toggle_link_text_fn(settings.tags);
+ toggle_link.text(new_text);
+
+ // Commit tag to server.
+ var $this = $(this);
+ $.ajax({
+ url: settings.ajax_add_tag_url,
+ data: { new_tag: new_value },
+ error: function()
+ {
+ // Failed. Roll back changes and show alert.
+ new_tag_button.remove();
+ delete settings.tags[tag_name_and_value[0]];
+ var new_text = settings.get_toggle_link_text_fn(settings.tags);
+ toggle_link.text(new_text);
+ alert( "Add tag failed" );
+ },
+ success: function()
+ {
+ // Flush autocomplete cache because it's not out of date.
+ // TODO: in the future, we could remove the particular item
+ // that was chosen from the cache rather than flush it.
+ $this.flushCache();
+ }
+ });
+
+ return false;
+ }
});
// Add autocomplete to input.
- var format_item_func = function(key, row_position, num_rows, value, search_term) {
- tag_name_and_value = value.split(":");
- return (tag_name_and_value.length == 1 ? tag_name_and_value[0] :tag_name_and_value[1]);
- //var array = new Array(key, value, row_position, num_rows,
- //search_term ); return "\"" + array.join("*") + "\"";
+ var format_item_func = function(key, row_position, num_rows, value, search_term)
+ {
+ tag_name_and_value = value.split(":");
+ return (tag_name_and_value.length == 1 ? tag_name_and_value[0] :tag_name_and_value[1]);
+ //var array = new Array(key, value, row_position, num_rows,
+ //search_term ); return "\"" + array.join("*") + "\"";
}
var autocomplete_options =
- { selectFirst: false, formatItem : format_item_func, autoFill: false, highlight: false };
-
- t.autocomplete(settings.ajax_autocomplete_tag_url, autocomplete_options);
+ { selectFirst: false, formatItem : format_item_func, autoFill: false, highlight: false };
+ tag_input_field.autocomplete(settings.ajax_autocomplete_tag_url, autocomplete_options);
+
+
+ // Initialize delete tag images for current tags.
+ this_obj.find('.delete-tag-img').each(function() {
+ init_delete_tag_image( $(this) );
+ });
- t.addClass("tag-input");
+ this_obj.find('.tag-name').each( function() {
+ $(this).click( function() {
+ var tag_str = $(this).text();
+ var tag_name_and_value = tag_str.split(":")
+ settings.tag_click_fn(tag_name_and_value[0], tag_name_and_value[1]);
+ return true;
+ });
+ });
- return t;
- };
-
- //
- // Build tag area.
- //
- // Add tag buttons for each current tag to the tag area.
- for (tag_name in settings.tags)
+
+ // Initialize "add tag" button.
+ add_tag_button.click( function()
{
- var tag_value = settings.tags[tag_name];
- var tag_str = build_tag_str(tag_name, tag_value);
- var tag_button = build_tag_button(tag_str, toggle_link, settings.tags);
- tag_area.append(tag_button);
+ // Hide button.
+ $(this).hide();
+
+ // Clicking on button is the same as clicking on the tag area.
+ tag_area.click();
+
+ return false;
+ });
+
+ //
+ // Set up tag area interactions; these are needed only if tags are editable.
+ //
+ if (settings.editable)
+ {
+ // When the tag area blurs, go to "view tag" mode.
+ tag_area.blur( function(e)
+ {
+ num_tags = array_length(settings.tags);
+ if (num_tags != 0)
+ {
+ add_tag_button.show();
+ tag_input_field.hide();
+ tag_area.removeClass("active-tag-area");
+ }
+ else
+ {
+ // No tags, so do nothing to ensure that input is still visible.
+ }
+ });
+
+ // On click, enable user to add tags.
+ tag_area.click( function(e)
+ {
+ var is_active = $(this).hasClass("active-tag-area");
+
+ // If a "delete image" object was pressed and area is inactive, do nothing.
+ if ($(e.target).hasClass("delete-tag-img") && !is_active)
+ return false;
+
+ // If a "tag name" object was pressed and area is inactive, do nothing.
+ if ($(e.target).hasClass("tag-name") && !is_active)
+ return false;
+
+ // Hide add tag button, show tag_input field. Change background to show
+ // area is active.
+ $(this).addClass("active-tag-area");
+ add_tag_button.hide();
+ tag_input_field.show();
+ tag_input_field.focus();
+
+ // Add handler to document that will call blur when the tag area is blurred;
+ // a tag area is blurred when a user clicks on an element outside the area.
+ var handle_document_click = function(e)
+ {
+ var tag_area_id = tag_area.attr("id");
+ // Blur the tag area if the element clicked on is not in the tag area.
+ if (
+ ($(e.target).attr("id") != tag_area_id) &&
+ ($(e.target).parents().filter(tag_area_id).length == 0)
+ )
+ {
+ tag_area.blur();
+ $(document).unbind("click", handle_document_click);
+ }
+ };
+ // TODO: we should attach the click handler to all frames in order to capture
+ // clicks outside the frame that this element is in.
+ //window.parent.document.onclick = handle_document_click;
+ //var temp = $(window.parent.document.body).contents().find("iframe").html();
+ //alert(temp);
+ //$(document).parent().click(handle_document_click);
+ $(window).click(handle_document_click);
+
+ return false;
+ });
}
-
- // Add tag input field and "add tag" button.
- var tag_input_field = build_tag_input("");
- var add_tag_button = build_add_tag_button(tag_input_field);
- // When the tag area blurs, go to "view tag" mode.
- tag_area.blur( function(e)
- {
- num_tags = array_length(settings.tags);
- if (num_tags != 0)
+ // If using toggle link, hide the tag area. Otherwise, show the tag area.
+ if (settings.use_toggle_link)
+ tag_area.hide();
+ else
{
- add_tag_button.show();
- tag_input_field.hide();
- tag_area.removeClass("active-tag-area");
+ var num_tags = array_length(settings.tags);
+ if (num_tags == 0)
+ {
+ add_tag_button.hide();
+ tag_input_field.show();
+ }
}
- else
+
+ // Initialize tag names.
+ //$('.tag-name').
+
+ //
+ // Helper functions.
+ //
+
+ //
+ // Collapse tag name + value into a single string.
+ //
+ function build_tag_str(tag_name, tag_value)
{
- // No tags, so do nothing to ensure that input is still visible.
- }
- });
-
- if (settings.editable)
- {
- tag_area.append(add_tag_button);
- tag_area.append(tag_input_field);
- tag_input_field.hide();
-
- // On click, enable user to add tags.
- tag_area.click( function(e)
- {
- var is_active = $(this).hasClass("active-tag-area");
+ return tag_name + ( (tag_value != "" && tag_value) ? ":" + tag_value : "");
+ };
- // If a "delete image" object was pressed and area is inactive, do nothing.
- if ($(e.target).hasClass("delete-tag-img") && !is_active)
- return false;
-
- // If a "tag name" object was pressed and area is inactive, do nothing.
- if ($(e.target).hasClass("tag-name") && !is_active)
- return false;
- // Hide add tag button, show tag_input field. Change background to show
- // area is active.
- $(this).addClass("active-tag-area");
- add_tag_button.hide();
- tag_input_field.show();
- tag_input_field.focus();
+ // Initialize a "delete tag image": when click, delete tag from UI and send delete request to server.
+ function init_delete_tag_image(delete_img)
+ {
+ $(delete_img).mouseenter( function ()
+ {
+ $(this).attr("src", settings.delete_tag_img_rollover);
+ });
+ $(delete_img).mouseleave( function ()
+ {
+ $(this).attr("src", settings.delete_tag_img);
+ });
+ $(delete_img).click( function ()
+ {
+ // Tag button is image's parent.
+ var tag_button = $(this).parent();
- // Add handler to document that will call blur when the tag area is blurred;
- // a tag area is blurred when a user clicks on an element outside the area.
- var handle_document_click = function(e)
- {
- var tag_area_id = tag_area.attr("id");
- // Blur the tag area if the element clicked on is not in the tag area.
- if (
- ($(e.target).attr("id") != tag_area_id) &&
- ($(e.target).parents().filter(tag_area_id).length == 0)
- )
- {
- tag_area.blur();
- $(document).unbind("click", handle_document_click);
- }
- };
- // TODO: we should attach the click handler to all frames in order to capture
- // clicks outside the frame that this element is in.
- //window.parent.document.onclick = handle_document_click;
- //var temp = $(window.parent.document.body).contents().find("iframe").html();
- //alert(temp);
- //$(document).parent().click(handle_document_click);
- $(window).click(handle_document_click);
-
- return false;
- });
- }
-
- // If using toggle link, hide the tag area. Otherwise, if there are no tags,
- // hide the "add tags" button and show the input field.
- if (settings.use_toggle_link)
- tag_area.hide();
- else
+ // Get tag name, value.
+ var tag_name_elt = tag_button.find(".tag-name").eq(0);
+ var tag_str = tag_name_elt.text();
+ var tag_name_and_value = get_tag_name_and_value(tag_str);
+ var tag_name = tag_name_and_value[0];
+ var tag_value = tag_name_and_value[1];
+
+ var prev_button = tag_button.prev();
+ tag_button.remove();
+
+ // Remove tag from local list for consistency.
+ delete settings.tags[tag_name];
+
+ // Update toggle link text.
+ var new_text = settings.get_toggle_link_text_fn(settings.tags);
+ toggle_link.text(new_text);
+
+ // Delete tag.
+ $.ajax({
+ url: settings.ajax_delete_tag_url,
+ data: { tag_name: tag_name },
+ error: function()
+ {
+ // Failed. Roll back changes and show alert.
+ settings.tags[tag_name] = tag_value;
+ if (prev_button.hasClass("tag-button"))
+ prev_button.after(tag_button);
+ else
+ tag_area.prepend(tag_button);
+ var new_text = settings.get_toggle_link_text_fn(settings.tags);
+ alert( "Remove tag failed" );
+
+ toggle_link.text(new_text);
+
+ // TODO: no idea why it's necessary to set this up again.
+ delete_img.mouseenter( function ()
+ {
+ $(this).attr("src", settings.delete_tag_img_rollover);
+ });
+ delete_img.mouseleave( function ()
+ {
+ $(this).attr("src", settings.delete_tag_img);
+ });
+ },
+ success: function() {}
+ });
+
+ return true;
+ });
+ };
+
+
+ //
+ // Return a string that contains the contents of an associative array. This is
+ // a debugging method.
+ //
+ function assoc_array_to_str(an_array)
{
- var num_tags = array_length(settings.tags);
- if (num_tags == 0)
- {
- add_tag_button.hide();
- tag_input_field.show();
- }
- }
-
-
- return this.addClass("tag-element");
-}
+ // Convert associative array to simple array and then join array elements.
+ var array_str_list = new Array();
+ for (key in an_array)
+ array_str_list[array_str_list.length] = key + "-->" + an_array[key];
+
+ return "{" + array_str_list.join(",") + "}"
+ };
+
+ //
+ // Get tag name and value from a string.
+ //
+ function get_tag_name_and_value(tag_str)
+ {
+ return tag_str.split(":");
+ };
+
+ //
+ // Function that builds a tag button.
+ //
+ function build_tag_button(tag_str)
+ {
+ // Build "delete tag" image.
+ var delete_img = $("<img src='" + settings.delete_tag_img + "'/>").addClass("delete-tag-img");
+ init_delete_tag_image(delete_img);
+
+ // Build tag button.
+ var tag_name_elt = $("<span>").text(tag_str).addClass("tag-name");
+ tag_name_elt.click( function()
+ {
+ tag_name_and_value = tag_str.split(":")
+ settings.tag_click_fn(tag_name_and_value[0], tag_name_and_value[1]);
+ return true;
+ });
+
+ var tag_button = $("<span></span>").addClass("tag-button");
+ tag_button.append(tag_name_elt);
+ // Allow delete only if element is editable.
+ if (settings.editable)
+ tag_button.append(delete_img);
+
+ return tag_button;
+ };
+
+};
diff -r d95a9c843c53 -r 47b702c583a3 static/scripts/jquery.wymeditor.js
--- a/static/scripts/jquery.wymeditor.js Tue Dec 08 17:05:35 2009 -0500
+++ b/static/scripts/jquery.wymeditor.js Tue Dec 08 18:49:06 2009 -0500
@@ -370,8 +370,9 @@
iframeHtml:"<div class='wym_iframe wym_section'>"
+ "<iframe "
+ "src='"
- + WYMeditor.IFRAME_BASE_PATH
- + "wymiframe.html' "
+ + "/page/get_editor_iframe' "
+ //+ WYMeditor.IFRAME_BASE_PATH
+ //+ "wymiframe.html' "
+ "onload='this.contentWindow.parent.WYMeditor.INSTANCES["
+ WYMeditor.INDEX + "].initIframe(this)'"
+ "></iframe>"
diff -r d95a9c843c53 -r 47b702c583a3 static/scripts/packed/autocomplete_tagging.js
--- a/static/scripts/packed/autocomplete_tagging.js Tue Dec 08 17:05:35 2009 -0500
+++ b/static/scripts/packed/autocomplete_tagging.js Tue Dec 08 18:49:06 2009 -0500
@@ -1,1 +1,1 @@
-var ac_tag_area_id_gen=1;jQuery.fn.autocomplete_tagging=function(c){var e={get_toggle_link_text_fn:function(u){var w="";var v=o(u);if(v!=0){w=v+(v!=0?" Tags":" Tag")}else{w="Add tags"}return w},tag_click_fn:function(u,v){},editable:true,input_size:20,in_form:false,tags:{},use_toggle_link:true,item_id:"",add_tag_img:"",add_tag_img_rollover:"",delete_tag_img:"",ajax_autocomplete_tag_url:"",ajax_retag_url:"",ajax_delete_tag_url:"",ajax_add_tag_url:""};var p=jQuery.extend(e,c);var k="tag-area-"+(ac_tag_area_id_gen)++;var m=$("<div>").attr("id",k).addClass("tag-area");this.append(m);var o=function(u){if(u.length){return u.length}var v=0;for(element in u){v++}return v};var b=function(){var u=p.get_toggle_link_text_fn(p.tags);var v=$("<a href='/history/tags'>").text(u).addClass("toggle-link");v.click(function(){var w=(m.css("display")=="none");var x;if(w){x=function(){var y=o(p.tags);if(y==0){m.click()}}}else{x=function(){m.blur()}}m.slideToggle("fast",x);return false});return v};v
ar s=b();if(p.use_toggle_link){this.prepend(s)}var t=function(u){var v=new Array();for(key in u){v[v.length]=key+"-->"+u[key]}return"{"+v.join(",")+"}"};var a=function(v,u){return v+((u!=""&&u)?":"+u:"")};var h=function(u){return u.split(":")};var i=function(u){var v=$("<img src='"+p.add_tag_img+"' rollover='"+p.add_tag_img_rollover+"'/>").addClass("add-tag-button");v.click(function(){$(this).hide();m.click();return false});return v};var j=function(u){var v=$("<img src='"+p.delete_tag_img+"'/>").addClass("delete-tag-img");v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)});v.click(function(){var D=$(this).parent();var C=D.find(".tag-name").eq(0);var B=C.text();var z=h(B);var F=z[0];var y=z[1];var E=D.prev();D.remove();delete p.tags[F];var A=p.get_toggle_link_text_fn(p.tags);s.text(A);$.ajax({url:p.ajax_delete_tag_url,data:{tag_name:F},error:function(){p.tags[F]=y;if(E.hasClass("tag-button")){E
.after(D)}else{m.prepend(D)}var G=p.get_toggle_link_text_fn(p.tags);alert("Remove tag failed");s.text(G);v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)})},success:function(){}});return true});var w=$("<span>").text(u).addClass("tag-name");w.click(function(){tag_name_and_value=u.split(":");p.tag_click_fn(tag_name_and_value[0],tag_name_and_value[1]);return true});var x=$("<span></span>").addClass("tag-button");x.append(w);if(p.editable){x.append(v)}return x};var d=function(v){var u;if(p.in_form){u=$("<textarea id='history-tag-input' rows='1' cols='"+p.input_size+"' value='"+escape(v)+"'></textarea>")}else{u=$("<input id='history-tag-input' type='text' size='"+p.input_size+"' value='"+escape(v)+"'></input>")}u.keyup(function(D){if(D.keyCode==27){$(this).trigger("blur")}else{if((D.keyCode==13)||(D.keyCode==188)||(D.keyCode==32)){new_value=this.value;if(return_key_pressed_for_autocomplete==true)
{return_key_pressed_for_autocomplete=false;return false}if(new_value.indexOf(": ",new_value.length-2)!=-1){this.value=new_value.substring(0,new_value.length-1);return false}if((D.keyCode==188)||(D.keyCode==32)){new_value=new_value.substring(0,new_value.length-1)}new_value=new_value.replace(/^\s+|\s+$/g,"");if(new_value.length<2){return false}this.value="";var A=j(new_value);var z=m.children(".tag-button");if(z.length!=0){var E=z.slice(z.length-1);E.after(A)}else{m.prepend(A)}var y=new_value.split(":");p.tags[y[0]]=y[1];var B=p.get_toggle_link_text_fn(p.tags);s.text(B);var C=$(this);$.ajax({url:p.ajax_add_tag_url,data:{new_tag:new_value},error:function(){A.remove();delete p.tags[y[0]];var F=p.get_toggle_link_text_fn(p.tags);s.text(F);alert("Add tag failed")},success:function(){C.flushCache()}});return false}}});var w=function(A,z,y,C,B){tag_name_and_value=C.split(":");return(tag_name_and_value.length==1?tag_name_and_value[0]:tag_name_and_value[1])};var x={selectFirst:false,fo
rmatItem:w,autoFill:false,highlight:false};u.autocomplete(p.ajax_autocomplete_tag_url,x);u.addClass("tag-input");return u};for(tag_name in p.tags){var q=p.tags[tag_name];var l=a(tag_name,q);var g=j(l,s,p.tags);m.append(g)}var n=d("");var f=i(n);m.blur(function(u){r=o(p.tags);if(r!=0){f.show();n.hide();m.removeClass("active-tag-area")}else{}});if(p.editable){m.append(f);m.append(n);n.hide();m.click(function(w){var v=$(this).hasClass("active-tag-area");if($(w.target).hasClass("delete-tag-img")&&!v){return false}if($(w.target).hasClass("tag-name")&&!v){return false}$(this).addClass("active-tag-area");f.hide();n.show();n.focus();var u=function(y){var x=m.attr("id");if(($(y.target).attr("id")!=x)&&($(y.target).parents().filter(x).length==0)){m.blur();$(document).unbind("click",u)}};$(window).click(u);return false})}if(p.use_toggle_link){m.hide()}else{var r=o(p.tags);if(r==0){f.hide();n.show()}}return this.addClass("tag-element")};
\ No newline at end of file
+jQuery.fn.autocomplete_tagging=function(f,d){var g={get_toggle_link_text_fn:function(u){var w="";var v=o(u);if(v!=0){w=v+(v!=0?" Tags":" Tag")}else{w="Add tags"}return w},tag_click_fn:function(u,v){},editable:true,input_size:20,in_form:false,tags:{},use_toggle_link:true,item_id:"",add_tag_img:"",add_tag_img_rollover:"",delete_tag_img:"",ajax_autocomplete_tag_url:"",ajax_retag_url:"",ajax_delete_tag_url:"",ajax_add_tag_url:""};var q=jQuery.extend(g,d);var o=function(u){if(u.length){return u.length}var v=0;for(element in u){v++}return v};var e=$("#"+f);var m=$(this).attr("id").split("-");var a=m[m.length-1];var l=e.find("#tag-area-"+a);var s=e.find("#toggle-link-"+a);var n=e.find("#tag-input");var h=e.find(".add-tag-button");s.click(function(){var x=$(this).attr("id").split("-")[2];var v=$("#tag-area-"+x);var u=(v.css("display")=="none");var w;if(u){w=function(){var y=$(this).find(".tag-button").length;if(y==0){v.click()}}}else{w=function(){v.blur()}}v.slideToggle("fast",w);re
turn $(this)});if(q.editable){n.hide()}n.keyup(function(z){if(z.keyCode==27){$(this).trigger("blur")}else{if((z.keyCode==13)||(z.keyCode==188)||(z.keyCode==32)){new_value=this.value;if(return_key_pressed_for_autocomplete==true){return_key_pressed_for_autocomplete=false;return false}if(new_value.indexOf(": ",new_value.length-2)!=-1){this.value=new_value.substring(0,new_value.length-1);return false}if((z.keyCode==188)||(z.keyCode==32)){new_value=new_value.substring(0,new_value.length-1)}new_value=new_value.replace(/^\s+|\s+$/g,"");if(new_value.length<2){return false}this.value="";var w=j(new_value);var v=l.children(".tag-button");if(v.length!=0){var A=v.slice(v.length-1);A.after(w)}else{l.prepend(w)}var u=new_value.split(":");q.tags[u[0]]=u[1];var x=q.get_toggle_link_text_fn(q.tags);s.text(x);var y=$(this);$.ajax({url:q.ajax_add_tag_url,data:{new_tag:new_value},error:function(){w.remove();delete q.tags[u[0]];var B=q.get_toggle_link_text_fn(q.tags);s.text(B);alert("Add tag fail
ed")},success:function(){y.flushCache()}});return false}}});var c=function(w,v,u,y,x){tag_name_and_value=y.split(":");return(tag_name_and_value.length==1?tag_name_and_value[0]:tag_name_and_value[1])};var k={selectFirst:false,formatItem:c,autoFill:false,highlight:false};n.autocomplete(q.ajax_autocomplete_tag_url,k);e.find(".delete-tag-img").each(function(){p($(this))});e.find(".tag-name").each(function(){$(this).click(function(){var v=$(this).text();var u=v.split(":");q.tag_click_fn(u[0],u[1]);return true})});h.click(function(){$(this).hide();l.click();return false});if(q.editable){l.blur(function(u){r=o(q.tags);if(r!=0){h.show();n.hide();l.removeClass("active-tag-area")}else{}});l.click(function(w){var v=$(this).hasClass("active-tag-area");if($(w.target).hasClass("delete-tag-img")&&!v){return false}if($(w.target).hasClass("tag-name")&&!v){return false}$(this).addClass("active-tag-area");h.hide();n.show();n.focus();var u=function(y){var x=l.attr("id");if(($(y.target).attr("id
")!=x)&&($(y.target).parents().filter(x).length==0)){l.blur();$(document).unbind("click",u)}};$(window).click(u);return false})}if(q.use_toggle_link){l.hide()}else{var r=o(q.tags);if(r==0){h.hide();n.show()}}function b(v,u){return v+((u!=""&&u)?":"+u:"")}function p(u){$(u).mouseenter(function(){$(this).attr("src",q.delete_tag_img_rollover)});$(u).mouseleave(function(){$(this).attr("src",q.delete_tag_img)});$(u).click(function(){var A=$(this).parent();var z=A.find(".tag-name").eq(0);var y=z.text();var w=i(y);var C=w[0];var v=w[1];var B=A.prev();A.remove();delete q.tags[C];var x=q.get_toggle_link_text_fn(q.tags);s.text(x);$.ajax({url:q.ajax_delete_tag_url,data:{tag_name:C},error:function(){q.tags[C]=v;if(B.hasClass("tag-button")){B.after(A)}else{l.prepend(A)}var D=q.get_toggle_link_text_fn(q.tags);alert("Remove tag failed");s.text(D);u.mouseenter(function(){$(this).attr("src",q.delete_tag_img_rollover)});u.mouseleave(function(){$(this).attr("src",q.delete_tag_img)})},success:f
unction(){}});return true})}function t(u){var v=new Array();for(key in u){v[v.length]=key+"-->"+u[key]}return"{"+v.join(",")+"}"}function i(u){return u.split(":")}function j(u){var v=$("<img src='"+q.delete_tag_img+"'/>").addClass("delete-tag-img");p(v);var w=$("<span>").text(u).addClass("tag-name");w.click(function(){tag_name_and_value=u.split(":");q.tag_click_fn(tag_name_and_value[0],tag_name_and_value[1]);return true});var x=$("<span></span>").addClass("tag-button");x.append(w);if(q.editable){x.append(v)}return x}};
\ No newline at end of file
diff -r d95a9c843c53 -r 47b702c583a3 static/wymeditor/lang/en.js
--- a/static/wymeditor/lang/en.js Tue Dec 08 17:05:35 2009 -0500
+++ b/static/wymeditor/lang/en.js Tue Dec 08 18:49:06 2009 -0500
@@ -45,5 +45,6 @@
// Galaxy replacements.
Galaxy_History_Link: 'Insert Link to History',
Galaxy_Dataset_Link: 'Insert Link to Dataset',
+ Annotate_Galaxy_History: 'Annotate History',
};
diff -r d95a9c843c53 -r 47b702c583a3 templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako Tue Dec 08 17:05:35 2009 -0500
+++ b/templates/dataset/edit_attributes.mako Tue Dec 08 18:49:06 2009 -0500
@@ -54,12 +54,16 @@
<label>
Tags:
</label>
- <div id="dataset-tag-area"
- style="float: left; margin-left: 1px; width: 295px; margin-right: 10px; border-style: inset; border-color: #ddd; border-width: 1px">
+ <div style="float: left; width: 295px; margin-right: 10px; border-style: inset; border-width: 1px">
+ <style>
+ .tag-area {
+ border: none;
+ }
+ </style>
+ ${render_tagging_element(data, "edit_attributes.mako", use_toggle_link=False, in_form=True, input_size="30")}
</div>
<div style="clear: both"></div>
</div>
- ${render_tagging_element(data, "dataset-tag-area", "edit_attributes.mako", use_toggle_link="false", in_form="true", input_size="30")}
%endif
%for name, spec in data.metadata.spec.items():
%if spec.visible:
diff -r d95a9c843c53 -r 47b702c583a3 templates/history/sharing.mako
--- a/templates/history/sharing.mako Tue Dec 08 17:05:35 2009 -0500
+++ b/templates/history/sharing.mako Tue Dec 08 18:49:06 2009 -0500
@@ -14,7 +14,7 @@
%else:
%for history in histories:
<div class="toolForm">
- <div class="toolFormTitle">History '${history.name}' shared with</div>
+ <div class="toolFormTitle">History '${history.get_display_name()}' shared with</div>
<div class="toolFormBody">
<div class="form-row">
<div style="float: right;">
diff -r d95a9c843c53 -r 47b702c583a3 templates/history/view.mako
--- a/templates/history/view.mako Tue Dec 08 17:05:35 2009 -0500
+++ b/templates/history/view.mako Tue Dec 08 18:49:06 2009 -0500
@@ -2,7 +2,7 @@
<%def name="javascripts()">
${parent.javascripts()}
- ${h.js( "jquery", "json2", "jquery.jstore-all", "jquery.autocomplete", "autocomplete_tagging" )}
+ ${h.js( "galaxy.base", "jquery", "json2", "jquery.jstore-all", "jquery.autocomplete", "autocomplete_tagging" )}
</%def>
<%def name="stylesheets()">
@@ -320,14 +320,11 @@
<p></p>
%endif
- <div id="history-tag-area" style="margin-bottom: 1em">
- </div>
-
<%namespace file="../tagging_common.mako" import="render_tagging_element" />
%if trans.get_user() is not None:
<div id='history-tag-area' class="tag-element"></div>
- ${render_tagging_element(history, "history-tag-area", "history/view.mako", use_toggle_link='false', get_toggle_link_text_fn='get_toggle_link_text', editable=user_owns_history)}
+ ${render_tagging_element(tagged_item=history, elt_context="history/view.mako", use_toggle_link=False, get_toggle_link_text_fn='get_toggle_link_text', editable=user_owns_history)}
%endif
%if not datasets:
diff -r d95a9c843c53 -r 47b702c583a3 templates/page/display.mako
--- a/templates/page/display.mako Tue Dec 08 17:05:35 2009 -0500
+++ b/templates/page/display.mako Tue Dec 08 18:49:06 2009 -0500
@@ -2,6 +2,175 @@
<%def name="title()">Galaxy :: ${page.user.username} :: ${page.title}</%def>
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${h.js( "jquery", "json2", "jquery.jstore-all", "jquery.autocomplete", "autocomplete_tagging" )}
+ <script type="text/javascript">
+ $(function() {
+ // Load jStore for local storage
+ $.extend(jQuery.jStore.defaults, { project: 'galaxy', flash: '/static/jStore.Flash.html' })
+ $.jStore.load(); // Auto-select best storage
+
+ $.jStore.ready(function(engine) {
+ engine.ready(function() {
+ // Init stuff that requires the local storage to be running
+ //initShowHide();
+ setupHistoryItem( $("div.historyItemWrapper") );
+
+ // Hide all for now.
+ $( "div.historyItemBody:visible" ).each( function() {
+ if ( $.browser.mozilla ) {
+ $(this).find( "pre.peek" ).css( "overflow", "hidden" );
+ }
+ $(this).slideUp( "fast" );
+ });
+
+ });
+ });
+ });
+ // Functionized so AJAX'd datasets can call them
+ function initShowHide() {
+
+ // Load saved state and show as necessary
+ try {
+ var stored = $.jStore.store("history_expand_state");
+ if (stored) {
+ var st = JSON.parse(stored);
+ for (var id in st) {
+ $("#" + id + " div.historyItemBody" ).show();
+ }
+ }
+ } catch(err) {
+ // Something was wrong with values in storage, so clear storage
+ $.jStore.remove("history_expand_state");
+ }
+
+ // If Mozilla, hide scrollbars in hidden items since they cause animation bugs
+ if ( $.browser.mozilla ) {
+ $( "div.historyItemBody" ).each( function() {
+ if ( ! $(this).is( ":visible" ) ) $(this).find( "pre.peek" ).css( "overflow", "hidden" );
+ })
+ }
+ }
+ // Add show/hide link and delete link to a history item
+ function setupHistoryItem( query ) {
+ query.each( function() {
+ var id = this.id;
+ var body = $(this).children( "div.historyItemBody" );
+ var peek = body.find( "pre.peek" )
+ $(this).children( ".historyItemTitleBar" ).find( ".historyItemTitle" ).wrap( "<a href='#'></a>" ).click( function() {
+ if ( body.is(":visible") ) {
+ // Hiding stuff here
+ if ( $.browser.mozilla ) { peek.css( "overflow", "hidden" ) }
+ body.slideUp( "fast" );
+
+ // Save setting
+ var stored = $.jStore.store("history_expand_state")
+ var prefs = stored ? JSON.parse(stored) : null
+ if (prefs) {
+ delete prefs[id];
+ $.jStore.store("history_expand_state", JSON.stringify(prefs));
+ }
+ } else {
+ // Showing stuff here
+ body.slideDown( "fast", function() {
+ if ( $.browser.mozilla ) { peek.css( "overflow", "auto" ); }
+ });
+
+ // Save setting
+ var stored = $.jStore.store("history_expand_state")
+ var prefs = stored ? JSON.parse(stored) : new Object;
+ prefs[id] = true;
+ $.jStore.store("history_expand_state", JSON.stringify(prefs));
+ }
+ return false;
+ });
+ // Delete link
+ $(this).find( "div.historyItemButtons > .delete" ).each( function() {
+ var data_id = this.id.split( "-" )[1];
+ $(this).click( function() {
+ $( '#historyItem-' + data_id + "> div.historyItemTitleBar" ).addClass( "spinner" );
+ $.ajax({
+ url: "${h.url_for( action='delete_async', id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { alert( "Delete failed" ) },
+ success: function() {
+ %if show_deleted:
+ var to_update = {};
+ to_update[data_id] = "none";
+ updater( to_update );
+ %else:
+ $( "#historyItem-" + data_id ).fadeOut( "fast", function() {
+ $( "#historyItemContainer-" + data_id ).remove();
+ if ( $( "div.historyItemContainer" ).length < 1 ) {
+ $( "#emptyHistoryMessage" ).show();
+ }
+ });
+ %endif
+ }
+ });
+ return false;
+ });
+ });
+ // Undelete link
+ $(this).find( "a.historyItemUndelete" ).each( function() {
+ var data_id = this.id.split( "-" )[1];
+ $(this).click( function() {
+ $( '#historyItem-' + data_id + " > div.historyItemTitleBar" ).addClass( "spinner" );
+ $.ajax({
+ url: "${h.url_for( controller='dataset', action='undelete_async', id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { alert( "Undelete failed" ) },
+ success: function() {
+ var to_update = {};
+ to_update[data_id] = "none";
+ updater( to_update );
+ }
+ });
+ return false;
+ });
+ });
+ });
+ };
+
+
+    //TODO: this function is a duplicate of array_length defined in galaxy.base.js; not sure why it needs to be redefined here (due to streaming?).
+ // Returns the number of keys (elements) in an array/dictionary.
+ var array_length = function(an_array)
+ {
+ if (an_array.length)
+ return an_array.length;
+
+ var count = 0;
+ for (element in an_array)
+ count++;
+ return count;
+ };
+
+ //
+ // Function provides text for tagging toggle link.
+ //
+ var get_toggle_link_text = function(tags)
+ {
+ var text = "";
+ var num_tags = array_length(tags);
+ if (num_tags != 0)
+ {
+ text = num_tags + (num_tags != 1 ? " Tags" : " Tag");
+ }
+ else
+ {
+ // No tags.
+ text = "Add tags to history";
+ }
+ return text;
+ };
+ </script>
+</%def>
+
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ ${h.css( "base", "history", "autocomplete_tagging" )}
+</%def>
+
<%def name="init()">
<%
self.has_left_panel=False
diff -r d95a9c843c53 -r 47b702c583a3 templates/page/editor.mako
--- a/templates/page/editor.mako Tue Dec 08 17:05:35 2009 -0500
+++ b/templates/page/editor.mako Tue Dec 08 18:49:06 2009 -0500
@@ -21,31 +21,37 @@
</%def>
<%def name="javascripts()">
-
${parent.javascripts()}
-
- <script type='text/javascript' src="${h.url_for('/static/scripts/jquery.js')}"> </script>
- <script type='text/javascript' src="${h.url_for('/static/scripts/jquery.event.drag.js')}"> </script>
- <script type='text/javascript' src="${h.url_for('/static/scripts/jquery.event.drop.js')}"> </script>
- <script type='text/javascript' src="${h.url_for('/static/scripts/jquery.event.hover.js')}"> </script>
- <script type='text/javascript' src="${h.url_for('/static/scripts/jquery.form.js')}"> </script>
- <script type='text/javascript' src="${h.url_for('/static/scripts/jquery.jstore-all.js')}"> </script>
- <script type='text/javascript' src="${h.url_for('/static/scripts/json2.js')}"> </script>
-
- <script type='text/javascript' src="${h.url_for('/static/scripts/galaxy.base.js')}"> </script>
-
- <script type='text/javascript' src="${h.url_for('/static/scripts/jquery.wymeditor.js')}"> </script>
-
- <script type='text/javascript' src="${h.url_for('/static/scripts/jquery.autocomplete.js')}"> </script>
-
+ ${h.js( "jquery", "jquery.event.drag", "jquery.event.drop", "jquery.event.hover", "jquery.form", "jquery.jstore-all", "json2",
+ "galaxy.base", "jquery.wymeditor", "jquery.autocomplete", "autocomplete_tagging")}
<script type="text/javascript">
// Useful Galaxy stuff.
var Galaxy =
{
DIALOG_HISTORY_LINK : "history_link",
+ DIALOG_HISTORY_ANNOTATE : "history_annotate",
};
-
+
+ // Initialize Galaxy elements.
+ function init_galaxy_elts(wym)
+ {
+ // Set up events to make annotation easy.
+ $('.annotation', wym._doc.body).each( function()
+ {
+ $(this).click( function() {
+ // Works in Safari, not in Firefox.
+ var range = wym._doc.createRange();
+ range.selectNodeContents( this );
+ var selection = window.getSelection();
+ selection.removeAllRanges();
+ selection.addRange(range);
+ var t = "";
+ });
+ });
+
+ };
+
## Completely replace WYM's dialog handling
WYMeditor.editor.prototype.dialog = function( dialogType, dialogFeatures, bodyHtml ) {
@@ -193,7 +199,7 @@
);
}
- // HISTORY DIALOG
+ // INSERT HISTORY LINK DIALOG
if ( dialogType == Galaxy.DIALOG_HISTORY_LINK ) {
$.ajax(
{
@@ -244,7 +250,7 @@
// Get history name.
$.get( '${h.url_for( controller='history', action='get_name_async' )}?id=' + item_id, function( history_name ) {
var href = '${h.url_for( controller='history', action='view' )}?id=' + item_id;
- wym.insert("<a href='" + href + "'>History '" + history_name + "'</a>nbsp;");
+ wym.insert("<a href='" + href + "'>History '" + history_name + "'</a>");
});
}
else
@@ -268,6 +274,54 @@
}
});
}
+ // ANNOTATE HISTORY DIALOG
+            if ( dialogType == Galaxy.DIALOG_HISTORY_ANNOTATE ) {
+ $.ajax(
+ {
+ url: "${h.url_for( action='list_histories_for_selection' )}",
+ data: {},
+ error: function() { alert( "Grid refresh failed" ) },
+ success: function(table_html)
+ {
+ show_modal(
+                        "Annotate History",
+ table_html,
+ {
+ "Annotate": function()
+ {
+ // Insert links to history for each checked item.
+ var item_ids = new Array();
+ $('input[name=id]:checked').each(function() {
+ var item_id = $(this).val();
+
+ // Get annotation table for history.
+ $.ajax(
+ {
+ url: "${h.url_for( action='get_history_annotation_table' )}",
+ data: { id : item_id },
+ error: function() { alert( "Grid refresh failed" ) },
+ success: function(result)
+ {
+ // Insert into document.
+ wym.insert(result);
+
+ init_galaxy_elts(wym);
+
+ }
+ });
+ });
+
+ hide_modal();
+ },
+ "Cancel": function()
+ {
+ hide_modal();
+ }
+ }
+ );
+ }
+ });
+ }
};
</script>
@@ -313,7 +367,8 @@
{'name': 'Unlink', 'title': 'Unlink', 'css': 'wym_tools_unlink'},
{'name': 'InsertImage', 'title': 'Image', 'css': 'wym_tools_image'},
{'name': 'InsertTable', 'title': 'Table', 'css': 'wym_tools_table'},
- {'name': 'Insert Galaxy History Link', 'title' : 'Galaxy_History_Link', 'css' : 'galaxy_tools_insert_history_link'}
+ {'name': 'Insert Galaxy History Link', 'title' : 'Galaxy_History_Link', 'css' : 'galaxy_tools_insert_history_link'},
+            {'name': 'Annotate Galaxy History', 'title' : 'Annotate_Galaxy_History', 'css' : 'galaxy_tools_annotate_history'},
]
});
## Get the editor object
@@ -367,13 +422,19 @@
$('.galaxy_tools_insert_history_link').children().click( function() {
editor.dialog(Galaxy.DIALOG_HISTORY_LINK);
});
+ // Initialize 'Annotate history' button.
+ $('.galaxy_tools_annotate_history').children().click( function() {
+        editor.dialog(Galaxy.DIALOG_HISTORY_ANNOTATE);
+ });
+ // Initialize galaxy elements.
+ //init_galaxy_elts(editor);
});
</script>
</%def>
<%def name="stylesheets()">
${parent.stylesheets()}
- ${h.css( "autocomplete_tagging" )}
+ ${h.css( "base", "history", "autocomplete_tagging" )}
</%def>
<%def name="center_panel()">
@@ -384,7 +445,7 @@
<a id="close-button" class="panel-header-button">Close</a>
</div>
<div class="unified-panel-header-inner">
- Page editor
+                Page Editor <span style="font-weight: normal">| Title: ${page.title}</span>
</div>
</div>
diff -r d95a9c843c53 -r 47b702c583a3 templates/page/history_annotation_table.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/page/history_annotation_table.mako Tue Dec 08 18:49:06 2009 -0500
@@ -0,0 +1,59 @@
+<%namespace file="../tagging_common.mako" import="render_tagging_element_html" />
+<%namespace file="../root/history_common.mako" import="render_dataset" />
+
+<table>
+ ## Table header.
+ <tr>
+ <th colspan='2'>History '${history.get_display_name()}'</th>
+ </tr>
+ <tr>
+ ## Status messages and tags.
+ <th colspan='2'>
+ %if history.deleted:
+ <div class="warningmessagesmall">
+ ${_('This is a deleted history.')}
+ </div>
+ %endif
+ %if trans.get_user() is not None:
+ Tags: ${render_tagging_element_html( tagged_item=history, editable=False, use_toggle_link=False )}
+ %endif
+ </th>
+ </tr>
+ <tr>
+ <th colspan="2">Description of History:
+ <ol>
+ <li>What was the motivation for this history?
+ <li>What is the outcome of this history?
+ <li>What are unresolved questions from this history?
+ <li>What new questions arise from this history?
+ </ol>
+ </th>
+ </tr>
+
+ ## Table body. For each dataset, there is an area to annotate the dataset.
+ %if not datasets:
+ <tr>
+ <td>
+ <div class="infomessagesmall" id="emptyHistoryMessage">
+ ${_("Your history is empty. Click 'Get Data' on the left pane to start")}
+ </div>
+ </td>
+ </tr>
+ %else:
+ ## Render requested datasets.
+ %for data in datasets:
+ %if data.visible:
+ <tr>
+ <td valign="top"><span class="annotation">Describe this step: why was it done? what data does it produce?</span></td>
+ ##<td valign="top" class="annotation">Describe this step: why was it done? what data does it produce?</td>
+ <td>
+ <div class="historyItemContainer" id="historyItemContainer-${data.id}">
+ ${render_dataset( data, data.hid, show_deleted_on_refresh = show_deleted, user_owns_dataset = False )}
+ </div>
+ </td>
+ </tr>
+ %endif
+ %endfor
+ %endif
+</table>
+
\ No newline at end of file
diff -r d95a9c843c53 -r 47b702c583a3 templates/page/wymiframe.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/page/wymiframe.mako Tue Dec 08 18:49:06 2009 -0500
@@ -0,0 +1,27 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<!--
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ * http://www.wymeditor.org/
+ *
+ * File Name:
+ * wymiframe.html
+ * Iframe used by designMode.
+ * See the documentation for more info.
+ *
+ * File Authors:
+ * Jean-Francois Hovinne (jf.hovinne a-t wymeditor dotorg)
+-->
+<html>
+ <head>
+ <title>WYMeditor iframe</title>
+ <meta http-equiv="X-UA-Compatible" content="IE=EmulateIE7" />
+ <link rel="stylesheet" type="text/css" media="screen" href="/static/wymeditor/iframe/galaxy/wymiframe.css" />
+ ${h.css("base", "history", "autocomplete_tagging")}
+ </head>
+ <body class="wym_iframe text-content"></body>
+</html>
diff -r d95a9c843c53 -r 47b702c583a3 templates/root/history.mako
--- a/templates/root/history.mako Tue Dec 08 17:05:35 2009 -0500
+++ b/templates/root/history.mako Tue Dec 08 18:49:06 2009 -0500
@@ -77,6 +77,7 @@
<% updateable = [data for data in reversed( datasets ) if data.visible and data.state not in [ "deleted", "empty", "error", "ok" ]] %>
${ ",".join( map(lambda data: "\"%s\" : \"%s\"" % (data.id, data.state), updateable) ) }
});
+
// Navigate to a dataset.
%if hda_id:
self.location = "#${hda_id}";
@@ -317,15 +318,16 @@
<p></p>
%endif
-<div id="history-tag-area" style="margin-bottom: 1em">
-</div>
-
<%namespace file="../tagging_common.mako" import="render_tagging_element" />
<%namespace file="history_common.mako" import="render_dataset" />
%if trans.get_user() is not None:
- <div id='history-tag-area' class="tag-element"></div>
- ${render_tagging_element( tagged_item=history, elt_id="history-tag-area", elt_context="history.mako", get_toggle_link_text_fn='get_toggle_link_text' )}
+ <style>
+ .tag-element {
+ margin-bottom: 0.5em;
+ }
+ </style>
+ ${render_tagging_element( tagged_item=history, elt_context='history.mako', get_toggle_link_text_fn='get_toggle_link_text' )}
%endif
%if not datasets:
diff -r d95a9c843c53 -r 47b702c583a3 templates/tagging_common.mako
--- a/templates/tagging_common.mako Tue Dec 08 17:05:35 2009 -0500
+++ b/templates/tagging_common.mako Tue Dec 08 18:49:06 2009 -0500
@@ -1,10 +1,76 @@
+<%!
+ from cgi import escape
+ from galaxy.web.framework.helpers import iff
+%>
## Render a tagging element if there is a tagged_item.
-%if tagged_item is not None and elt_id is not None:
- ${render_tagging_element(tagged_item=tagged_item, elt_id=elt_id, elt_context=elt_context, in_form=in_form, input_size=input_size, tag_click_fn=tag_click_fn)}
+%if tagged_item is not None:
+ ${render_tagging_element(tagged_item=tagged_item, elt_context=elt_context, in_form=in_form, input_size=input_size, tag_click_fn=tag_click_fn)}
%endif
+## Render HTML for a tagging element.
+<%def name="render_tagging_element_html(tagged_item=None, editable=True, use_toggle_link=True, input_size='15', in_form=False)">
+ ## Useful attributes.
+ <%
+ tagged_item_id = str( trans.security.encode_id (tagged_item.id) )
+ elt_id = "tag-element-" + tagged_item_id
+ %>
+ <div id="${elt_id}" class="tag-element">
+ %if use_toggle_link:
+ <a id="toggle-link-${tagged_item_id}" class="toggle-link" href="#">${len(tagged_item.tags)} Tags</a>
+ %endif
+ <div id="tag-area-${tagged_item_id}" class="tag-area">
+
+ ## Build buttons for current tags.
+ %for tag in tagged_item.tags:
+ <%
+ tag_name = tag.user_tname
+ tag_value = None
+ if tag.value is not None:
+ tag_value = tag.user_value
+ ## Convert tag name, value to unicode.
+ if isinstance( tag_name, str ):
+ tag_name = unicode( escape( tag_name ), 'utf-8' )
+ if tag_value:
+ tag_value = unicode( escape( tag_value ), 'utf-8' )
+ tag_str = tag_name + ":" + tag_value
+ else:
+ tag_str = tag_name
+ %>
+ <span class="tag-button">
+ <span class="tag-name">${tag_str}</span>
+ %if editable:
+ <img class="delete-tag-img" src="${h.url_for('/static/images/delete_tag_icon_gray.png')}"/>
+ %endif
+ </span>
+ %endfor
+
+        ## Add the tag input field. If the element is in a form, the tag input is a textarea; otherwise it is a text input.
+ %if editable:
+ %if in_form:
+ <textarea id='tag-input' class="tag-input" rows='1' cols='${input_size}'></textarea>
+ %else:
+ <input id='tag-input' class="tag-input" type='text' size='${input_size}'></input>
+ %endif
+ ## Add "add tag" button.
+ <img src='${h.url_for('/static/images/add_icon.png')}' rollover='${h.url_for('/static/images/add_icon_dark.png')}' class="add-tag-button"/>
+ %endif
+ </div>
+ </div>
+</%def>
+
+
## Render the tags 'tags' as an autocomplete element.
-<%def name="render_tagging_element(tagged_item, elt_id, elt_context, use_toggle_link='true', in_form='false', input_size='15', tag_click_fn='default_tag_click_fn', get_toggle_link_text_fn='default_get_toggle_link_text_fn', editable='true')">
+<%def name="render_tagging_element(tagged_item=None, elt_context=None, use_toggle_link=True, in_form=False, input_size='15', tag_click_fn='default_tag_click_fn', get_toggle_link_text_fn='default_get_toggle_link_text_fn', editable=True)">
+ ## Useful attributes.
+ <%
+ tagged_item_id = str( trans.security.encode_id (tagged_item.id) )
+ elt_id = "tag-element-" + tagged_item_id
+ %>
+
+ ## Build HTML.
+ ${self.render_tagging_element_html(tagged_item, editable, use_toggle_link, input_size, in_form)}
+
+ ## Build script that augments tags using progressive javascript.
<script type="text/javascript">
//
// Set up autocomplete tagger.
@@ -70,22 +136,32 @@
var options =
{
tags : ${h.to_json_string(tag_names_and_values)},
- editable : ${str(editable).lower()},
+ editable : ${iff( editable, 'true', 'false' )},
get_toggle_link_text_fn: ${get_toggle_link_text_fn},
tag_click_fn: ${tag_click_fn},
- <% tagged_item_id = trans.security.encode_id(tagged_item.id) %>
ajax_autocomplete_tag_url: "${h.url_for( controller='tag', action='tag_autocomplete_data', id=tagged_item_id, item_class=tagged_item.__class__.__name__ )}",
ajax_add_tag_url: "${h.url_for( controller='tag', action='add_tag_async', id=tagged_item_id, item_class=tagged_item.__class__.__name__, context=elt_context )}",
ajax_delete_tag_url: "${h.url_for( controller='tag', action='remove_tag_async', id=tagged_item_id, item_class=tagged_item.__class__.__name__, context=elt_context )}",
delete_tag_img: "${h.url_for('/static/images/delete_tag_icon_gray.png')}",
delete_tag_img_rollover: "${h.url_for('/static/images/delete_tag_icon_white.png')}",
- add_tag_img: "${h.url_for('/static/images/add_icon.png')}",
- add_tag_img_rollover: "${h.url_for('/static/images/add_icon_dark.png')}",
- input_size: ${input_size},
- in_form: ${in_form},
- use_toggle_link: ${use_toggle_link}
+ use_toggle_link: ${iff( use_toggle_link, 'true', 'false' )},
};
- $("#${elt_id}").autocomplete_tagging(options)
+ $("#${elt_id}").autocomplete_tagging('${elt_id}', options);
</script>
+
+ ## Use style to hide/display the tag area.
+ <style>
+ .tag-area {
+ display: ${iff( use_toggle_link, "none", "block" )};
+ }
+ </style>
+
+ <noscript>
+ <style>
+ .tag-area {
+ display: block;
+ }
+ </style>
+ </noscript>
</%def>
\ No newline at end of file
details: http://www.bx.psu.edu/hg/galaxy/rev/b400212305b6
changeset: 3161:b400212305b6
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Dec 09 10:15:15 2009 -0500
description:
Fix libdrmaa runtime link path on OS X
diffstat:
scripts/scramble/scripts/DRMAA_python-macosx.py | 60 ++++++++++++++++++++++++++++++
1 files changed, 60 insertions(+), 0 deletions(-)
diffs (64 lines):
diff -r 47b702c583a3 -r b400212305b6 scripts/scramble/scripts/DRMAA_python-macosx.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/scramble/scripts/DRMAA_python-macosx.py Wed Dec 09 10:15:15 2009 -0500
@@ -0,0 +1,60 @@
+import os, sys, shutil
+
+if "SGE_ROOT" not in os.environ:
+ print "scramble(): Please set SGE_ROOT to the path of your SGE installation"
+ print "scramble(): before scrambling DRMAA_python"
+ sys.exit(1)
+
+# change back to the build dir
+if os.path.dirname( sys.argv[0] ) != "":
+ os.chdir( os.path.dirname( sys.argv[0] ) )
+
+# find setuptools
+scramble_lib = os.path.join( "..", "..", "..", "lib" )
+sys.path.append( scramble_lib )
+import get_platform # fixes fat python 2.5
+try:
+ from setuptools import *
+ import pkg_resources
+except:
+ from ez_setup import use_setuptools
+ use_setuptools( download_delay=8, to_dir=scramble_lib )
+ from setuptools import *
+ import pkg_resources
+
+# clean, in case you're running this by hand from a dirty module source dir
+for dir in [ "build", "dist", "gridengine" ]:
+ if os.access( dir, os.F_OK ):
+ print "scramble_it.py: removing dir:", dir
+ shutil.rmtree( dir )
+
+# patch
+file = "setup.py"
+print "scramble(): Patching", file
+if not os.access( "%s.orig" %file, os.F_OK ):
+ shutil.copyfile( file, "%s.orig" %file )
+i = open( "%s.orig" %file, "r" )
+o = open( file, "w" )
+for line in i.readlines():
+ if line == 'SGE6_ROOT="/scratch_test02/SGE6"\n':
+ line = 'SGE6_ROOT="%s"\n' % os.environ["SGE_ROOT"]
+ if line.startswith('link_args ='):
+ line = 'link_args = [ "-L%s" % os.path.join(SGE6_ROOT, "lib", SGE6_ARCH), "-ldrmaa" ]\n'
+ print >>o, line,
+i.close()
+o.close()
+
+# go
+me = sys.argv[0]
+sys.argv = [ me ]
+sys.argv.append( "build" )
+execfile( "setup.py", globals(), locals() )
+
+# fix _cDRMAA.so rpath
+so = "build/lib.%s-%s/_cDRMAA.so" % ( pkg_resources.get_platform(), sys.version[:3] )
+libdrmaa = os.path.join(SGE6_ROOT, "lib", SGE6_ARCH, "libdrmaa.dylib.1.0" )
+os.system( "install_name_tool -change libdrmaa.dylib.1.0 %s %s" % ( libdrmaa, so ) )
+
+sys.argv = [ me ]
+sys.argv.append( "bdist_egg" )
+execfile( "setup.py", globals(), locals() )
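A quick way to confirm that the link path was actually rewritten, sketched under an assumed build location (the real path under build/ varies with platform and Python version, and this snippet is not part of the changeset): otool lists the shared libraries a Mach-O binary links against, so after install_name_tool runs, libdrmaa should appear by its absolute path.

import commands

# Example path only; the actual lib.<platform>-<pyver> directory name differs per machine.
so = "build/lib.macosx-10.3-fat-2.4/_cDRMAA.so"
# 'otool -L' prints the load commands; libdrmaa.dylib.1.0 should now be listed
# with the absolute $SGE_ROOT-based path set by install_name_tool above.
print commands.getoutput( "otool -L %s" % so )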
11 Dec '09
details: http://www.bx.psu.edu/hg/galaxy/rev/022a8c94883f
changeset: 3157:022a8c94883f
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Dec 08 11:46:13 2009 -0500
description:
Better approach to altering the initial content of an output dataset when necessary. The upload tool will now call the data type's groom_dataset_content() method (a better name than before_setting_metadata, since it is not related to metadata). This will now also run on the cluster.
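A minimal sketch of the new hook, assuming only the API visible in the diff below (the SortedText class and its line-sorting behavior are hypothetical, shown purely to illustrate the override point): a datatype overrides groom_dataset_content(), which receives the path of the freshly written output file, and the upload tool invokes it once the content exists.

from galaxy.datatypes import data

class SortedText( data.Text ):
    # Hypothetical datatype, not part of this changeset; illustrates the
    # groom_dataset_content() hook added to the data.Data base class.
    file_ext = "sortedtxt"
    def groom_dataset_content( self, file_name ):
        """Called on an output dataset after its content is initially generated."""
        # Rewrite the file with its lines sorted, in the same spirit as
        # Bam grooming its contents with 'samtools sort'.
        fh = open( file_name, 'r' )
        lines = fh.readlines()
        fh.close()
        lines.sort()
        out = open( file_name, 'w' )
        out.writelines( lines )
        out.close()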
diffstat:
lib/galaxy/datatypes/binary.py | 75 +++++++++----------------
lib/galaxy/datatypes/chrominfo.py | 3 -
lib/galaxy/datatypes/coverage.py | 5 +-
lib/galaxy/datatypes/data.py | 9 +--
lib/galaxy/datatypes/genetics.py | 64 ---------------------
lib/galaxy/datatypes/images.py | 14 +----
lib/galaxy/datatypes/interval.py | 18 ------
lib/galaxy/datatypes/qualityscore.py | 13 +----
lib/galaxy/datatypes/sequence.py | 29 ---------
lib/galaxy/datatypes/tabular.py | 9 ---
lib/galaxy/datatypes/tracks.py | 3 -
lib/galaxy/datatypes/xml.py | 3 -
lib/galaxy/jobs/__init__.py | 1 -
lib/galaxy/tools/__init__.py | 3 -
lib/galaxy/web/controllers/library.py | 2 -
lib/galaxy/web/controllers/library_admin.py | 2 -
lib/galaxy/web/controllers/root.py | 3 -
tools/data_source/hbvar_filter.py | 1 -
tools/data_source/upload.py | 4 +
tools/maf/maf_to_bed_code.py | 1 -
20 files changed, 38 insertions(+), 224 deletions(-)
diffs (833 lines):
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/datatypes/binary.py Tue Dec 08 11:46:13 2009 -0500
@@ -17,9 +17,6 @@
class Binary( data.Data ):
"""Binary data"""
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_peek( self, dataset, is_multi_byte=False ):
"""Set the peek and blurb text"""
if not dataset.dataset.purged:
@@ -36,9 +33,6 @@
"""Class describing an ab1 binary sequence file"""
file_ext = "ab1"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
export_url = "/history_add_to?" + urlencode( {'history_id':dataset.history_id,'ext':'ab1','name':'ab1 sequence','info':'Sequence file','dbkey':dataset.dbkey} )
@@ -58,39 +52,32 @@
file_ext = "bam"
MetadataElement( name="bam_index", desc="BAM Index File", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
- def before_setting_metadata( self, dataset ):
- """ Ensures that the Bam file contents are sorted. This function is called on the dataset before set_meta() is called."""
- sorted = False
- try:
- index_file = dataset.metadata.bam_index
- except:
- index_file = None
- if index_file:
- # If an index file already exists on disk, then the data must have previously been sorted
- # since samtools requires a sorted Bam file in order to create an index.
- sorted = os.path.exists( index_file.file_name )
- if not sorted:
- # Use samtools to sort the Bam file
- tmp_dir = tempfile.gettempdir()
- # Create a symlink from the temporary directory to the dataset file so that samtools can mess with it.
- tmp_dataset_file_name = os.path.join( tmp_dir, os.path.basename( dataset.file_name ) )
- # Here tmp_dataset_file_name looks something like /tmp/dataset_XX.dat
- os.symlink( dataset.file_name, tmp_dataset_file_name )
- # Sort alignments by leftmost coordinates. File <out.prefix>.bam will be created.
- # TODO: This command may also create temporary files <out.prefix>.%d.bam when the
- # whole alignment cannot be fitted into memory ( controlled by option -m ). We're
- # not handling this case here.
- tmp_sorted_dataset_file = tempfile.NamedTemporaryFile( prefix=tmp_dataset_file_name )
- tmp_sorted_dataset_file_name = tmp_sorted_dataset_file.name
- tmp_sorted_dataset_file.close()
- command = "samtools sort %s %s 2>/dev/null" % ( tmp_dataset_file_name, tmp_sorted_dataset_file_name )
- proc = subprocess.Popen( args=command, shell=True )
- proc.wait()
- tmp_sorted_bam_file_name = '%s.bam' % tmp_sorted_dataset_file_name
- # Move tmp_sorted_bam_file_name to our output dataset location
- shutil.move( tmp_sorted_bam_file_name, dataset.file_name )
- # Remove all remaining temporary files
- os.unlink( tmp_dataset_file_name )
+ def groom_dataset_content( self, file_name ):
+ """
+ Ensures that the Bam file contents are sorted. This function is called
+ on an output dataset after the content is initially generated.
+ """
+ # Use samtools to sort the Bam file
+ tmp_dir = tempfile.gettempdir()
+ # Create a symlink from the temporary directory to the dataset file so that samtools can mess with it.
+ tmp_dataset_file_name = os.path.join( tmp_dir, os.path.basename( file_name ) )
+ # Here tmp_dataset_file_name looks something like /tmp/dataset_XX.dat
+ os.symlink( file_name, tmp_dataset_file_name )
+ # Sort alignments by leftmost coordinates. File <out.prefix>.bam will be created.
+ # TODO: This command may also create temporary files <out.prefix>.%d.bam when the
+ # whole alignment cannot be fitted into memory ( controlled by option -m ). We're
+ # not handling this case here.
+ tmp_sorted_dataset_file = tempfile.NamedTemporaryFile( prefix=tmp_dataset_file_name )
+ tmp_sorted_dataset_file_name = tmp_sorted_dataset_file.name
+ tmp_sorted_dataset_file.close()
+ command = "samtools sort %s %s 2>/dev/null" % ( tmp_dataset_file_name, tmp_sorted_dataset_file_name )
+ proc = subprocess.Popen( args=command, shell=True )
+ proc.wait()
+ tmp_sorted_bam_file_name = '%s.bam' % tmp_sorted_dataset_file_name
+ # Move tmp_sorted_bam_file_name to our output dataset location
+ shutil.move( tmp_sorted_bam_file_name, file_name )
+ # Remove all remaining temporary files
+ os.unlink( tmp_dataset_file_name )
def init_meta( self, dataset, copy_from=None ):
Binary.init_meta( self, dataset, copy_from=copy_from )
def set_meta( self, dataset, overwrite = True, **kwd ):
@@ -151,9 +138,6 @@
"""Class describing a zip archive of binary sequence files"""
file_ext = "binseq.zip"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
zip_file = zipfile.ZipFile( dataset.file_name, "r" )
@@ -176,9 +160,6 @@
"""Class describing an scf binary sequence file"""
file_ext = "scf"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
export_url = "/history_add_to?" + urlencode({'history_id':dataset.history_id,'ext':'scf','name':'scf sequence','info':'Sequence file','dbkey':dataset.dbkey})
@@ -196,11 +177,9 @@
class Sff( Binary ):
""" Standard Flowgram Format (SFF) """
file_ext = "sff"
+
def __init__( self, **kwd ):
Binary.__init__( self, **kwd )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def sniff( self, filename ):
# The first 4 bytes of any sff file is '.sff', and the file is binary. For details
# about the format, see http://www.ncbi.nlm.nih.gov/Traces/trace.cgi?cmd=show&f=formats&m=doc&s=for…
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/datatypes/chrominfo.py
--- a/lib/galaxy/datatypes/chrominfo.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/datatypes/chrominfo.py Tue Dec 08 11:46:13 2009 -0500
@@ -12,6 +12,3 @@
MetadataElement( name="chrom", default=1, desc="Chrom column", param=metadata.ColumnParameter )
MetadataElement( name="length", default=2, desc="Length column", param=metadata.ColumnParameter )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/datatypes/coverage.py
--- a/lib/galaxy/datatypes/coverage.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/datatypes/coverage.py Tue Dec 08 11:46:13 2009 -0500
@@ -28,10 +28,7 @@
MetadataElement( name="forwardCol", default=3, desc="Forward or aggregate read column", param=metadata.ColumnParameter )
MetadataElement( name="reverseCol", desc="Optional reverse read column", param=metadata.ColumnParameter, optional=True, no_value=0 )
MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True, visible=False )
-
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
+
def get_track_window(self, dataset, data, start, end):
"""
Assumes we have a numpy file.
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/datatypes/data.py Tue Dec 08 11:46:13 2009 -0500
@@ -84,6 +84,9 @@
except OSError, e:
log.exception('%s reading a file that does not exist %s' % (self.__class__.__name__, dataset.file_name))
return ''
+ def groom_dataset_content( self, file_name ):
+ """This function is called on an output dataset file after the content is initially generated."""
+ pass
def init_meta( self, dataset, copy_from=None ):
# Metadata should be left mostly uninitialized. Dataset will
# handle returning default values when metadata is not set.
@@ -256,9 +259,6 @@
if return_output:
return converted_dataset
return "The file conversion of %s on data %s has been added to the Queue." % (converter.name, original_dataset.hid)
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is set."""
- pass
def after_setting_metadata( self, dataset ):
"""This function is called on the dataset after metadata is set."""
dataset.clear_associated_files( metadata_safe = True )
@@ -346,9 +346,6 @@
def get_mime(self):
"""Returns the mime type of the datatype"""
return 'text/plain'
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is set."""
- pass
def set_meta( self, dataset, **kwd ):
"""
Set the number of lines of data in dataset,
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/datatypes/genetics.py Tue Dec 08 11:46:13 2009 -0500
@@ -47,9 +47,6 @@
self.add_display_app ( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' )
def as_ucsc_display_file( self, dataset, **kwd ):
return open( dataset.file_name )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_meta( self, dataset, overwrite = True, **kwd ):
i = 0
for i, line in enumerate( file ( dataset.file_name ) ):
@@ -205,9 +202,6 @@
"""Initialize featurelistt datatype"""
Tabular.__init__( self, **kwd )
self.column_names = []
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def make_html_table( self, dataset, skipchars=[] ):
"""Create HTML table, used for displaying peek"""
out = ['<table cellspacing="0" cellpadding="3">']
@@ -246,9 +240,6 @@
self.column_names[0] = 'FID'
self.column_names[1] = 'IID'
# this is what Plink wants as at 2009
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def sniff(self,filename):
"""
"""
@@ -273,9 +264,6 @@
rgTabList.__init__( self, **kwd )
for i,s in enumerate(['#FeatureId', 'Chr', 'Genpos', 'Mappos']):
self.column_names[i] = s
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class Rgenetics(Html):
"""
@@ -329,9 +317,6 @@
f.write("\n".join( rval ))
f.write('\n')
f.close()
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_meta( self, dataset, **kwd ):
"""
for lped/pbed eg
@@ -373,9 +358,6 @@
"""
file_ext="snpmatrix"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
dataset.peek = "Binary RGenetics file"
@@ -405,9 +387,6 @@
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.ped', description = 'Pedigree File', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.map', description = 'Map File', substitute_name_with_metadata = 'base_name', is_binary = True )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class Pphe(Rgenetics):
"""
@@ -418,9 +397,6 @@
def __init__( self, **kwd ):
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.pphe', description = 'Plink Phenotype File', substitute_name_with_metadata = 'base_name' )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class Lmap(Rgenetics):
"""
@@ -428,10 +404,6 @@
"""
file_ext="lmap"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
-
class Fphe(Rgenetics):
"""
fake class to distinguish different species of Rgenetics data collections
@@ -441,9 +413,6 @@
def __init__( self, **kwd ):
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.fphe', description = 'FBAT Phenotype File', substitute_name_with_metadata = 'base_name' )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class Phe(Rgenetics):
"""
@@ -454,9 +423,6 @@
def __init__( self, **kwd ):
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.phe', description = 'Phenotype File', substitute_name_with_metadata = 'base_name' )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class Fped(Rgenetics):
"""
@@ -467,9 +433,6 @@
def __init__( self, **kwd ):
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.fped', description = 'FBAT format pedfile', substitute_name_with_metadata = 'base_name' )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class Pbed(Rgenetics):
"""
@@ -482,9 +445,6 @@
self.add_composite_file( '%s.bim', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.bed', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.fam', substitute_name_with_metadata = 'base_name', is_binary = True )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class Eigenstratgeno(Rgenetics):
"""
@@ -497,9 +457,6 @@
self.add_composite_file( '%s.eigenstratgeno', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.ind', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name', is_binary = True )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class Eigenstratpca(Rgenetics):
"""
@@ -510,27 +467,18 @@
def __init__( self, **kwd ):
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.eigenstratpca', description = 'Eigenstrat PCA file', substitute_name_with_metadata = 'base_name' )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class Snptest(Rgenetics):
"""
fake class to distinguish different species of Rgenetics data collections
"""
file_ext="snptest"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class Pheno(Tabular):
"""
base class for pheno files
"""
file_ext = 'pheno'
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class RexpBase( Html ):
"""
@@ -698,9 +646,6 @@
f.write("\n".join( rval ))
f.write('\n')
f.close()
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def init_meta( self, dataset, copy_from=None ):
"""Add metadata elements"""
if copy_from:
@@ -789,9 +734,6 @@
RexpBase.__init__(self, **kwd)
self.add_composite_file( '%s.affybatch', description = 'AffyBatch R object saved to file',
substitute_name_with_metadata = 'base_name', is_binary=True )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class Eset( RexpBase ):
"""derived class for BioC data structures in Galaxy """
@@ -801,9 +743,6 @@
RexpBase.__init__(self, **kwd)
self.add_composite_file( '%s.eset', description = 'ESet R object saved to file',
substitute_name_with_metadata = 'base_name', is_binary = True )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
class MAlist( RexpBase ):
"""derived class for BioC data structures in Galaxy """
@@ -813,9 +752,6 @@
RexpBase.__init__(self, **kwd)
self.add_composite_file( '%s.malist', description = 'MAlist R object saved to file',
substitute_name_with_metadata = 'base_name', is_binary = True )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
if __name__ == '__main__':
import doctest, sys
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/datatypes/images.py
--- a/lib/galaxy/datatypes/images.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/datatypes/images.py Tue Dec 08 11:46:13 2009 -0500
@@ -15,9 +15,6 @@
class Image( data.Data ):
"""Class describing an image"""
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
dataset.peek = 'Image in %s format' % dataset.extension
@@ -54,9 +51,6 @@
"""Class describing a GMAJ Applet"""
file_ext = "gmaj.zip"
copy_safe_peek = False
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
if hasattr( dataset, 'history_id' ):
@@ -108,9 +102,7 @@
class Html( data.Text ):
"""Class describing an html file"""
file_ext = "html"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
+
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
dataset.peek = "HTML file"
@@ -145,9 +137,7 @@
"""Class describing a LAJ Applet"""
file_ext = "laj"
copy_safe_peek = False
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
+
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
if hasattr( dataset, 'history_id' ):
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/datatypes/interval.py Tue Dec 08 11:46:13 2009 -0500
@@ -75,9 +75,6 @@
else:
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_meta( self, dataset, overwrite = True, first_line_is_header = False, **kwd ):
Tabular.set_meta( self, dataset, overwrite = overwrite, skip = 0 )
@@ -343,9 +340,6 @@
MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True, visible=False )
###do we need to repeat these? they are the same as should be inherited from interval type
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_meta( self, dataset, overwrite = True, **kwd ):
"""Sets the metadata information for datasets previously determined to be in bed format."""
i = 0
@@ -504,9 +498,6 @@
"""Initialize datatype, by adding GBrowse display app"""
Tabular.__init__(self, **kwd)
self.add_display_app ( 'c_elegans', 'display in Wormbase', 'as_gbrowse_display_file', 'gbrowse_links' )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_meta( self, dataset, overwrite = True, **kwd ):
i = 0
for i, line in enumerate( file ( dataset.file_name ) ):
@@ -644,9 +635,6 @@
def __init__(self, **kwd):
"""Initialize datatype, by adding GBrowse display app"""
Gff.__init__(self, **kwd)
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_meta( self, dataset, overwrite = True, **kwd ):
i = 0
for i, line in enumerate( file ( dataset.file_name ) ):
@@ -810,9 +798,6 @@
return ret_val
def make_html_table( self, dataset ):
return Tabular.make_html_table( self, dataset, skipchars=['track', '#'] )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_meta( self, dataset, overwrite = True, **kwd ):
i = 0
for i, line in enumerate( file ( dataset.file_name ) ):
@@ -904,9 +889,6 @@
"""Initialize interval datatype, by adding UCSC display app"""
Tabular.__init__(self, **kwd)
self.add_display_app ( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_meta( self, dataset, overwrite = True, **kwd ):
Tabular.set_meta( self, dataset, overwrite = overwrite, skip = 1 )
def display_peek( self, dataset ):
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/datatypes/qualityscore.py
--- a/lib/galaxy/datatypes/qualityscore.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/datatypes/qualityscore.py Tue Dec 08 11:46:13 2009 -0500
@@ -15,9 +15,6 @@
"""
file_ext = "qualsolid"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def sniff( self, filename ):
"""
>>> fname = get_test_fname( 'sequence.fasta' )
@@ -67,9 +64,6 @@
"""
file_ext = "qual454"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def sniff( self, filename ):
"""
>>> fname = get_test_fname( 'sequence.fasta' )
@@ -108,9 +102,4 @@
until we know more about quality score formats
"""
file_ext = "qualsolexa"
-
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
-
-
\ No newline at end of file
+
\ No newline at end of file
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/datatypes/sequence.py
--- a/lib/galaxy/datatypes/sequence.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/datatypes/sequence.py Tue Dec 08 11:46:13 2009 -0500
@@ -21,9 +21,6 @@
"""Add metadata elements"""
MetadataElement( name="sequences", default=0, desc="Number of sequences", readonly=True, visible=False, optional=True, no_value=0 )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_meta( self, dataset, **kwd ):
"""
Set the number of sequences and the number of data lines in dataset.
@@ -59,17 +56,10 @@
"""Add metadata elements"""
MetadataElement( name="species", desc="Species", default=[], param=metadata.SelectParameter, multiple=True, readonly=True, no_value=None )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
-
class Fasta( Sequence ):
"""Class representing a FASTA sequence"""
file_ext = "fasta"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def sniff( self, filename ):
"""
Determines whether the file is in fasta format
@@ -122,9 +112,6 @@
""" Class representing the SOLID Color-Space sequence ( csfasta ) """
file_ext = "csfasta"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def sniff( self, filename ):
"""
Color-space sequence:
@@ -166,9 +153,6 @@
"""Class representing a generic FASTQ sequence"""
file_ext = "fastq"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_meta( self, dataset, **kwd ):
"""
Set the number of sequences and the number of data lines
@@ -220,10 +204,6 @@
"""Class representing a FASTQ sequence ( the Sanger variant )"""
file_ext = "fastqsanger"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
-
try:
from galaxy import eggs
import pkg_resources; pkg_resources.require( "bx-python" )
@@ -316,9 +296,6 @@
MetadataElement( name="species_chromosomes", desc="Species Chromosomes", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
MetadataElement( name="maf_index", desc="MAF Index File", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def init_meta( self, dataset, copy_from=None ):
Alignment.init_meta( self, dataset, copy_from=copy_from )
def set_meta( self, dataset, overwrite = True, **kwd ):
@@ -425,9 +402,6 @@
file_ext = "axt"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def sniff( self, filename ):
"""
Determines whether the file is in axt format
@@ -480,9 +454,6 @@
# here simply for backward compatibility ( although it is still in the datatypes registry ). Subclassing
# from data.Text eliminates managing metadata elements inherited from the Alignemnt class.
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def sniff( self, filename ):
"""
Determines whether the file is in lav format
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/datatypes/tabular.py Tue Dec 08 11:46:13 2009 -0500
@@ -23,9 +23,6 @@
MetadataElement( name="columns", default=0, desc="Number of columns", readonly=True, visible=False, no_value=0 )
MetadataElement( name="column_types", default=[], desc="Column types", param=metadata.ColumnTypesParameter, readonly=True, visible=False, no_value=[] )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def init_meta( self, dataset, copy_from=None ):
data.Text.init_meta( self, dataset, copy_from=copy_from )
def set_meta( self, dataset, overwrite = True, skip = None, **kwd ):
@@ -227,9 +224,6 @@
'Superorder', 'Order', 'Suborder', 'Superfamily', 'Family', 'Subfamily',
'Tribe', 'Subtribe', 'Genus', 'Subgenus', 'Species', 'Subspecies'
]
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def make_html_table( self, dataset, skipchars=[] ):
"""Create HTML table, used for displaying peek"""
out = ['<table cellspacing="0" cellpadding="3">']
@@ -259,9 +253,6 @@
self.column_names = ['QNAME', 'FLAG', 'RNAME', 'POS', 'MAPQ', 'CIGAR',
'MRNM', 'MPOS', 'ISIZE', 'SEQ', 'QUAL', 'OPT'
]
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def make_html_table( self, dataset, skipchars=[] ):
"""Create HTML table, used for displaying peek"""
out = ['<table cellspacing="0" cellpadding="3">']
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/datatypes/tracks.py
--- a/lib/galaxy/datatypes/tracks.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/datatypes/tracks.py Tue Dec 08 11:46:13 2009 -0500
@@ -23,9 +23,6 @@
def __init__(self, **kwargs):
super( GeneTrack, self ).__init__( **kwargs )
self.add_display_app( 'genetrack', 'View in', '', 'genetrack_link' )
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def get_display_links( self, dataset, type, app, base_url, target_frame='galaxy_main', **kwd ):
return data.Data.get_display_links( self, dataset, type, app, base_url, target_frame=target_frame, **kwd )
def genetrack_link( self, hda, type, app, base_url ):
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/datatypes/xml.py
--- a/lib/galaxy/datatypes/xml.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/datatypes/xml.py Tue Dec 08 11:46:13 2009 -0500
@@ -11,9 +11,6 @@
"""NCBI Blast XML Output data"""
file_ext = "blastxml"
- def before_setting_metadata( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
- pass
def set_peek( self, dataset, is_multi_byte=False ):
"""Set the peek and blurb text"""
if not dataset.dataset.purged:
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/jobs/__init__.py Tue Dec 08 11:46:13 2009 -0500
@@ -537,7 +537,6 @@
#it would be quicker to just copy the metadata from the originating output dataset,
#but somewhat trickier (need to recurse up the copied_from tree), for now we'll call set_meta()
if not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ):
- dataset.datatype.before_setting_metadata( dataset )
# Only set metadata values if they are missing...
dataset.set_meta( overwrite = False )
else:
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/tools/__init__.py Tue Dec 08 11:46:13 2009 -0500
@@ -1418,7 +1418,6 @@
if data.extension != data_type:
data = app.datatypes_registry.change_datatype( data, data_type )
elif not isinstance( data.datatype, datatypes.interval.Bed ) and isinstance( data.datatype, datatypes.interval.Interval ):
- data.datatype.before_setting_metadata( data )
data.set_meta()
if data.missing_meta():
data = app.datatypes_registry.change_datatype( data, 'tabular' )
@@ -1473,7 +1472,6 @@
self.sa_session.flush()
child_dataset.set_size()
child_dataset.name = "Secondary Dataset (%s)" % ( designation )
- child_dataset.datatype.before_setting_metadata( child_dataset )
child_dataset.init_meta()
child_dataset.set_meta()
child_dataset.set_peek()
@@ -1533,7 +1531,6 @@
primary_data.set_size()
primary_data.name = outdata.name
primary_data.info = outdata.info
- primary_dataset.datatype.before_setting_metadata( primary_dataset )
primary_data.init_meta( copy_from=outdata )
primary_data.dbkey = dbkey
primary_data.set_meta()
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/web/controllers/library.py Tue Dec 08 11:46:13 2009 -0500
@@ -487,7 +487,6 @@
if name not in [ 'name', 'info', 'dbkey' ]:
if spec.get( 'default' ):
setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
- ldda.datatype.before_setting_metadata( ldda )
ldda.datatype.set_meta( ldda )
ldda.datatype.after_setting_metadata( ldda )
trans.sa_session.flush()
@@ -521,7 +520,6 @@
msg=msg,
messagetype=messagetype )
if trans.app.security_agent.can_modify_library_item( user, roles, ldda ):
- ldda.datatype.before_setting_metadata( ldda )
if "dbkey" in ldda.datatype.metadata_spec and not ldda.metadata.dbkey:
# Copy dbkey into metadata, for backwards compatability
# This looks like it does nothing, but getting the dbkey
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/web/controllers/library_admin.py Tue Dec 08 11:46:13 2009 -0500
@@ -493,7 +493,6 @@
if name not in [ 'name', 'info', 'dbkey' ]:
if spec.get( 'default' ):
setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
- ldda.datatype.before_setting_metadata( ldda )
ldda.datatype.set_meta( ldda )
ldda.datatype.after_setting_metadata( ldda )
trans.sa_session.flush()
@@ -517,7 +516,6 @@
widgets=widgets,
msg=msg,
messagetype=messagetype )
- ldda.datatype.before_setting_metadata( ldda )
if "dbkey" in ldda.datatype.metadata_spec and not ldda.metadata.dbkey:
# Copy dbkey into metadata, for backwards compatability
# This looks like it does nothing, but getting the dbkey
diff -r 83dc9642a59e -r 022a8c94883f lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Tue Dec 08 09:05:35 2009 -0500
+++ b/lib/galaxy/web/controllers/root.py Tue Dec 08 11:46:13 2009 -0500
@@ -321,7 +321,6 @@
trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
else:
msg = 'Attributes updated'
- data.datatype.before_setting_metadata( data )
data.set_meta()
data.datatype.after_setting_metadata( data )
trans.sa_session.flush()
@@ -346,7 +345,6 @@
trans.sa_session.refresh( data.dataset )
else:
return trans.show_error_message( "You are not authorized to change this dataset's permissions" )
- data.datatype.before_setting_metadata( data )
if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey:
# Copy dbkey into metadata, for backwards compatability
# This looks like it does nothing, but getting the dbkey
@@ -521,7 +519,6 @@
data_file.close()
data.state = data.states.OK
data.set_size()
- data.datatype.before_setting_metadata( data )
data.init_meta()
data.set_meta()
trans.sa_session.flush()
diff -r 83dc9642a59e -r 022a8c94883f tools/data_source/hbvar_filter.py
--- a/tools/data_source/hbvar_filter.py Tue Dec 08 09:05:35 2009 -0500
+++ b/tools/data_source/hbvar_filter.py Tue Dec 08 11:46:13 2009 -0500
@@ -46,7 +46,6 @@
fp.close()
#Set meta data, format file to be valid interval type
if isinstance(data.datatype, datatypes.interval.Interval):
- data.datatype.before_setting_metadata( data )
data.set_meta(first_line_is_header=True)
#check for missing meta data, if all there, comment first line and process file
if not data.missing_meta():
diff -r 83dc9642a59e -r 022a8c94883f tools/data_source/upload.py
--- a/tools/data_source/upload.py Tue Dec 08 09:05:35 2009 -0500
+++ b/tools/data_source/upload.py Tue Dec 08 11:46:13 2009 -0500
@@ -10,6 +10,7 @@
import galaxy.model
from galaxy.datatypes import sniff
from galaxy.datatypes.binary import *
+from galaxy.datatypes.registry import Registry
from galaxy import util
from galaxy.util.json import *
@@ -264,6 +265,9 @@
name = dataset.name,
line_count = line_count )
json_file.write( to_json_string( info ) + "\n" )
+ # Groom the dataset content if necessary
+ datatype = Registry().get_datatype_by_extension( ext )
+ datatype.groom_dataset_content( output_path )
def add_composite_file( dataset, json_file, output_path, files_path ):
if dataset.composite_files:
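As background for the hunk above, here is a minimal sketch of what the new grooming step amounts to; the helper function name is hypothetical, while Registry and groom_dataset_content() are the names used in the diff. A live Galaxy process would reuse app.datatypes_registry rather than constructing a fresh Registry().

from galaxy.datatypes.registry import Registry

def groom_uploaded_dataset( ext, output_path ):
    # Map the dataset's extension to its datatype instance.
    datatype = Registry().get_datatype_by_extension( ext )
    # For most datatypes grooming is expected to be a no-op; for Bam it
    # sorts the file contents in place so set_meta() can index them later.
    datatype.groom_dataset_content( output_path )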
diff -r 83dc9642a59e -r 022a8c94883f tools/maf/maf_to_bed_code.py
--- a/tools/maf/maf_to_bed_code.py Tue Dec 08 09:05:35 2009 -0500
+++ b/tools/maf/maf_to_bed_code.py Tue Dec 08 11:46:13 2009 -0500
@@ -45,7 +45,6 @@
newdata.info = "The requested file is missing from the system."
newdata.state = newdata.states.ERROR
newdata.dbkey = dbkey
- newdata.datatype.before_setting_metadata( newdata )
newdata.init_meta()
newdata.set_meta()
newdata.set_peek()
11 Dec '09
details: http://www.bx.psu.edu/hg/galaxy/rev/83dc9642a59e
changeset: 3156:83dc9642a59e
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Tue Dec 08 09:05:35 2009 -0500
description:
Rename the before_edit() and after_edit() metadata-related methods to the more appropriately named before_setting_metadata() and after_setting_metadata(), and add the methods to all datatype classes. Move sorting of Bam files from the Bam set_meta() method to the new before_setting_metadata() method.
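A minimal sketch of the call sequence this rename establishes, using the method names from the diffs below; the driver function itself is illustrative, not Galaxy code:

def update_metadata( dataset ):
    datatype = dataset.datatype
    # Pre-hook: e.g. Bam ensures its contents are coordinate-sorted here.
    datatype.before_setting_metadata( dataset )
    # Compute and store metadata: e.g. Bam builds its .bai index here.
    datatype.set_meta( dataset )
    # Post-hook: the base implementation clears metadata-unsafe
    # associated files.
    datatype.after_setting_metadata( dataset )

The call sites touched in this changeset (jobs, tools, the library and root controllers) follow this before/set/after sequence.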
diffstat:
lib/galaxy/datatypes/binary.py | 130 ++++++++++++++++----------------
lib/galaxy/datatypes/chrominfo.py | 5 +-
lib/galaxy/datatypes/coverage.py | 4 +-
lib/galaxy/datatypes/data.py | 11 +-
lib/galaxy/datatypes/genetics.py | 66 ++++++++++++++++-
lib/galaxy/datatypes/images.py | 12 +++
lib/galaxy/datatypes/interval.py | 21 ++++-
lib/galaxy/datatypes/qualityscore.py | 11 ++
lib/galaxy/datatypes/sequence.py | 30 +++++++-
lib/galaxy/datatypes/tabular.py | 9 ++
lib/galaxy/datatypes/tracks.py | 3 +
lib/galaxy/datatypes/xml.py | 4 +
lib/galaxy/jobs/__init__.py | 1 +
lib/galaxy/tools/__init__.py | 5 +-
lib/galaxy/web/controllers/library.py | 7 +-
lib/galaxy/web/controllers/library_admin.py | 7 +-
lib/galaxy/web/controllers/root.py | 9 +-
tools/data_source/hbvar_filter.py | 1 +
tools/maf/maf_to_bed_code.py | 1 +
tools/samtools/sam_to_bam.py | 21 ++++-
20 files changed, 271 insertions(+), 87 deletions(-)
diffs (968 lines):
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/datatypes/binary.py Tue Dec 08 09:05:35 2009 -0500
@@ -17,6 +17,9 @@
class Binary( data.Data ):
"""Binary data"""
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_peek( self, dataset, is_multi_byte=False ):
"""Set the peek and blurb text"""
if not dataset.dataset.purged:
@@ -32,6 +35,10 @@
class Ab1( Binary ):
"""Class describing an ab1 binary sequence file"""
file_ext = "ab1"
+
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
export_url = "/history_add_to?" + urlencode( {'history_id':dataset.history_id,'ext':'ab1','name':'ab1 sequence','info':'Sequence file','dbkey':dataset.dbkey} )
@@ -51,83 +58,67 @@
file_ext = "bam"
MetadataElement( name="bam_index", desc="BAM Index File", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
- def init_meta( self, dataset, copy_from=None ):
- Binary.init_meta( self, dataset, copy_from=copy_from )
- def set_meta( self, dataset, overwrite = True, **kwd ):
- """ Ensures that the Bam file contents are sorted and creates the index for the BAM file. """
- errors = False
- # These metadata values are not accessible by users, always overwrite
- index_file = dataset.metadata.bam_index
+ def before_setting_metadata( self, dataset ):
+ """ Ensures that the Bam file contents are sorted. This function is called on the dataset before set_meta() is called."""
+ sorted = False
+ try:
+ index_file = dataset.metadata.bam_index
+ except:
+ index_file = None
if index_file:
# If an index file already exists on disk, then the data must have previously been sorted
# since samtools requires a sorted Bam file in order to create an index.
sorted = os.path.exists( index_file.file_name )
- else:
- index_file = dataset.metadata.spec['bam_index'].param.new_file( dataset = dataset )
- sorted = False
- tmp_dir = tempfile.gettempdir()
- try:
+ if not sorted:
+ # Use samtools to sort the Bam file
+ tmp_dir = tempfile.gettempdir()
# Create a symlink from the temporary directory to the dataset file so that samtools can mess with it.
tmp_dataset_file_name = os.path.join( tmp_dir, os.path.basename( dataset.file_name ) )
# Here tmp_dataset_file_name looks something like /tmp/dataset_XX.dat
os.symlink( dataset.file_name, tmp_dataset_file_name )
+ # Sort alignments by leftmost coordinates. File <out.prefix>.bam will be created.
+ # TODO: This command may also create temporary files <out.prefix>.%d.bam when the
+ # whole alignment cannot be fitted into memory ( controlled by option -m ). We're
+ # not handling this case here.
+ tmp_sorted_dataset_file = tempfile.NamedTemporaryFile( prefix=tmp_dataset_file_name )
+ tmp_sorted_dataset_file_name = tmp_sorted_dataset_file.name
+ tmp_sorted_dataset_file.close()
+ command = "samtools sort %s %s 2>/dev/null" % ( tmp_dataset_file_name, tmp_sorted_dataset_file_name )
+ proc = subprocess.Popen( args=command, shell=True )
+ proc.wait()
+ tmp_sorted_bam_file_name = '%s.bam' % tmp_sorted_dataset_file_name
+ # Move tmp_sorted_bam_file_name to our output dataset location
+ shutil.move( tmp_sorted_bam_file_name, dataset.file_name )
+ # Remove all remaining temporary files
+ os.unlink( tmp_dataset_file_name )
+ def init_meta( self, dataset, copy_from=None ):
+ Binary.init_meta( self, dataset, copy_from=copy_from )
+ def set_meta( self, dataset, overwrite = True, **kwd ):
+ """ Creates the index for the BAM file. """
+ # These metadata values are not accessible by users, always overwrite
+ index_file = dataset.metadata.bam_index
+ if not index_file:
+ index_file = dataset.metadata.spec['bam_index'].param.new_file( dataset = dataset )
+ tmp_dir = tempfile.gettempdir()
+ # Create a symlink from the temporary directory to the dataset file so that samtools can mess with it.
+ tmp_dataset_file_name = os.path.join( tmp_dir, os.path.basename( dataset.file_name ) )
+ # Here tmp_dataset_file_name looks something like /tmp/dataset_XX.dat
+ os.symlink( dataset.file_name, tmp_dataset_file_name )
+ errors = False
+ try:
+ # Create the Bam index
+ command = 'samtools index %s' % tmp_dataset_file_name
+ proc = subprocess.Popen( args=command, shell=True )
+ proc.wait()
except Exception, e:
errors = True
- err_msg = 'Error creating tmp symlink to file (%s). ' % str( dataset.file_name )
+ err_msg = 'Error creating index for BAM file (%s)' % str( tmp_dataset_file_name )
log.exception( err_msg )
sys.stderr.write( err_msg + str( e ) )
- if not errors and not sorted:
- try:
- # Sort alignments by leftmost coordinates. File <out.prefix>.bam will be created.
- # TODO: This command may also create temporary files <out.prefix>.%d.bam when the
- # whole alignment cannot be fitted into memory ( controlled by option -m ). We're
- # not handling this case here.
- tmp_sorted_dataset_file = tempfile.NamedTemporaryFile( prefix=tmp_dataset_file_name )
- tmp_sorted_dataset_file_name = tmp_sorted_dataset_file.name
- tmp_sorted_dataset_file.close()
- command = "samtools sort %s %s 2>/dev/null" % ( tmp_dataset_file_name, tmp_sorted_dataset_file_name )
- proc = subprocess.Popen( args=command, shell=True )
- proc.wait()
- except Exception, e:
- errors = True
- err_msg = 'Error sorting alignments from (%s). ' % tmp_dataset_file_name
- log.exception( err_msg )
- sys.stderr.write( err_msg + str( e ) )
if not errors:
- if sorted:
- try:
- # Create the Bam index
- command = 'samtools index %s' % tmp_dataset_file_name
- proc = subprocess.Popen( args=command, shell=True )
- proc.wait()
- except Exception, e:
- errors = True
- err_msg = 'Error creating index for BAM file (%s)' % str( tmp_dataset_file_name )
- log.exception( err_msg )
- sys.stderr.write( err_msg + str( e ) )
- else:
- tmp_sorted_bam_file_name = '%s.bam' % tmp_sorted_dataset_file_name
- try:
- # Create the Bam index
- command = 'samtools index %s' % tmp_sorted_bam_file_name
- proc = subprocess.Popen( args=command, shell=True )
- proc.wait()
- except Exception, e:
- errors = True
- err_msg = 'Error creating index for BAM file (%s)' % str( tmp_sorted_dataset_file_name )
- log.exception( err_msg )
- sys.stderr.write( err_msg + str( e ) )
- if not errors:
- if sorted:
- # Move the temporary index file ~/tmp/dataset_XX.dat.bai to our metadata file
- # storage location ~/database/files/_metadata_files/dataset_XX.dat
- shutil.move( '%s.bai' % ( tmp_dataset_file_name ), index_file.file_name )
- else:
- # Move tmp_sorted_bam_file_name to our output dataset location
- shutil.move( tmp_sorted_bam_file_name, dataset.file_name )
- # Move the temporary sorted index file ~/tmp/dataset_XX.dat.bai to our metadata file
- # storage location ~/database/files/_metadata_files/dataset_XX.dat
- shutil.move( '%s.bai' % ( tmp_sorted_bam_file_name ), index_file.file_name )
+ # Move the temporary index file ~/tmp/dataset_XX.dat.bai to our metadata file
+ # storage location ~/database/files/_metadata_files/dataset_XX.dat
+ shutil.move( '%s.bai' % ( tmp_dataset_file_name ), index_file.file_name )
# Remove all remaining temporary files
os.unlink( tmp_dataset_file_name )
# Set the metadata
@@ -159,6 +150,10 @@
class Binseq( Binary ):
"""Class describing a zip archive of binary sequence files"""
file_ext = "binseq.zip"
+
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
zip_file = zipfile.ZipFile( dataset.file_name, "r" )
@@ -180,6 +175,10 @@
class Scf( Binary ):
"""Class describing an scf binary sequence file"""
file_ext = "scf"
+
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
export_url = "/history_add_to?" + urlencode({'history_id':dataset.history_id,'ext':'scf','name':'scf sequence','info':'Sequence file','dbkey':dataset.dbkey})
@@ -199,6 +198,9 @@
file_ext = "sff"
def __init__( self, **kwd ):
Binary.__init__( self, **kwd )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def sniff( self, filename ):
# The first 4 bytes of any sff file is '.sff', and the file is binary. For details
# about the format, see http://www.ncbi.nlm.nih.gov/Traces/trace.cgi?cmd=show&f=formats&m=doc&s=for…
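Combined into one illustrative helper, the sort-then-index flow split across before_setting_metadata() and set_meta() above looks roughly like this; the helper name, scratch-directory handling, and omitted error handling are assumptions, while the samtools commands match the diff:

import os, shutil, subprocess, tempfile

def sort_and_index_bam( bam_path, index_path ):
    # samtools writes its outputs next to its input, so work through a
    # symlink in a scratch directory rather than in the dataset directory.
    tmp_dir = tempfile.mkdtemp()
    try:
        tmp_bam = os.path.join( tmp_dir, os.path.basename( bam_path ) )
        os.symlink( os.path.abspath( bam_path ), tmp_bam )
        # samtools 0.1.x sort: "samtools sort <in.bam> <out.prefix>" creates
        # <out.prefix>.bam, plus <out.prefix>.%d.bam spill files when the
        # -m memory limit is exceeded (the case the TODO above leaves open).
        sort_prefix = os.path.join( tmp_dir, 'sorted' )
        subprocess.call( 'samtools sort %s %s 2>/dev/null' % ( tmp_bam, sort_prefix ), shell=True )
        shutil.move( '%s.bam' % sort_prefix, bam_path )
        # "samtools index <in.bam>" writes <in.bam>.bai alongside the input;
        # the symlink still resolves to the now-sorted dataset file.
        subprocess.call( 'samtools index %s' % tmp_bam, shell=True )
        shutil.move( '%s.bai' % tmp_bam, index_path )
    finally:
        shutil.rmtree( tmp_dir )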
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/datatypes/chrominfo.py
--- a/lib/galaxy/datatypes/chrominfo.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/datatypes/chrominfo.py Tue Dec 08 09:05:35 2009 -0500
@@ -11,4 +11,7 @@
file_ext = "len"
MetadataElement( name="chrom", default=1, desc="Chrom column", param=metadata.ColumnParameter )
MetadataElement( name="length", default=2, desc="Length column", param=metadata.ColumnParameter )
-
+
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/datatypes/coverage.py
--- a/lib/galaxy/datatypes/coverage.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/datatypes/coverage.py Tue Dec 08 09:05:35 2009 -0500
@@ -29,7 +29,9 @@
MetadataElement( name="reverseCol", desc="Optional reverse read column", param=metadata.ColumnParameter, optional=True, no_value=0 )
MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True, visible=False )
-
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def get_track_window(self, dataset, data, start, end):
"""
Assumes we have a numpy file.
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/datatypes/data.py Tue Dec 08 09:05:35 2009 -0500
@@ -256,11 +256,11 @@
if return_output:
return converted_dataset
return "The file conversion of %s on data %s has been added to the Queue." % (converter.name, original_dataset.hid)
- def before_edit( self, dataset ):
- """This function is called on the dataset before metadata is edited."""
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is set."""
pass
- def after_edit( self, dataset ):
- """This function is called on the dataset after metadata is edited."""
+ def after_setting_metadata( self, dataset ):
+ """This function is called on the dataset after metadata is set."""
dataset.clear_associated_files( metadata_safe = True )
def __new_composite_file( self, name, optional = False, mimetype = None, description = None, substitute_name_with_metadata = None, is_binary = False, space_to_tab = True, **kwds ):
kwds[ 'name' ] = name
@@ -346,6 +346,9 @@
def get_mime(self):
"""Returns the mime type of the datatype"""
return 'text/plain'
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is set."""
+ pass
def set_meta( self, dataset, **kwd ):
"""
Set the number of lines of data in dataset,
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/datatypes/genetics.py Tue Dec 08 09:05:35 2009 -0500
@@ -47,6 +47,9 @@
self.add_display_app ( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' )
def as_ucsc_display_file( self, dataset, **kwd ):
return open( dataset.file_name )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_meta( self, dataset, overwrite = True, **kwd ):
i = 0
for i, line in enumerate( file ( dataset.file_name ) ):
@@ -202,6 +205,9 @@
"""Initialize featurelistt datatype"""
Tabular.__init__( self, **kwd )
self.column_names = []
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def make_html_table( self, dataset, skipchars=[] ):
"""Create HTML table, used for displaying peek"""
out = ['<table cellspacing="0" cellpadding="3">']
@@ -240,6 +246,9 @@
self.column_names[0] = 'FID'
self.column_names[1] = 'IID'
# this is what Plink wants as at 2009
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def sniff(self,filename):
"""
"""
@@ -264,6 +273,9 @@
rgTabList.__init__( self, **kwd )
for i,s in enumerate(['#FeatureId', 'Chr', 'Genpos', 'Mappos']):
self.column_names[i] = s
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
class Rgenetics(Html):
"""
@@ -317,6 +329,9 @@
f.write("\n".join( rval ))
f.write('\n')
f.close()
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_meta( self, dataset, **kwd ):
"""
for lped/pbed eg
@@ -358,6 +373,9 @@
"""
file_ext="snpmatrix"
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
dataset.peek = "Binary RGenetics file"
@@ -387,6 +405,9 @@
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.ped', description = 'Pedigree File', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.map', description = 'Map File', substitute_name_with_metadata = 'base_name', is_binary = True )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
class Pphe(Rgenetics):
"""
@@ -397,6 +418,9 @@
def __init__( self, **kwd ):
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.pphe', description = 'Plink Phenotype File', substitute_name_with_metadata = 'base_name' )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
class Lmap(Rgenetics):
"""
@@ -404,6 +428,10 @@
"""
file_ext="lmap"
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
+
class Fphe(Rgenetics):
"""
fake class to distinguish different species of Rgenetics data collections
@@ -413,6 +441,9 @@
def __init__( self, **kwd ):
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.fphe', description = 'FBAT Phenotype File', substitute_name_with_metadata = 'base_name' )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
class Phe(Rgenetics):
"""
@@ -423,6 +454,9 @@
def __init__( self, **kwd ):
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.phe', description = 'Phenotype File', substitute_name_with_metadata = 'base_name' )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
class Fped(Rgenetics):
"""
@@ -433,6 +467,9 @@
def __init__( self, **kwd ):
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.fped', description = 'FBAT format pedfile', substitute_name_with_metadata = 'base_name' )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
class Pbed(Rgenetics):
"""
@@ -445,6 +482,9 @@
self.add_composite_file( '%s.bim', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.bed', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.fam', substitute_name_with_metadata = 'base_name', is_binary = True )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
class Eigenstratgeno(Rgenetics):
"""
@@ -457,6 +497,9 @@
self.add_composite_file( '%s.eigenstratgeno', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.ind', substitute_name_with_metadata = 'base_name', is_binary = True )
self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name', is_binary = True )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
class Eigenstratpca(Rgenetics):
"""
@@ -467,18 +510,27 @@
def __init__( self, **kwd ):
Rgenetics.__init__(self, **kwd)
self.add_composite_file( '%s.eigenstratpca', description = 'Eigenstrat PCA file', substitute_name_with_metadata = 'base_name' )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
class Snptest(Rgenetics):
"""
fake class to distinguish different species of Rgenetics data collections
"""
file_ext="snptest"
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
class Pheno(Tabular):
"""
base class for pheno files
"""
file_ext = 'pheno'
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
class RexpBase( Html ):
"""
@@ -646,6 +698,9 @@
f.write("\n".join( rval ))
f.write('\n')
f.close()
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def init_meta( self, dataset, copy_from=None ):
"""Add metadata elements"""
if copy_from:
@@ -734,7 +789,10 @@
RexpBase.__init__(self, **kwd)
self.add_composite_file( '%s.affybatch', description = 'AffyBatch R object saved to file',
substitute_name_with_metadata = 'base_name', is_binary=True )
-
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
+
class Eset( RexpBase ):
"""derived class for BioC data structures in Galaxy """
file_ext = "eset"
@@ -743,6 +801,9 @@
RexpBase.__init__(self, **kwd)
self.add_composite_file( '%s.eset', description = 'ESet R object saved to file',
substitute_name_with_metadata = 'base_name', is_binary = True )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
class MAlist( RexpBase ):
"""derived class for BioC data structures in Galaxy """
@@ -752,6 +813,9 @@
RexpBase.__init__(self, **kwd)
self.add_composite_file( '%s.malist', description = 'MAlist R object saved to file',
substitute_name_with_metadata = 'base_name', is_binary = True )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
if __name__ == '__main__':
import doctest, sys
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/datatypes/images.py
--- a/lib/galaxy/datatypes/images.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/datatypes/images.py Tue Dec 08 09:05:35 2009 -0500
@@ -15,6 +15,9 @@
class Image( data.Data ):
"""Class describing an image"""
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
dataset.peek = 'Image in %s format' % dataset.extension
@@ -51,6 +54,9 @@
"""Class describing a GMAJ Applet"""
file_ext = "gmaj.zip"
copy_safe_peek = False
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
if hasattr( dataset, 'history_id' ):
@@ -102,6 +108,9 @@
class Html( data.Text ):
"""Class describing an html file"""
file_ext = "html"
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
dataset.peek = "HTML file"
@@ -136,6 +145,9 @@
"""Class describing a LAJ Applet"""
file_ext = "laj"
copy_safe_peek = False
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
if hasattr( dataset, 'history_id' ):
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/datatypes/interval.py Tue Dec 08 09:05:35 2009 -0500
@@ -75,7 +75,9 @@
else:
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
-
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_meta( self, dataset, overwrite = True, first_line_is_header = False, **kwd ):
Tabular.set_meta( self, dataset, overwrite = overwrite, skip = 0 )
@@ -340,7 +342,10 @@
MetadataElement( name="strandCol", desc="Strand column (click box & select)", param=metadata.ColumnParameter, optional=True, no_value=0 )
MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True, visible=False )
###do we need to repeat these? they are the same as should be inherited from interval type
-
+
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_meta( self, dataset, overwrite = True, **kwd ):
"""Sets the metadata information for datasets previously determined to be in bed format."""
i = 0
@@ -499,6 +504,9 @@
"""Initialize datatype, by adding GBrowse display app"""
Tabular.__init__(self, **kwd)
self.add_display_app ( 'c_elegans', 'display in Wormbase', 'as_gbrowse_display_file', 'gbrowse_links' )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_meta( self, dataset, overwrite = True, **kwd ):
i = 0
for i, line in enumerate( file ( dataset.file_name ) ):
@@ -636,6 +644,9 @@
def __init__(self, **kwd):
"""Initialize datatype, by adding GBrowse display app"""
Gff.__init__(self, **kwd)
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_meta( self, dataset, overwrite = True, **kwd ):
i = 0
for i, line in enumerate( file ( dataset.file_name ) ):
@@ -799,6 +810,9 @@
return ret_val
def make_html_table( self, dataset ):
return Tabular.make_html_table( self, dataset, skipchars=['track', '#'] )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_meta( self, dataset, overwrite = True, **kwd ):
i = 0
for i, line in enumerate( file ( dataset.file_name ) ):
@@ -890,6 +904,9 @@
"""Initialize interval datatype, by adding UCSC display app"""
Tabular.__init__(self, **kwd)
self.add_display_app ( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_meta( self, dataset, overwrite = True, **kwd ):
Tabular.set_meta( self, dataset, overwrite = overwrite, skip = 1 )
def display_peek( self, dataset ):
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/datatypes/qualityscore.py
--- a/lib/galaxy/datatypes/qualityscore.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/datatypes/qualityscore.py Tue Dec 08 09:05:35 2009 -0500
@@ -15,6 +15,9 @@
"""
file_ext = "qualsolid"
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def sniff( self, filename ):
"""
>>> fname = get_test_fname( 'sequence.fasta' )
@@ -64,6 +67,9 @@
"""
file_ext = "qual454"
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def sniff( self, filename ):
"""
>>> fname = get_test_fname( 'sequence.fasta' )
@@ -102,4 +108,9 @@
until we know more about quality score formats
"""
file_ext = "qualsolexa"
+
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
+
\ No newline at end of file
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/datatypes/sequence.py
--- a/lib/galaxy/datatypes/sequence.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/datatypes/sequence.py Tue Dec 08 09:05:35 2009 -0500
@@ -21,6 +21,9 @@
"""Add metadata elements"""
MetadataElement( name="sequences", default=0, desc="Number of sequences", readonly=True, visible=False, optional=True, no_value=0 )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_meta( self, dataset, **kwd ):
"""
Set the number of sequences and the number of data lines in dataset.
@@ -56,11 +59,17 @@
"""Add metadata elements"""
MetadataElement( name="species", desc="Species", default=[], param=metadata.SelectParameter, multiple=True, readonly=True, no_value=None )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
+
class Fasta( Sequence ):
"""Class representing a FASTA sequence"""
-
file_ext = "fasta"
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def sniff( self, filename ):
"""
Determines whether the file is in fasta format
@@ -113,6 +122,9 @@
""" Class representing the SOLID Color-Space sequence ( csfasta ) """
file_ext = "csfasta"
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def sniff( self, filename ):
"""
Color-space sequence:
@@ -154,6 +166,9 @@
"""Class representing a generic FASTQ sequence"""
file_ext = "fastq"
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_meta( self, dataset, **kwd ):
"""
Set the number of sequences and the number of data lines
@@ -205,6 +220,10 @@
"""Class representing a FASTQ sequence ( the Sanger variant )"""
file_ext = "fastqsanger"
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
+
try:
from galaxy import eggs
import pkg_resources; pkg_resources.require( "bx-python" )
@@ -297,6 +316,9 @@
MetadataElement( name="species_chromosomes", desc="Species Chromosomes", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
MetadataElement( name="maf_index", desc="MAF Index File", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def init_meta( self, dataset, copy_from=None ):
Alignment.init_meta( self, dataset, copy_from=copy_from )
def set_meta( self, dataset, overwrite = True, **kwd ):
@@ -403,6 +425,9 @@
file_ext = "axt"
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def sniff( self, filename ):
"""
Determines whether the file is in axt format
@@ -455,6 +480,9 @@
# here simply for backward compatibility ( although it is still in the datatypes registry ). Subclassing
# from data.Text eliminates managing metadata elements inherited from the Alignemnt class.
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def sniff( self, filename ):
"""
Determines whether the file is in lav format
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/datatypes/tabular.py Tue Dec 08 09:05:35 2009 -0500
@@ -23,6 +23,9 @@
MetadataElement( name="columns", default=0, desc="Number of columns", readonly=True, visible=False, no_value=0 )
MetadataElement( name="column_types", default=[], desc="Column types", param=metadata.ColumnTypesParameter, readonly=True, visible=False, no_value=[] )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def init_meta( self, dataset, copy_from=None ):
data.Text.init_meta( self, dataset, copy_from=copy_from )
def set_meta( self, dataset, overwrite = True, skip = None, **kwd ):
@@ -224,6 +227,9 @@
'Superorder', 'Order', 'Suborder', 'Superfamily', 'Family', 'Subfamily',
'Tribe', 'Subtribe', 'Genus', 'Subgenus', 'Species', 'Subspecies'
]
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def make_html_table( self, dataset, skipchars=[] ):
"""Create HTML table, used for displaying peek"""
out = ['<table cellspacing="0" cellpadding="3">']
@@ -253,6 +259,9 @@
self.column_names = ['QNAME', 'FLAG', 'RNAME', 'POS', 'MAPQ', 'CIGAR',
'MRNM', 'MPOS', 'ISIZE', 'SEQ', 'QUAL', 'OPT'
]
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def make_html_table( self, dataset, skipchars=[] ):
"""Create HTML table, used for displaying peek"""
out = ['<table cellspacing="0" cellpadding="3">']
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/datatypes/tracks.py
--- a/lib/galaxy/datatypes/tracks.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/datatypes/tracks.py Tue Dec 08 09:05:35 2009 -0500
@@ -23,6 +23,9 @@
def __init__(self, **kwargs):
super( GeneTrack, self ).__init__( **kwargs )
self.add_display_app( 'genetrack', 'View in', '', 'genetrack_link' )
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def get_display_links( self, dataset, type, app, base_url, target_frame='galaxy_main', **kwd ):
return data.Data.get_display_links( self, dataset, type, app, base_url, target_frame=target_frame, **kwd )
def genetrack_link( self, hda, type, app, base_url ):
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/datatypes/xml.py
--- a/lib/galaxy/datatypes/xml.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/datatypes/xml.py Tue Dec 08 09:05:35 2009 -0500
@@ -10,6 +10,10 @@
class BlastXml( data.Text ):
"""NCBI Blast XML Output data"""
file_ext = "blastxml"
+
+ def before_setting_metadata( self, dataset ):
+ """This function is called on the dataset before metadata is edited."""
+ pass
def set_peek( self, dataset, is_multi_byte=False ):
"""Set the peek and blurb text"""
if not dataset.dataset.purged:
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/jobs/__init__.py Tue Dec 08 09:05:35 2009 -0500
@@ -537,6 +537,7 @@
#it would be quicker to just copy the metadata from the originating output dataset,
#but somewhat trickier (need to recurse up the copied_from tree), for now we'll call set_meta()
if not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ):
+ dataset.datatype.before_setting_metadata( dataset )
# Only set metadata values if they are missing...
dataset.set_meta( overwrite = False )
else:
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/tools/__init__.py Tue Dec 08 09:05:35 2009 -0500
@@ -1418,6 +1418,7 @@
if data.extension != data_type:
data = app.datatypes_registry.change_datatype( data, data_type )
elif not isinstance( data.datatype, datatypes.interval.Bed ) and isinstance( data.datatype, datatypes.interval.Interval ):
+ data.datatype.before_setting_metadata( data )
data.set_meta()
if data.missing_meta():
data = app.datatypes_registry.change_datatype( data, 'tabular' )
@@ -1472,6 +1473,7 @@
self.sa_session.flush()
child_dataset.set_size()
child_dataset.name = "Secondary Dataset (%s)" % ( designation )
+ child_dataset.datatype.before_setting_metadata( child_dataset )
child_dataset.init_meta()
child_dataset.set_meta()
child_dataset.set_peek()
@@ -1531,6 +1533,7 @@
primary_data.set_size()
primary_data.name = outdata.name
primary_data.info = outdata.info
+ primary_dataset.datatype.before_setting_metadata( primary_dataset )
primary_data.init_meta( copy_from=outdata )
primary_data.dbkey = dbkey
primary_data.set_meta()
@@ -1567,7 +1570,7 @@
dataset.metadata.from_JSON_dict( external_metadata.get_output_filenames_by_dataset( dataset, app.model.context ).filename_out )
# If setting external metadata has failed, how can we inform the user?
# For now, we'll leave the default metadata and set the state back to its original.
- dataset.datatype.after_edit( dataset )
+ dataset.datatype.after_setting_metadata( dataset )
dataset.state = param_dict.get( '__ORIGINAL_DATASET_STATE__' )
self.sa_session.add( dataset )
self.sa_session.flush()
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/web/controllers/library.py Tue Dec 08 09:05:35 2009 -0500
@@ -465,7 +465,7 @@
else:
setattr( ldda.metadata, name, spec.unwrap( params.get ( name, None ) ) )
ldda.metadata.dbkey = dbkey
- ldda.datatype.after_edit( ldda )
+ ldda.datatype.after_setting_metadata( ldda )
trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
messagetype = 'done'
@@ -487,8 +487,9 @@
if name not in [ 'name', 'info', 'dbkey' ]:
if spec.get( 'default' ):
setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
+ ldda.datatype.before_setting_metadata( ldda )
ldda.datatype.set_meta( ldda )
- ldda.datatype.after_edit( ldda )
+ ldda.datatype.after_setting_metadata( ldda )
trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
messagetype = 'done'
@@ -520,7 +521,7 @@
msg=msg,
messagetype=messagetype )
if trans.app.security_agent.can_modify_library_item( user, roles, ldda ):
- ldda.datatype.before_edit( ldda )
+ ldda.datatype.before_setting_metadata( ldda )
if "dbkey" in ldda.datatype.metadata_spec and not ldda.metadata.dbkey:
# Copy dbkey into metadata, for backwards compatability
# This looks like it does nothing, but getting the dbkey
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/web/controllers/library_admin.py Tue Dec 08 09:05:35 2009 -0500
@@ -475,7 +475,7 @@
else:
setattr( ldda.metadata, name, spec.unwrap( params.get ( name, None ) ) )
ldda.metadata.dbkey = dbkey
- ldda.datatype.after_edit( ldda )
+ ldda.datatype.after_setting_metadata( ldda )
trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
messagetype = 'done'
@@ -493,8 +493,9 @@
if name not in [ 'name', 'info', 'dbkey' ]:
if spec.get( 'default' ):
setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
+ ldda.datatype.before_setting_metadata( ldda )
ldda.datatype.set_meta( ldda )
- ldda.datatype.after_edit( ldda )
+ ldda.datatype.after_setting_metadata( ldda )
trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
return trans.fill_template( "/admin/library/ldda_edit_info.mako",
@@ -516,7 +517,7 @@
widgets=widgets,
msg=msg,
messagetype=messagetype )
- ldda.datatype.before_edit( ldda )
+ ldda.datatype.before_setting_metadata( ldda )
if "dbkey" in ldda.datatype.metadata_spec and not ldda.metadata.dbkey:
# Copy dbkey into metadata, for backwards compatability
# This looks like it does nothing, but getting the dbkey
diff -r 8feff3bc14bc -r 83dc9642a59e lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Mon Dec 07 16:04:33 2009 -0500
+++ b/lib/galaxy/web/controllers/root.py Tue Dec 08 09:05:35 2009 -0500
@@ -301,7 +301,7 @@
setattr( data.metadata, name, other )
else:
setattr( data.metadata, name, spec.unwrap( params.get (name, None) ) )
- data.datatype.after_edit( data )
+ data.datatype.after_setting_metadata( data )
else:
msg = ' (Metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata.)'
trans.sa_session.flush()
@@ -321,8 +321,9 @@
trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
else:
msg = 'Attributes updated'
+ data.datatype.before_setting_metadata( data )
data.set_meta()
- data.datatype.after_edit( data )
+ data.datatype.after_setting_metadata( data )
trans.sa_session.flush()
return trans.show_ok_message( msg, refresh_frames=['history'] )
elif params.convert_data:
@@ -345,8 +346,7 @@
trans.sa_session.refresh( data.dataset )
else:
return trans.show_error_message( "You are not authorized to change this dataset's permissions" )
- data.datatype.before_edit( data )
-
+ data.datatype.before_setting_metadata( data )
if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey:
# Copy dbkey into metadata, for backwards compatability
# This looks like it does nothing, but getting the dbkey
@@ -521,6 +521,7 @@
data_file.close()
data.state = data.states.OK
data.set_size()
+ data.datatype.before_setting_metadata( data )
data.init_meta()
data.set_meta()
trans.sa_session.flush()
diff -r 8feff3bc14bc -r 83dc9642a59e tools/data_source/hbvar_filter.py
--- a/tools/data_source/hbvar_filter.py Mon Dec 07 16:04:33 2009 -0500
+++ b/tools/data_source/hbvar_filter.py Tue Dec 08 09:05:35 2009 -0500
@@ -46,6 +46,7 @@
fp.close()
#Set meta data, format file to be valid interval type
if isinstance(data.datatype, datatypes.interval.Interval):
+ data.datatype.before_setting_metadata( data )
data.set_meta(first_line_is_header=True)
#check for missing meta data, if all there, comment first line and process file
if not data.missing_meta():
diff -r 8feff3bc14bc -r 83dc9642a59e tools/maf/maf_to_bed_code.py
--- a/tools/maf/maf_to_bed_code.py Mon Dec 07 16:04:33 2009 -0500
+++ b/tools/maf/maf_to_bed_code.py Tue Dec 08 09:05:35 2009 -0500
@@ -45,6 +45,7 @@
newdata.info = "The requested file is missing from the system."
newdata.state = newdata.states.ERROR
newdata.dbkey = dbkey
+ newdata.datatype.before_setting_metadata( newdata )
newdata.init_meta()
newdata.set_meta()
newdata.set_peek()
diff -r 8feff3bc14bc -r 83dc9642a59e tools/samtools/sam_to_bam.py
--- a/tools/samtools/sam_to_bam.py Mon Dec 07 16:04:33 2009 -0500
+++ b/tools/samtools/sam_to_bam.py Tue Dec 08 09:05:35 2009 -0500
@@ -79,18 +79,35 @@
tmp_aligns_file = tempfile.NamedTemporaryFile()
tmp_aligns_file_name = tmp_aligns_file.name
tmp_aligns_file.close()
+ # IMPORTANT NOTE: for some reason the samtools view command gzips the resulting bam file without warning,
+ # and the docs do not currently state that this occurs ( very bad ).
command = "samtools view -bt %s -o %s %s 2>/dev/null" % ( fai_index_file_path, tmp_aligns_file_name, options.input1 )
proc = subprocess.Popen( args=command, shell=True )
proc.wait()
- shutil.move( tmp_aligns_file_name, options.output1 )
except Exception, e:
stop_err( 'Error extracting alignments from (%s), %s' % ( options.input1, str( e ) ) )
- # NOTE: samtools requires the Bam file to be sorted, but this occurs in Bam().set_meta() to ensure that uploaded Bam files are sorted as well.
+ try:
+ # Sort alignments by leftmost coordinates. File <out.prefix>.bam will be created. This command
+ # may also create temporary files <out.prefix>.%d.bam when the whole alignment cannot be fitted
+ # into memory ( controlled by option -m ).
+ tmp_sorted_aligns_file = tempfile.NamedTemporaryFile()
+ tmp_sorted_aligns_file_name = tmp_sorted_aligns_file.name
+ tmp_sorted_aligns_file.close()
+ command = "samtools sort %s %s 2>/dev/null" % ( tmp_aligns_file_name, tmp_sorted_aligns_file_name )
+ proc = subprocess.Popen( args=command, shell=True )
+ proc.wait()
+ except Exception, e:
+ stop_err( 'Error sorting alignments from (%s), %s' % ( tmp_aligns_file_name, str( e ) ) )
+ # Move tmp_aligns_file_name to our output dataset location
+ sorted_bam_file = '%s.bam' % tmp_sorted_aligns_file_name
+ shutil.move( sorted_bam_file, options.output1 )
if options.ref_file != "None":
# Remove the symlink from /tmp/dataset_13.dat to ~/database/files/000/dataset_13.dat
os.unlink( fai_index_file_path )
# Remove the index file
index_file_name = '%s.fai' % fai_index_file_path
os.unlink( index_file_name )
+ # Remove the tmp_aligns_file_name
+ os.unlink( tmp_aligns_file_name )
if __name__=="__main__": __main__()
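Condensed, the reworked tool now runs a view-then-sort pipeline before handing the result back to Galaxy. A sketch under assumed file names (the samtools invocations are the ones in the diff):

import os, shutil, subprocess, tempfile

def sam_to_sorted_bam( sam_path, fai_path, output_path ):
    # Convert SAM to BAM against the reference .fai index; per the note in
    # the diff, samtools view emits BGZF-compressed output.
    unsorted_bam = tempfile.NamedTemporaryFile( suffix='.bam', delete=False ).name
    subprocess.call( 'samtools view -bt %s -o %s %s 2>/dev/null' % ( fai_path, unsorted_bam, sam_path ), shell=True )
    # Coordinate-sort: samtools 0.1.x writes <prefix>.bam for the given prefix.
    sort_prefix = tempfile.NamedTemporaryFile( delete=False ).name
    subprocess.call( 'samtools sort %s %s 2>/dev/null' % ( unsorted_bam, sort_prefix ), shell=True )
    shutil.move( '%s.bam' % sort_prefix, output_path )
    # Clean up intermediates, as the tool now does for tmp_aligns_file_name.
    os.unlink( unsorted_bam )
    os.unlink( sort_prefix )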
11 Dec '09
details: http://www.bx.psu.edu/hg/galaxy/rev/119315b57656
changeset: 3154:119315b57656
user: Kelly Vincent <kpvincent(a)bx.psu.edu>
date: Mon Dec 07 15:57:06 2009 -0500
description:
Changed the Lastz call for the tabular (general) format to suppress the header
diffstat:
test-data/lastz_wrapper_out4.tabular | 2 --
tools/sr_mapping/lastz_wrapper.py | 4 ++--
tools/sr_mapping/lastz_wrapper.xml | 14 ++++++++++++--
3 files changed, 14 insertions(+), 6 deletions(-)
diffs (65 lines):
diff -r 14214b45db3f -r 119315b57656 test-data/lastz_wrapper_out4.tabular
--- a/test-data/lastz_wrapper_out4.tabular Mon Dec 07 14:45:47 2009 -0500
+++ b/test-data/lastz_wrapper_out4.tabular Mon Dec 07 15:57:06 2009 -0500
@@ -1,4 +1,3 @@
-#score name1 strand1 size1 start1 zstart1 end1 length1 text1 name2 strand2 size2 start2 zstart2 end2 start2+ zstart2+ end2+ length2 text2 diff cigar identity idPct coverage covPct gaprate gapPct diagonal shingle
36 phiX1 + 2310 229 228 264 36 GATGAGGAGAAGTGGCTTAATATGCTTGGCACGTTC HWI-EAS91_1_306UPAAXX + 36 1 0 36 1 0 36 36 GATGAGGAGAAGTGGCTTAATATGCTTGGCACGTTC .................................... 36M 36/36 100.0% 36/36 100.0% 0/36 0.0% 228 NA
36 phiX1 + 2310 2071 2070 2106 36 GTATGTTTCTCCTGCTTATCACCTTCTTGAAGGCTT HWI-EAS91_1_306UPAAXX + 36 1 0 36 1 0 36 36 GTATGTTTCTCCTGCTTATCACCTTCTTGAAGGCTT .................................... 36M 36/36 100.0% 36/36 100.0% 0/36 0.0% 2070 NA
35 phiX1 + 2310 1067 1066 1101 35 GGTCAGATTGGTCGTCTTATTACCATTTCAACTAC HWI-EAS91_1_306UPAAXX - 36 2 1 36 1 0 35 35 GGTCAGATTGGTCGTCTTATTACCATTTCAACTAC ................................... 35M 35/35 100.0% 35/36 97.2% 0/35 0.0% 1065 NA
@@ -207,7 +206,6 @@
36 phiX1 + 2310 333 332 368 36 AGAGCGTGGATTACTATCTGAGTCCGATGCTGTTCA HWI-EAS91_1_306UPAAXX - 36 1 0 36 1 0 36 36 AGAGCGTGGATTACTATCTGAGTCCGATGCTGTTCA .................................... 36M 36/36 100.0% 36/36 100.0% 0/36 0.0% 332 NA
36 phiX1 + 2310 72 71 107 36 ATAAAGCAGGAATTACTACTGCTTGTTTACGAATTA HWI-EAS91_1_306UPAAXX - 36 1 0 36 1 0 36 36 ATAAAGCAGGAATTACTACTGCTTGTTTACGAATTA .................................... 36M 36/36 100.0% 36/36 100.0% 0/36 0.0% 71 NA
36 phiX1 + 2310 1974 1973 2009 36 CGCCGCGTGAAATTTCTATGAAGGATGTTTTCCGTT HWI-EAS91_1_306UPAAXX - 36 1 0 36 1 0 36 36 CGCCGCGTGAAATTTCTATGAAGGATGTTTTCCGTT .................................... 36M 36/36 100.0% 36/36 100.0% 0/36 0.0% 1973 NA
-#score name1 strand1 size1 start1 zstart1 end1 length1 text1 name2 strand2 size2 start2 zstart2 end2 start2+ zstart2+ end2+ length2 text2 diff cigar identity idPct coverage covPct gaprate gapPct diagonal shingle
35 phiX2 + 3076 1985 1984 2019 35 CCCCCAACTTGATATTAATAACACTATAGACCACC HWI-EAS91_1_306UPAAXX - 36 2 1 36 1 0 35 35 CCCCCAACTTGATATTAATAACACTATAGACCACC ................................... 35M 35/35 100.0% 35/36 97.2% 0/35 0.0% 1983 NA
36 phiX2 + 3076 1212 1211 1247 36 ATGCTTGCTTATCAACAGAAGGAGTCTACTGCTCGC HWI-EAS91_1_306UPAAXX - 36 1 0 36 1 0 36 36 ATGCTTGCTTATCAACAGAAGGAGTCTACTGCTCGC .................................... 36M 36/36 100.0% 36/36 100.0% 0/36 0.0% 1211 NA
33 phiX2 + 3076 2253 2252 2285 33 GGTTGGTTTATCGTTTTTGACACTCTCACGTTG HWI-EAS91_1_306UPAAXX + 36 1 0 33 1 0 33 33 GGTTGGTTTATCGTTTTTGACACTCTCACGTTG ................................. 33M 33/33 100.0% 33/36 91.7% 0/33 0.0% 2252 NA
diff -r 14214b45db3f -r 119315b57656 tools/sr_mapping/lastz_wrapper.py
--- a/tools/sr_mapping/lastz_wrapper.py Mon Dec 07 14:45:47 2009 -0500
+++ b/tools/sr_mapping/lastz_wrapper.py Mon Dec 07 15:57:06 2009 -0500
@@ -2,7 +2,7 @@
"""
Runs Lastz
-Written for Lastz v. 1.01.86.
+Written for Lastz v. 1.01.88.
usage: lastz_wrapper.py [options]
--ref_name: The reference name to change all output matches to
@@ -132,7 +132,7 @@
input2 = options.input2
if options.format == 'tabular':
# Change output format to general if it's tabular and add field names for tabular output
- format = 'general'
+ format = 'general-'
tabular_fields = ':score,name1,strand1,size1,start1,zstart1,end1,length1,text1,name2,strand2,size2,start2,zstart2,end2,start2+,zstart2+,end2+,length2,text2,diff,cigar,identity,coverage,gaprate,diagonal,shingle'
elif options.format == 'sam':
# We currently ALWAYS suppress SAM headers.
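The trailing dash is the substance of the fix: lastz's 'general-' variant of the general format omits the per-run header line, so concatenated runs (as in the test comment below, which appends with >>) stay clean tabular data. An illustrative fragment, with assumed file names and the field list abbreviated:

format = 'general-'  # plain 'general' would prepend a '#score name1 ...' header line
tabular_fields = ':score,name1,strand1,size1,start1'
command = 'lastz ref.fa reads.fa --format=%s%s > out.tabular' % ( format, tabular_fields )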
diff -r 14214b45db3f -r 119315b57656 tools/sr_mapping/lastz_wrapper.xml
--- a/tools/sr_mapping/lastz_wrapper.xml Mon Dec 07 14:45:47 2009 -0500
+++ b/tools/sr_mapping/lastz_wrapper.xml Mon Dec 07 15:57:06 2009 -0500
@@ -1,4 +1,4 @@
-<tool id="lastz_wrapper_2" name="Lastz" version="1.0.0">
+<tool id="lastz_wrapper_2" name="Lastz" version="1.1.0">
<description> map short reads against reference sequence</description>
<command interpreter="python">lastz_wrapper.py
#if $seq_name.how_to_name=="yes":
@@ -179,7 +179,17 @@
<param name="num_threads" value="4" />
<output name="output1" file="lastz_wrapper_out3.tabular" />
</test>
- <test>
+ <test>
+ <!--
+ Lastz command: first you will need to split the file phiX_split.fasta into two files,
+ phiX1.fasta and phiX2.fasta, each with 1 sequence (phiX1 and phiX2, respectively). Then:
+ lastz phiX1.fasta test-data/b1.fasta *yasra95short *ambiguousn *nolaj *identity=0..100 *coverage=0 *format=general-:score,name1,strand1,size1,start1,zstart1,end1,length1,text1,name2,strand2,size2,start2,zstart2,end2,start2+,zstart2+,end2+,length2,text2,diff,cigar,identity,coverage,gaprate,diagonal,shingle > lastz_wrapper_out4.tabular
+ lastz phiX2.fasta test-data/b1.fasta *yasra95short *ambiguousn *nolaj *identity=0..100 *coverage=0 *format=general-:score,name1,strand1,size1,start1,zstart1,end1,length1,text1,name2,strand2,size2,start2,zstart2,end2,start2+,zstart2+,end2+,length2,text2,diff,cigar,identity,coverage,gaprate,diagonal,shingle >> lastz_wrapper_out4.tabular
+ You need to point to phiX1.fasta and phiX2.fasta somewhere on your system.
+ phiX_split.fasta and b1.fasta are located in galaxy's test-data
+ You will have to replace all the asterisks before the commands with 2 dashes,
+ as double-dash can't appear in an XML comment
+ -->
<param name="input2" value="b1.fasta" ftype="fasta" />
<param name="ref_source" value="history" />
<param name="input1" value="phiX_split.fasta" ftype="fasta" />
11 Dec '09
details: http://www.bx.psu.edu/hg/galaxy/rev/8feff3bc14bc
changeset: 3155:8feff3bc14bc
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Mon Dec 07 16:04:33 2009 -0500
description:
Fix for uploading a Bam file that has not yet been sorted: the call to samtools sort has been moved from the sam_to_bam tool to the Bam().set_meta() method to ensure all Bam datasets are sorted prior to indexing. Added new functional tests to cover uploading unsorted Bam files. Also cleaned up code in the upload tool for uploading various binary data formats.
diffstat:
lib/galaxy/datatypes/binary.py | 92 ++++++++++++++++++----
lib/galaxy/datatypes/sniff.py | 3 +
lib/galaxy/datatypes/test/3.bam |
test-data/3.bam |
test/functional/test_get_data.py | 26 ++++++-
tools/data_source/upload.py | 138 +++++++++++++++-------------------
tools/samtools/sam_to_bam.py | 21 +----
tools/samtools/sam_to_bam.xml | 4 -
8 files changed, 164 insertions(+), 120 deletions(-)
diffs (440 lines):
diff -r 119315b57656 -r 8feff3bc14bc lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py Mon Dec 07 15:57:06 2009 -0500
+++ b/lib/galaxy/datatypes/binary.py Mon Dec 07 16:04:33 2009 -0500
@@ -12,7 +12,6 @@
log = logging.getLogger(__name__)
-sniffable_binary_formats = [ 'sff', 'bam' ]
# Currently these supported binary data types must be manually set on upload
unsniffable_binary_formats = [ 'ab1', 'scf' ]
@@ -55,29 +54,84 @@
def init_meta( self, dataset, copy_from=None ):
Binary.init_meta( self, dataset, copy_from=copy_from )
def set_meta( self, dataset, overwrite = True, **kwd ):
- """ Sets index for BAM file. """
+ """ Ensures that the Bam file contents are sorted and creates the index for the BAM file. """
+ errors = False
# These metadata values are not accessible by users, always overwrite
index_file = dataset.metadata.bam_index
- if not index_file:
+ if index_file:
+ # If an index file already exists on disk, then the data must have previously been sorted
+ # since samtools requires a sorted Bam file in order to create an index.
+ sorted = os.path.exists( index_file.file_name )
+ else:
index_file = dataset.metadata.spec['bam_index'].param.new_file( dataset = dataset )
+ sorted = False
+ tmp_dir = tempfile.gettempdir()
try:
- # Using a symlink from ~/database/files/dataset_XX.dat, create a temporary file
- # to store the indexex generated from samtools, something like ~/tmp/dataset_XX.dat.bai
- tmp_dir = tempfile.gettempdir()
- tmp_file_path = os.path.join( tmp_dir, os.path.basename( dataset.file_name ) )
- # Here tmp_file_path looks something like /tmp/dataset_XX.dat
- os.symlink( dataset.file_name, tmp_file_path )
- command = 'samtools index %s' % tmp_file_path
- proc = subprocess.Popen( args=command, shell=True )
- proc.wait()
- except:
- err_msg = 'Error creating index file (%s) for BAM file (%s)' % ( str( tmp_file_path ), str( dataset.file_name ) )
+ # Create a symlink from the temporary directory to the dataset file so that samtools can mess with it.
+ tmp_dataset_file_name = os.path.join( tmp_dir, os.path.basename( dataset.file_name ) )
+ # Here tmp_dataset_file_name looks something like /tmp/dataset_XX.dat
+ os.symlink( dataset.file_name, tmp_dataset_file_name )
+ except Exception, e:
+ errors = True
+ err_msg = 'Error creating tmp symlink to file (%s). ' % str( dataset.file_name )
log.exception( err_msg )
- sys.stderr.write( err_msg )
- # Move the temporary index file ~/tmp/dataset_XX.dat.bai to be ~/database/files/_metadata_files/dataset_XX.dat
- shutil.move( '%s.bai' % ( tmp_file_path ), index_file.file_name )
- os.unlink( tmp_file_path )
- dataset.metadata.bam_index = index_file
+ sys.stderr.write( err_msg + str( e ) )
+ if not errors and not sorted:
+ try:
+ # Sort alignments by leftmost coordinates. File <out.prefix>.bam will be created.
+ # TODO: This command may also create temporary files <out.prefix>.%d.bam when the
+ # whole alignment cannot be fitted into memory ( controlled by option -m ). We're
+ # not handling this case here.
+ tmp_sorted_dataset_file = tempfile.NamedTemporaryFile( prefix=tmp_dataset_file_name )
+ tmp_sorted_dataset_file_name = tmp_sorted_dataset_file.name
+ tmp_sorted_dataset_file.close()
+ command = "samtools sort %s %s 2>/dev/null" % ( tmp_dataset_file_name, tmp_sorted_dataset_file_name )
+ proc = subprocess.Popen( args=command, shell=True )
+ proc.wait()
+ except Exception, e:
+ errors = True
+ err_msg = 'Error sorting alignments from (%s). ' % tmp_dataset_file_name
+ log.exception( err_msg )
+ sys.stderr.write( err_msg + str( e ) )
+ if not errors:
+ if sorted:
+ try:
+ # Create the Bam index
+ command = 'samtools index %s' % tmp_dataset_file_name
+ proc = subprocess.Popen( args=command, shell=True )
+ proc.wait()
+ except Exception, e:
+ errors = True
+ err_msg = 'Error creating index for BAM file (%s)' % str( tmp_dataset_file_name )
+ log.exception( err_msg )
+ sys.stderr.write( err_msg + str( e ) )
+ else:
+ tmp_sorted_bam_file_name = '%s.bam' % tmp_sorted_dataset_file_name
+ try:
+ # Create the Bam index
+ command = 'samtools index %s' % tmp_sorted_bam_file_name
+ proc = subprocess.Popen( args=command, shell=True )
+ proc.wait()
+ except Exception, e:
+ errors = True
+ err_msg = 'Error creating index for BAM file (%s)' % str( tmp_sorted_dataset_file_name )
+ log.exception( err_msg )
+ sys.stderr.write( err_msg + str( e ) )
+ if not errors:
+ if sorted:
+ # Move the temporary index file ~/tmp/dataset_XX.dat.bai to our metadata file
+ # storage location ~/database/files/_metadata_files/dataset_XX.dat
+ shutil.move( '%s.bai' % ( tmp_dataset_file_name ), index_file.file_name )
+ else:
+ # Move tmp_sorted_bam_file_name to our output dataset location
+ shutil.move( tmp_sorted_bam_file_name, dataset.file_name )
+ # Move the temporary sorted index file ~/tmp/dataset_XX.dat.bai to our metadata file
+ # storage location ~/database/files/_metadata_files/dataset_XX.dat
+ shutil.move( '%s.bai' % ( tmp_sorted_bam_file_name ), index_file.file_name )
+ # Remove all remaining temporary files
+ os.unlink( tmp_dataset_file_name )
+ # Set the metadata
+ dataset.metadata.bam_index = index_file
def sniff( self, filename ):
# BAM is compressed in the BGZF format, and must not be uncompressed in Galaxy.
# The first 4 bytes of any bam file is 'BAM\1', and the file is binary.
diff -r 119315b57656 -r 8feff3bc14bc lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py Mon Dec 07 15:57:06 2009 -0500
+++ b/lib/galaxy/datatypes/sniff.py Mon Dec 07 16:04:33 2009 -0500
@@ -255,6 +255,9 @@
>>> fname = get_test_fname('1.bam')
>>> guess_ext(fname)
'bam'
+ >>> fname = get_test_fname('3.bam')
+ >>> guess_ext(fname)
+ 'bam'
"""
if sniff_order is None:
datatypes_registry = registry.Registry()
diff -r 119315b57656 -r 8feff3bc14bc lib/galaxy/datatypes/test/3.bam
Binary file lib/galaxy/datatypes/test/3.bam has changed
diff -r 119315b57656 -r 8feff3bc14bc test-data/3.bam
Binary file test-data/3.bam has changed
diff -r 119315b57656 -r 8feff3bc14bc test/functional/test_get_data.py
--- a/test/functional/test_get_data.py Mon Dec 07 15:57:06 2009 -0500
+++ b/test/functional/test_get_data.py Mon Dec 07 16:04:33 2009 -0500
@@ -521,7 +521,7 @@
self.check_metadata_for_string( 'value="1.axt" value="\?" Change data type selected value="axt" selected="yes"' )
self.delete_history( id=self.security.encode_id( history.id ) )
def test_0150_upload_file( self ):
- """Test uploading 1.bam, NOT setting the file format"""
+ """Test uploading 1.bam, which is a sorted Bam file creaed by the Galaxy sam_to_bam tool, NOT setting the file format"""
self.check_history_for_string( 'Your history is empty' )
history = sa_session.query( galaxy.model.History ) \
.filter( and_( galaxy.model.History.table.c.deleted==False,
@@ -535,8 +535,30 @@
assert hda is not None, "Problem retrieving hda from database"
self.verify_dataset_correctness( '1.bam', hid=str( hda.hid ) )
self.check_history_for_string( '<span class="bam">bam</span>' )
+ # Make sure the Bam index was created
+ assert hda.metadata.bam_index is not None, "Bam index was not correctly created for 1.bam"
self.delete_history( id=self.security.encode_id( history.id ) )
- def test_0155_url_paste( self ):
+ def test_0155_upload_file( self ):
+ """Test uploading 3.bam, which is an unsorted Bam file, NOT setting the file format"""
+ self.check_history_for_string( 'Your history is empty' )
+ history = sa_session.query( galaxy.model.History ) \
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ self.upload_file( '3.bam' )
+ hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert hda is not None, "Problem retrieving hda from database"
+ # Since 3.bam is not sorted, we cannot verify dataset correctness since the uploaded
+ # dataset will be sorted. However, the check below to see if the index was created is
+ # sufficient.
+ self.check_history_for_string( '<span class="bam">bam</span>' )
+ # Make sure the Bam index was created
+ assert hda.metadata.bam_index is not None, "Bam index was not correctly created for 3.bam"
+ self.delete_history( id=self.security.encode_id( history.id ) )
+ def test_0160_url_paste( self ):
"""Test url paste behavior"""
# Logged in as admin_user
# Deleting the current history should have created a new history
diff -r 119315b57656 -r 8feff3bc14bc tools/data_source/upload.py
--- a/tools/data_source/upload.py Mon Dec 07 15:57:06 2009 -0500
+++ b/tools/data_source/upload.py Mon Dec 07 16:04:33 2009 -0500
@@ -9,7 +9,7 @@
# need to import model before sniff to resolve a circular import dependency
import galaxy.model
from galaxy.datatypes import sniff
-from galaxy.datatypes.binary import sniffable_binary_formats, unsniffable_binary_formats
+from galaxy.datatypes.binary import *
from galaxy import util
from galaxy.util.json import *
@@ -61,62 +61,54 @@
if chunk is None:
temp.close()
return False
-def check_binary( temp_name, chunk=None ):
- if chunk is None:
+def check_binary( temp_name ):
+ is_binary = False
+ temp = open( temp_name, "U" )
+ chars_read = 0
+ for chars in temp:
+ for char in chars:
+ chars_read += 1
+ if ord( char ) > 128:
+ is_binary = True
+ break
+ if chars_read > 100:
+ break
+ if chars_read > 100:
+ break
+ temp.close()
+ return is_binary
+def check_bam( temp_name ):
+ return Bam().sniff( temp_name )
+def check_sff( temp_name ):
+ return Sff().sniff( temp_name )
+def check_gzip( temp_name ):
+ # This method returns a tuple of booleans representing ( is_gzipped, is_valid )
+ # Make sure we have a gzipped file
+ try:
temp = open( temp_name, "U" )
- else:
- temp = chunk
- lineno = 0
- for line in temp:
- lineno += 1
- line = line.strip()
- if line:
- for char in line:
- if ord( char ) > 128:
- if chunk is None:
- temp.close()
- return True
- if lineno > 10:
- break
- if chunk is None:
+ magic_check = temp.read( 2 )
temp.close()
- return False
-def check_gzip( temp_name ):
- # This is sort of hacky. BAM is compressed in the BGZF format, and must
- # not be uncompressed in upon upload ( it will be detected as gzipped ).
- # The tuple we're returning from here contains boolean values for
- # ( is_compressed, is_valid, is_bam ).
- temp = open( temp_name, "U" )
- magic_check = temp.read( 2 )
- temp.close()
- if magic_check != util.gzip_magic:
- return ( False, False, False )
+ if magic_check != util.gzip_magic:
+ return ( False, False )
+ except:
+ return ( False, False )
+ # We support some binary data types, so check if the compressed binary file is valid
+ # If the file is Bam, it should already have been detected as such, so we'll just check
+ # for sff format.
+ try:
+ header = gzip.open( temp_name ).read(4)
+ if binascii.b2a_hex( header ) == binascii.hexlify( '.sff' ):
+ return ( True, True )
+ except:
+ return( False, False )
CHUNK_SIZE = 2**15 # 32Kb
- gzipped_file = gzip.GzipFile( temp_name )
+ gzipped_file = gzip.GzipFile( temp_name, mode='rb' )
chunk = gzipped_file.read( CHUNK_SIZE )
gzipped_file.close()
+ # See if we have a compressed HTML file
if check_html( temp_name, chunk=chunk ):
- return ( True, False, False )
- if check_binary( temp_name, chunk=chunk ):
- # We do support some binary data types, so check if the compressed binary file is valid
- # We currently only check for [ 'sff', 'bam' ]
- # TODO: this should be fixed to more easily support future-supported binary data types.
- # This is currently just copied from the sniff methods.
- # The first 4 bytes of any bam file is 'BAM\1', and the file is binary.
- try:
- header = gzip.open( temp_name ).read(4)
- if binascii.b2a_hex( header ) == binascii.hexlify( 'BAM\1' ):
- return ( True, True, True )
- except:
- pass
- try:
- header = gzip.open( temp_name ).read(4)
- if binascii.b2a_hex( header ) == binascii.hexlify( '.sff' ):
- return ( True, True, False )
- except:
- pass
- return ( True, False, False )
- return ( True, True, False )
+ return ( True, False )
+ return ( True, True )
def check_zip( temp_name ):
if not zipfile.is_zipfile( temp_name ):
return ( False, False, None )
@@ -126,7 +118,7 @@
# 2. All file extensions within an archive must be the same
name = zip_file.namelist()[0]
test_ext = name.split( "." )[1].strip().lower()
- if not ( test_ext == 'scf' or test_ext == 'ab1' or test_ext == 'txt' ):
+ if not ( test_ext in unsniffable_binary_formats or test_ext == 'txt' ):
return ( True, False, test_ext )
for name in zip_file.namelist():
ext = name.split( "." )[1].strip().lower()
@@ -163,21 +155,25 @@
dataset.is_multi_byte = util.is_multi_byte( codecs.open( dataset.path, 'r', 'utf-8' ).read( 100 ) )
except UnicodeDecodeError, e:
dataset.is_multi_byte = False
+ # Is dataset content multi-byte?
if dataset.is_multi_byte:
data_type = 'multi-byte char'
ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
+ # Is dataset content supported sniffable binary?
+ elif check_bam( dataset.path ):
+ ext = 'bam'
+ data_type = 'bam'
+ elif check_sff( dataset.path ):
+ ext = 'sff'
+ data_type = 'sff'
else:
# See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
- is_gzipped, is_valid, is_bam = check_gzip( dataset.path )
+ is_gzipped, is_valid = check_gzip( dataset.path )
if is_gzipped and not is_valid:
file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
return
- elif is_gzipped and is_valid and is_bam:
- ext = 'bam'
- data_type = 'bam'
- elif is_gzipped and is_valid and not is_bam:
- # We need to uncompress the temp_name file, but BAM files must remain compressed
- # in order for samtools to function on them
+ elif is_gzipped and is_valid:
+ # We need to uncompress the temp_name file, but BAM files must remain compressed in the BGZF format
CHUNK_SIZE = 2**20 # 1Mb
fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_gunzip_' % dataset.dataset_id, dir=os.path.dirname( dataset.path ), text=False )
gzipped_file = gzip.GzipFile( dataset.path, 'rb' )
@@ -207,7 +203,7 @@
elif is_zipped and is_valid:
# Currently, we force specific tools to handle this case. We also require the user
# to manually set the incoming file_type
- if ( test_ext == 'ab1' or test_ext == 'scf' ) and dataset.file_type != 'binseq.zip':
+ if ( test_ext in unsniffable_binary_formats ) and dataset.file_type != 'binseq.zip':
file_err( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'", dataset, json_file )
return
elif test_ext == 'txt' and dataset.file_type != 'txtseq.zip':
@@ -220,35 +216,25 @@
ext = dataset.file_type
if not data_type:
if check_binary( dataset.path ):
+ # We have a binary dataset, but it is not Bam or Sff
data_type = 'binary'
- binary_ok = False
+ #binary_ok = False
parts = dataset.name.split( "." )
if len( parts ) > 1:
ext = parts[1].strip().lower()
- if ext in unsniffable_binary_formats and dataset.file_type == ext:
- binary_ok = True
+ if ext not in unsniffable_binary_formats:
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
elif ext in unsniffable_binary_formats and dataset.file_type != ext:
err_msg = "You must manually set the 'File Format' to '%s' when uploading %s files." % ( ext.capitalize(), ext )
file_err( err_msg, dataset, json_file )
return
- if not binary_ok and ext in sniffable_binary_formats:
- # Sniff the file to confirm it's data type
- tmp_ext = sniff.guess_ext( dataset.path )
- if tmp_ext == ext:
- binary_ok = True
- else:
- err_msg = "The content of the file does not match its type (%s)." % ext.capitalize()
- file_err( err_msg, dataset, json_file )
- return
- if not binary_ok:
- file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
- return
if not data_type:
# We must have a text file
if check_html( dataset.path ):
file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
return
- if data_type != 'bam' and data_type != 'binary' and data_type != 'zip':
+ if data_type != 'binary' and data_type != 'zip':
if dataset.space_to_tab:
line_count = sniff.convert_newlines_sep2tabs( dataset.path )
else:
diff -r 119315b57656 -r 8feff3bc14bc tools/samtools/sam_to_bam.py
--- a/tools/samtools/sam_to_bam.py Mon Dec 07 15:57:06 2009 -0500
+++ b/tools/samtools/sam_to_bam.py Mon Dec 07 16:04:33 2009 -0500
@@ -79,35 +79,18 @@
tmp_aligns_file = tempfile.NamedTemporaryFile()
tmp_aligns_file_name = tmp_aligns_file.name
tmp_aligns_file.close()
- # IMPORTANT NOTE: for some reason the samtools view command gzips the resulting bam file without warning,
- # and the docs do not currently state that this occurs ( very bad ).
command = "samtools view -bt %s -o %s %s 2>/dev/null" % ( fai_index_file_path, tmp_aligns_file_name, options.input1 )
proc = subprocess.Popen( args=command, shell=True )
proc.wait()
+ shutil.move( tmp_aligns_file_name, options.output1 )
except Exception, e:
stop_err( 'Error extracting alignments from (%s), %s' % ( options.input1, str( e ) ) )
- try:
- # Sort alignments by leftmost coordinates. File <out.prefix>.bam will be created. This command
- # may also create temporary files <out.prefix>.%d.bam when the whole alignment cannot be fitted
- # into memory ( controlled by option -m ).
- tmp_sorted_aligns_file = tempfile.NamedTemporaryFile()
- tmp_sorted_aligns_file_name = tmp_sorted_aligns_file.name
- tmp_sorted_aligns_file.close()
- command = "samtools sort %s %s 2>/dev/null" % ( tmp_aligns_file_name, tmp_sorted_aligns_file_name )
- proc = subprocess.Popen( args=command, shell=True )
- proc.wait()
- except Exception, e:
- stop_err( 'Error sorting alignments from (%s), %s' % ( tmp_aligns_file_name, str( e ) ) )
- # Move tmp_aligns_file_name to our output dataset location
- sorted_bam_file = '%s.bam' % tmp_sorted_aligns_file_name
- shutil.move( sorted_bam_file, options.output1 )
+ # NOTE: samtools requires the Bam file to be sorted, but this occurs in Bam().set_meta() to ensure that uploaded Bam files are sorted as well.
if options.ref_file != "None":
# Remove the symlink from /tmp/dataset_13.dat to ~/database/files/000/dataset_13.dat
os.unlink( fai_index_file_path )
# Remove the index file
index_file_name = '%s.fai' % fai_index_file_path
os.unlink( index_file_name )
- # Remove the tmp_aligns_file_name
- os.unlink( tmp_aligns_file_name )
if __name__=="__main__": __main__()
diff -r 119315b57656 -r 8feff3bc14bc tools/samtools/sam_to_bam.xml
--- a/tools/samtools/sam_to_bam.xml Mon Dec 07 15:57:06 2009 -0500
+++ b/tools/samtools/sam_to_bam.xml Mon Dec 07 16:04:33 2009 -0500
@@ -31,10 +31,6 @@
<data name="output1" format="bam"/>
</outputs>
<tests>
- <!--
- # IMPORTANT NOTE: for some reason the samtools view command gzips the resulting bam file without warning,
- # and the docs do not currently state that this occurs ( very bad ).
- -->
<test>
<param name="index_source" value="history" />
<param name="input1" value="3.sam" ftype="sam" />
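A closing note for anyone adapting the upload checks above: check_gzip() now returns a two-tuple ( is_gzipped, is_valid ) rather than the old three-tuple because Bam is sniffed before the gzip test ever runs. That ordering matters, since BAM is BGZF-compressed and therefore begins with the same two magic bytes as ordinary gzip data, and would otherwise be uncompressed on upload. A rough sketch of the two magic-byte tests involved (hypothetical helper names; the gzip magic bytes are '\x1f\x8b'):

    import binascii
    import gzip

    GZIP_MAGIC = '\x1f\x8b'

    def looks_gzipped( file_name ):
        # BGZF ( i.e. Bam ) files also start with these two bytes, so
        # sniff for Bam *before* treating a file as ordinary gzip data.
        return open( file_name, 'rb' ).read( 2 ) == GZIP_MAGIC

    def is_gzipped_sff( file_name ):
        # A gzipped sff file yields the '.sff' magic once the first four
        # bytes are uncompressed, mirroring the check in check_gzip().
        try:
            header = gzip.open( file_name ).read( 4 )
        except IOError:
            return False
        return binascii.b2a_hex( header ) == binascii.hexlify( '.sff' )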