galaxy-dev
Threads by month
- ----- 2025 -----
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2009 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2008 -----
- December
- November
- October
- September
- August
- 10008 discussions
details: http://www.bx.psu.edu/hg/galaxy/rev/d872c1e16afb
changeset: 2950:d872c1e16afb
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Nov 03 12:52:01 2009 -0500
description:
imported patch alchemy05_fixes_02
diffstat:
lib/galaxy/jobs/__init__.py | 6 +++---
lib/galaxy/model/mapping.py | 4 ++--
lib/galaxy/model/mapping_tests.py | 15 ++++++++-------
lib/galaxy/model/migrate/versions/0025_user_info.py | 4 ++--
lib/galaxy/tools/actions/__init__.py | 7 ++++---
lib/galaxy/tools/actions/metadata.py | 5 +++--
lib/galaxy/tools/actions/upload_common.py | 4 +++-
lib/galaxy/web/controllers/async.py | 6 +++---
lib/galaxy/web/controllers/dataset.py | 4 ++--
lib/galaxy/web/controllers/library.py | 6 +++---
lib/galaxy/web/controllers/library_admin.py | 12 ++++++++----
lib/galaxy/web/controllers/requests.py | 2 +-
lib/galaxy/web/controllers/root.py | 10 +++++-----
lib/galaxy/web/controllers/tool_runner.py | 2 --
lib/galaxy/web/framework/__init__.py | 23 ++++++++++++++---------
test/functional/test_forms_and_requests.py | 1 +
test/functional/test_security_and_libraries.py | 2 +-
test/functional/test_user_info.py | 3 ++-
tools/data_source/microbial_import_code.py | 6 +++---
tools/filters/lav_to_bed_code.py | 1 -
tools/maf/maf_to_bed_code.py | 4 +---
21 files changed, 69 insertions(+), 58 deletions(-)
diffs (567 lines):
diff -r 133252175425 -r d872c1e16afb lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/jobs/__init__.py Tue Nov 03 12:52:01 2009 -0500
@@ -199,7 +199,7 @@
try:
# Clear the session for each job so we get fresh states for
# job and all datasets
- self.sa_session.clear()
+ self.sa_session.expunge_all()
# Get the real job entity corresponding to the wrapper (if we
# are tracking in the database this is probably cached in
# the session from the original query above)
@@ -346,7 +346,7 @@
Prepare the job to run by creating the working directory and the
config files.
"""
- self.sa_session.clear() #this prevents the metadata reverting that has been seen in conjunction with the PBS job runner
+ self.sa_session.expunge_all() #this prevents the metadata reverting that has been seen in conjunction with the PBS job runner
if not os.path.exists( self.working_directory ):
os.mkdir( self.working_directory )
# Restore parameters from the database
@@ -477,7 +477,7 @@
the contents of the output files.
"""
# default post job setup
- self.sa_session.clear()
+ self.sa_session.expunge_all()
job = self.sa_session.query( model.Job ).get( self.job_id )
# if the job was deleted, don't finish it
if job.state == job.states.DELETED:
diff -r 133252175425 -r d872c1e16afb lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/model/mapping.py Tue Nov 03 12:52:01 2009 -0500
@@ -817,7 +817,7 @@
user=relation( User, backref="roles" ),
non_private_roles=relation( User,
backref="non_private_roles",
- primaryjoin=( ( User.table.c.id == UserRoleAssociation.table.c.user_id ) & ( UserRoleAssociation.table.c.role_id == Role.table.c.id ) & not_( Role.table.c.name == User.table.c.email & Role.table.c.type == 'private' ) ) ),
+ primaryjoin=( ( User.table.c.id == UserRoleAssociation.table.c.user_id ) & ( UserRoleAssociation.table.c.role_id == Role.table.c.id ) & not_( Role.table.c.name == User.table.c.email ) ) ),
role=relation( Role )
)
)
@@ -1134,7 +1134,7 @@
# Pack everything into a bunch
result = Bunch( **globals() )
result.engine = engine
- result.flush = lambda *args, **kwargs: Session.flush( *args, **kwargs )
+ # model.flush() has been removed.
result.session = Session
# For backward compatibility with "model.context.current"
result.context = Session
diff -r 133252175425 -r d872c1e16afb lib/galaxy/model/mapping_tests.py
--- a/lib/galaxy/model/mapping_tests.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/model/mapping_tests.py Tue Nov 03 12:52:01 2009 -0500
@@ -19,16 +19,17 @@
d1 = model.HistoryDatasetAssociation( extension="interval", metadata=dict(chromCol=1,startCol=2,endCol=3 ), history=h2, create_dataset=True )
#h2.queries.append( q1 )
#h2.queries.append( model.Query( "h2->q2" ) )
- model.context.current.flush()
- model.context.current.clear()
+ model.session.add_all( ( u, h1, h2, d1 ) )
+ model.session.flush()
+ model.session.expunge_all()
# Check
- users = model.context.current.query( model.User ).all()
+ users = model.session.query( model.User ).all()
assert len( users ) == 1
assert users[0].email == "james(a)foo.bar.baz"
assert users[0].password == "password"
assert len( users[0].histories ) == 1
assert users[0].histories[0].name == "History 1"
- hists = model.context.current.query( model.History ).all()
+ hists = model.session.query( model.History ).all()
assert hists[0].name == "History 1"
assert hists[1].name == ( "H" * 255 )
assert hists[0].user == users[0]
@@ -38,9 +39,9 @@
assert hists[1].datasets[0].file_name == os.path.join( "/tmp", *directory_hash_id( id ) ) + ( "/dataset_%d.dat" % id )
# Do an update and check
hists[1].name = "History 2b"
- model.context.current.flush()
- model.context.current.clear()
- hists = model.context.current.query( model.History ).all()
+ model.session.flush()
+ model.session.expunge_all()
+ hists = model.session.query( model.History ).all()
assert hists[0].name == "History 1"
assert hists[1].name == "History 2b"
# gvk TODO need to add a test for GalaxySessions, but not yet sure what they should look like.
diff -r 133252175425 -r d872c1e16afb lib/galaxy/model/migrate/versions/0025_user_info.py
--- a/lib/galaxy/model/migrate/versions/0025_user_info.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0025_user_info.py Tue Nov 03 12:52:01 2009 -0500
@@ -21,7 +21,7 @@
log.addHandler( handler )
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
def display_migration_details():
print "========================================"
@@ -59,4 +59,4 @@
except Exception, e:
log.debug( "Adding foreign key constraint 'user_form_values_id_fk' to table 'galaxy_user' failed: %s" % ( str( e ) ) )
def downgrade():
- pass
\ No newline at end of file
+ pass
diff -r 133252175425 -r d872c1e16afb lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/tools/actions/__init__.py Tue Nov 03 12:52:01 2009 -0500
@@ -235,7 +235,7 @@
# Store output
out_data[ name ] = data
# Store all changes to database
- trans.app.model.flush()
+ trans.sa_session.flush()
# Add all the top-level (non-child) datasets to the history
for name in out_data.keys():
if name not in child_dataset_names and name not in incoming: #don't add children; or already existing datasets, i.e. async created
@@ -248,7 +248,7 @@
child_dataset = out_data[ child_name ]
parent_dataset.children.append( child_dataset )
# Store data after custom code runs
- trans.app.model.flush()
+ trans.sa_session.flush()
# Create the job object
job = trans.app.model.Job()
job.session_id = trans.get_galaxy_session().id
@@ -274,7 +274,8 @@
job.add_input_dataset( name, None )
for name, dataset in out_data.iteritems():
job.add_output_dataset( name, dataset )
- trans.app.model.flush()
+ trans.sa_session.add( job )
+ trans.sa_session.flush()
# Some tools are not really executable, but jobs are still created for them ( for record keeping ).
# Examples include tools that redirect to other applications ( epigraph ). These special tools must
# include something that can be retrieved from the params ( e.g., REDIRECT_URL ) to keep the job
diff -r 133252175425 -r d872c1e16afb lib/galaxy/tools/actions/metadata.py
--- a/lib/galaxy/tools/actions/metadata.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/tools/actions/metadata.py Tue Nov 03 12:52:01 2009 -0500
@@ -26,7 +26,8 @@
job.tool_version = tool.version
except:
job.tool_version = "1.0.0"
- job.flush() #ensure job.id is available
+ trans.sa_session.add( job )
+ trans.sa_session.flush() #ensure job.id is available
#add parameters to job_parameter table
# Store original dataset state, so we can restore it. A separate table might be better (no chance of 'losing' the original state)?
@@ -49,7 +50,7 @@
#Need a special state here to show that metadata is being set and also allow the job to run
# i.e. if state was set to 'running' the set metadata job would never run, as it would wait for input (the dataset to set metadata on) to be in a ready state
dataset.state = dataset.states.SETTING_METADATA
- trans.app.model.flush()
+ trans.sa_session.flush()
# Queue the job for execution
trans.app.job_queue.put( job.id, tool )
diff -r 133252175425 -r d872c1e16afb lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/tools/actions/upload_common.py Tue Nov 03 12:52:01 2009 -0500
@@ -121,6 +121,7 @@
trans.history.add_dataset( hda, genome_build = uploaded_dataset.dbkey )
permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions )
+ trans.sa_session.flush()
return hda
def new_library_upload( trans, uploaded_dataset, library_bunch, state=None ):
@@ -291,7 +292,8 @@
for i, dataset in enumerate( data_list ):
job.add_output_dataset( 'output%i' % i, dataset )
job.state = job.states.NEW
- trans.app.model.flush()
+ trans.sa_session.add( job )
+ trans.sa_session.flush()
# Queue the job for execution
trans.app.job_queue.put( job.id, tool )
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/async.py
--- a/lib/galaxy/web/controllers/async.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/async.py Tue Nov 03 12:52:01 2009 -0500
@@ -77,7 +77,7 @@
data.state = data.blurb = jobs.JOB_ERROR
data.info = "Error -> %s" % STATUS
- trans.model.flush()
+ trans.sa_session.flush()
return "Data %s with status %s received. OK" % (data_id, STATUS)
@@ -112,7 +112,7 @@
data.flush()
open( data.file_name, 'wb' ).close() #create the file
trans.history.add_dataset( data, genome_build=GALAXY_BUILD )
- trans.model.flush()
+ trans.sa_session.flush()
trans.log_event( "Added dataset %d to history %d" %(data.id, trans.history.id ), tool_id=tool_id )
try:
@@ -132,6 +132,6 @@
data.info = str(e)
data.state = data.blurb = data.states.ERROR
- trans.model.flush()
+ trans.sa_session.flush()
return trans.fill_template('tool_executed.tmpl', out_data={}, tool=tool, config=self.app.config )
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/dataset.py Tue Nov 03 12:52:01 2009 -0500
@@ -335,7 +335,7 @@
assert topmost_parent in history.datasets, "Data does not belong to current history"
# Mark undeleted
data.mark_undeleted()
- self.app.model.flush()
+ trans.sa_session.flush()
trans.log_event( "Dataset id %s has been undeleted" % str(id) )
return True
return False
@@ -407,7 +407,7 @@
hist.add_dataset( data.copy( copy_children = True ) )
if history in target_histories:
refresh_frames = ['history']
- trans.app.model.flush()
+ trans.sa_session.flush()
done_msg = "%i datasets copied to %i histories." % ( len( source_dataset_ids ) - invalid_datasets, len( target_histories ) )
trans.sa_session.refresh( history )
elif create_new_history:
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/library.py Tue Nov 03 12:52:01 2009 -0500
@@ -421,7 +421,7 @@
if trans.app.security_agent.can_modify_library_item( user, roles, ldda ):
if ldda.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = "Data type changed for library dataset '%s'" % ldda.name
messagetype = 'done'
else:
@@ -463,7 +463,7 @@
setattr( ldda.metadata, name, spec.unwrap( params.get ( name, None ) ) )
ldda.metadata.dbkey = dbkey
ldda.datatype.after_edit( ldda )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
messagetype = 'done'
else:
@@ -486,7 +486,7 @@
setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
ldda.datatype.set_meta( ldda )
ldda.datatype.after_edit( ldda )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
messagetype = 'done'
else:
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/library_admin.py Tue Nov 03 12:52:01 2009 -0500
@@ -432,7 +432,7 @@
# The user clicked the Save button on the 'Change data type' form
if ldda.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = "Data type changed for library dataset '%s'" % ldda.name
return trans.fill_template( "/admin/library/ldda_edit_info.mako",
ldda=ldda,
@@ -469,7 +469,7 @@
setattr( ldda.metadata, name, spec.unwrap( params.get ( name, None ) ) )
ldda.metadata.dbkey = dbkey
ldda.datatype.after_edit( ldda )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
messagetype = 'done'
return trans.fill_template( "/admin/library/ldda_edit_info.mako",
@@ -488,7 +488,7 @@
setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
ldda.datatype.set_meta( ldda )
ldda.datatype.after_edit( ldda )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
return trans.fill_template( "/admin/library/ldda_edit_info.mako",
ldda=ldda,
@@ -674,6 +674,10 @@
replace_id = params.get( 'replace_id', None )
if replace_id not in [ None, 'None' ]:
replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( int( replace_id ) )
+ # The name is stored separately - by the time the new ldda is created,
+ # replace_dataset.name will point to the new ldda, not the one it's
+ # replacing.
+ replace_dataset_name = replace_dataset.name
if not last_used_build:
last_used_build = replace_dataset.library_dataset_dataset_association.dbkey
# Don't allow multiple datasets to be uploaded when replacing a dataset with a new version
@@ -701,7 +705,7 @@
if created_outputs:
total_added = len( created_outputs.values() )
if replace_dataset:
- msg = "Added %d dataset versions to the library dataset '%s' in the folder '%s'." % ( total_added, replace_dataset.name, folder.name )
+ msg = "Added %d dataset versions to the library dataset '%s' in the folder '%s'." % ( total_added, replace_dataset_name, folder.name )
else:
if not folder.parent:
# Libraries have the same name as their root_folder
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/requests.py Tue Nov 03 12:52:01 2009 -0500
@@ -294,7 +294,7 @@
# save all the new/unsaved samples entered by the user
if edit_mode == 'False':
for index in range(len(current_samples)-len(request.samples)):
- sample_index = index + len(request.samples)
+ sample_index = len(request.samples)
sample_name = util.restore_text( params.get( 'sample_%i_name' % sample_index, '' ) )
sample_values = []
for field_index in range(len(request.type.sample_form.fields)):
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/root.py Tue Nov 03 12:52:01 2009 -0500
@@ -277,7 +277,7 @@
if not __ok_to_edit_metadata( data.id ):
return trans.show_error_message( "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them." )
trans.app.datatypes_registry.change_datatype( data, params.datatype )
- trans.app.model.flush()
+ trans.sa_session.flush()
else:
return trans.show_error_message( "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype ) )
elif params.save:
@@ -303,7 +303,7 @@
data.datatype.after_edit( data )
else:
msg = ' (Metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata.)'
- trans.app.model.flush()
+ trans.sa_session.flush()
return trans.show_ok_message( "Attributes updated%s" % msg, refresh_frames=['history'] )
elif params.detect:
# The user clicked the Auto-detect button on the 'Edit Attributes' form
@@ -322,7 +322,7 @@
msg = 'Attributes updated'
data.set_meta()
data.datatype.after_edit( data )
- trans.app.model.flush()
+ trans.sa_session.flush()
return trans.show_ok_message( msg, refresh_frames=['history'] )
elif params.convert_data:
target_type = kwd.get("target_type", None)
@@ -383,7 +383,7 @@
if job.check_if_output_datasets_deleted():
job.mark_deleted()
self.app.job_manager.job_stop_queue.put( job.id )
- self.app.model.flush()
+ trans.sa_session.flush()
@web.expose
def delete( self, trans, id = None, show_deleted_on_refresh = False, **kwd):
@@ -432,7 +432,7 @@
for dataset in history.datasets:
dataset.deleted = True
dataset.clear_associated_files()
- self.app.model.flush()
+ trans.sa_session.flush()
trans.log_event( "History id %s cleared" % (str(history.id)) )
trans.response.send_redirect( url_for("/index" ) )
@web.expose
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/tool_runner.py Tue Nov 03 12:52:01 2009 -0500
@@ -198,8 +198,6 @@
# pasted data
datasets.append( create_dataset( 'Pasted Entry' ) )
break
- if datasets:
- trans.model.flush()
return [ d.id for d in datasets ]
@web.expose
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/framework/__init__.py Tue Nov 03 12:52:01 2009 -0500
@@ -139,7 +139,7 @@
self.__galaxy_session = NOT_SET
base.DefaultWebTransaction.__init__( self, environ )
self.setup_i18n()
- self.sa_session.clear()
+ self.sa_session.expunge_all()
self.debug = asbool( self.app.config.get( 'debug', False ) )
# Flag indicating whether we are in workflow building mode (means
# that the current history should not be used for parameter values
@@ -302,12 +302,12 @@
self.galaxy_session = galaxy_session
# Do we need to flush the session?
if galaxy_session_requires_flush:
- objects_to_flush = [ galaxy_session ]
+ sa_session.add( galaxy_session )
# FIXME: If prev_session is a proper relation this would not
# be needed.
if prev_galaxy_session:
- objects_to_flush.append( prev_galaxy_session )
- sa_session.flush( objects_to_flush )
+ sa_session.add( prev_galaxy_session )
+ sa_session.flush()
# If the old session was invalid, get a new history with our new session
if invalidate_existing_session:
self.new_history()
@@ -427,7 +427,8 @@
if not last_accessed:
# Only set default history permissions if current history is not from a previous session
self.app.security_agent.history_set_default_permissions( history, dataset=True, bypass_manage_permission=True )
- self.sa_session.flush( [ prev_galaxy_session, self.galaxy_session, history ] )
+ self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session, history ) )
+ self.sa_session.flush()
# This method is not called from the Galaxy reports, so the cookie will always be galaxysession
self.__update_session_cookie( name='galaxysession' )
def handle_user_logout( self ):
@@ -439,7 +440,8 @@
prev_galaxy_session = self.galaxy_session
prev_galaxy_session.is_valid = False
self.galaxy_session = self.__create_new_session( prev_galaxy_session )
- self.sa_session.flush( [ prev_galaxy_session, self.galaxy_session ] )
+ self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
+ self.sa_session.flush()
# This method is not called from the Galaxy reports, so the cookie will always be galaxysession
self.__update_session_cookie( name='galaxysession' )
@@ -466,7 +468,8 @@
def set_history( self, history ):
if history and not history.deleted:
self.galaxy_session.current_history = history
- self.sa_session.flush( [ self.galaxy_session ] )
+ self.sa_session.add( self.galaxy_session )
+ self.sa_session.flush()
history = property( get_history, set_history )
def new_history( self, name=None ):
"""
@@ -489,7 +492,8 @@
# Set the user's default history permissions
self.app.security_agent.history_set_default_permissions( history )
# Save
- self.sa_session.flush( [ self.galaxy_session, history ] )
+ self.sa_session.add_all( ( self.galaxy_session, history ) )
+ self.sa_session.flush()
return history
def get_user( self ):
@@ -498,7 +502,8 @@
def set_user( self, user ):
"""Set the current user."""
self.galaxy_session.user = user
- self.sa_session.flush( [ self.galaxy_session ] )
+ self.sa_session.add( self.galaxy_session )
+ self.sa_session.flush()
user = property( get_user, set_user )
def get_user_and_roles( self ):
diff -r 133252175425 -r d872c1e16afb test/functional/test_forms_and_requests.py
--- a/test/functional/test_forms_and_requests.py Tue Nov 03 12:52:00 2009 -0500
+++ b/test/functional/test_forms_and_requests.py Tue Nov 03 12:52:01 2009 -0500
@@ -29,6 +29,7 @@
.filter( galaxy.model.FormDefinitionCurrent.table.c.deleted==False ) \
.order_by( galaxy.model.FormDefinitionCurrent.table.c.create_time.desc() )
for fdc in fdc_list:
+ sa_session.refresh( fdc.latest_form )
if form_name == fdc.latest_form.name:
return fdc.latest_form
return None
diff -r 133252175425 -r d872c1e16afb test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Tue Nov 03 12:52:00 2009 -0500
+++ b/test/functional/test_security_and_libraries.py Tue Nov 03 12:52:01 2009 -0500
@@ -156,7 +156,7 @@
raise AssertionError( '%s not in history id %d default_permissions after they were changed' % ( value.action, latest_history.id ) )
# Add a dataset to the history
self.upload_file( '1.bed' )
- latest_dataset = galaxy.model.Dataset.query().order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
+ latest_dataset = sa_session.query( galaxy.model.Dataset ).order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
# Make sure DatasetPermissionss are correct
if len( latest_dataset.actions ) != len( latest_history.default_permissions ):
raise AssertionError( '%d DatasetPermissionss were created for dataset id %d when it was created ( should have been %d )' % \
diff -r 133252175425 -r d872c1e16afb test/functional/test_user_info.py
--- a/test/functional/test_user_info.py Tue Nov 03 12:52:00 2009 -0500
+++ b/test/functional/test_user_info.py Tue Nov 03 12:52:01 2009 -0500
@@ -14,6 +14,7 @@
.filter( galaxy.model.FormDefinitionCurrent.table.c.deleted==False ) \
.order_by( galaxy.model.FormDefinitionCurrent.table.c.create_time.desc() )
for fdc in fdc_list:
+ sa_session.refresh( fdc.latest_form )
if form_name == fdc.latest_form.name:
return fdc.latest_form
return None
@@ -146,4 +147,4 @@
self.visit_page('forms/manage?show_filter=Deleted')
self.check_page_for_string(form_one_latest.name)
self.logout()
-
\ No newline at end of file
+
diff -r 133252175425 -r d872c1e16afb tools/data_source/microbial_import_code.py
--- a/tools/data_source/microbial_import_code.py Tue Nov 03 12:52:00 2009 -0500
+++ b/tools/data_source/microbial_import_code.py Tue Nov 03 12:52:01 2009 -0500
@@ -123,7 +123,7 @@
data = app.datatypes_registry.change_datatype( data, file_type )
data.init_meta()
data.set_peek()
- app.model.flush()
+ data.flush()
elif fields[0] == "#NewFile":
description = fields[1]
chr = fields[2]
@@ -137,7 +137,7 @@
newdata.flush()
app.security_agent.copy_dataset_permissions( base_dataset.dataset, newdata.dataset )
history.add_dataset( newdata )
- app.model.flush()
+ history.flush()
try:
copyfile(filepath,newdata.file_name)
newdata.info = newdata.name
@@ -148,4 +148,4 @@
newdata.dbkey = dbkey
newdata.init_meta()
newdata.set_peek()
- app.model.flush()
+ newdata.flush()
diff -r 133252175425 -r d872c1e16afb tools/filters/lav_to_bed_code.py
--- a/tools/filters/lav_to_bed_code.py Tue Nov 03 12:52:00 2009 -0500
+++ b/tools/filters/lav_to_bed_code.py Tue Nov 03 12:52:01 2009 -0500
@@ -16,4 +16,3 @@
data.flush()
except:
continue
- app.model.flush()
\ No newline at end of file
diff -r 133252175425 -r d872c1e16afb tools/maf/maf_to_bed_code.py
--- a/tools/maf/maf_to_bed_code.py Tue Nov 03 12:52:00 2009 -0500
+++ b/tools/maf/maf_to_bed_code.py Tue Nov 03 12:52:01 2009 -0500
@@ -21,7 +21,6 @@
output_data.dbkey = dbkey
output_data.name = basic_name + " (" + dbkey + ")"
output_data.flush()
- app.model.flush()
output_data_list.append(output_data)
elif line.startswith("#FILE"):
fields = line.split("\t")
@@ -36,7 +35,6 @@
app.security_agent.copy_dataset_permissions( output_data.dataset, newdata.dataset )
newdata.flush()
history.flush()
- app.model.flush()
try:
move(filepath,newdata.file_name)
newdata.info = newdata.name
@@ -47,7 +45,7 @@
newdata.dbkey = dbkey
newdata.init_meta()
newdata.set_peek()
- app.model.flush()
+ newdata.flush()
output_data_list.append(newdata)
else:
new_stdout = new_stdout + line
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/5b2d593d9aed
changeset: 2957:5b2d593d9aed
user: rc
date: Wed Nov 04 10:04:51 2009 -0500
description:
AMQP messaging server and client files.
diffstat:
scripts/galaxy_messaging/amqp_consumer.py | 94 -------------
scripts/galaxy_messaging/client/amqp_publisher.py | 87 ++++++++++++
scripts/galaxy_messaging/client/galaxy_amq.ini.sample | 32 ++++
scripts/galaxy_messaging/client/report.bat.sample | 1 +
scripts/galaxy_messaging/client/scan.bat.sample | 1 +
scripts/galaxy_messaging/client/scan.sh.sample | 1 +
scripts/galaxy_messaging/client/scanner.py | 92 +++++++++++++
scripts/galaxy_messaging/client/scanner_interface.py | 76 ++++++++++
scripts/galaxy_messaging/galaxydb_interface.py | 151 ---------------------
scripts/galaxy_messaging/server/amqp_consumer.py | 94 +++++++++++++
scripts/galaxy_messaging/server/galaxydb_interface.py | 149 +++++++++++++++++++++
11 files changed, 533 insertions(+), 245 deletions(-)
diffs (829 lines):
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/amqp_consumer.py
--- a/scripts/galaxy_messaging/amqp_consumer.py Wed Nov 04 09:32:09 2009 -0500
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,94 +0,0 @@
-'''
-Galaxy Messaging with AMQP (RabbitMQ)
-Galaxy uses AMQ protocol to receive messages from external sources like
-bar code scanners. Galaxy has been tested against RabbitMQ AMQP implementation.
-For Galaxy to receive messages from a message queue the RabbitMQ server has
-to be set up with a user account and other parameters listed in the [galaxy:amq]
-section in the universe_wsgi.ini config file
-Once the RabbitMQ server has been setup and started with the given parameters,
-this script can be run to receive messages and update the Galaxy database accordingly
-'''
-
-import ConfigParser
-import sys, os
-import optparse
-import xml.dom.minidom
-from galaxydb_interface import GalaxyDbInterface
-
-assert sys.version_info[:2] >= ( 2, 4 )
-new_path = [ os.path.join( os.getcwd(), "lib" ) ]
-new_path.extend( sys.path[1:] ) # remove scripts/ from the path
-sys.path = new_path
-
-from galaxy import eggs
-import pkg_resources
-pkg_resources.require( "amqplib" )
-
-from amqplib import client_0_8 as amqp
-
-import logging
-logging.basicConfig(level=logging.DEBUG)
-log = logging.getLogger( 'GalaxyAMQP' )
-
-
-galaxy_config_file = 'universe_wsgi.ini'
-global dbconnstr
-
-def get_value(dom, tag_name):
- '''
- This method extracts the tag value from the xml message
- '''
- nodelist = dom.getElementsByTagName(tag_name)[0].childNodes
- rc = ""
- for node in nodelist:
- if node.nodeType == node.TEXT_NODE:
- rc = rc + node.data
- return rc
-
-def recv_callback(msg):
- dom = xml.dom.minidom.parseString(msg.body)
- barcode = get_value(dom, 'barcode')
- state = get_value(dom, 'state')
- log.debug('Barcode: '+barcode)
- log.debug('State: '+state)
- # update the galaxy db
- galaxy = GalaxyDbInterface(dbconnstr)
- sample_id = galaxy.get_sample_id(field_name='bar_code', value=barcode)
- if sample_id == -1:
- log.debug('Invalid barcode.')
- return
- galaxy.change_state(sample_id, state)
-
-def main():
- config = ConfigParser.ConfigParser()
- config.read(galaxy_config_file)
- global dbconnstr
- dbconnstr = config.get("app:main", "database_connection")
- amqp_config = {}
- for option in config.options("galaxy:amqp"):
- amqp_config[option] = config.get("galaxy:amqp", option)
- log.debug(str(amqp_config))
- conn = amqp.Connection(host=amqp_config['host']+":"+amqp_config['port'],
- userid=amqp_config['userid'],
- password=amqp_config['password'],
- virtual_host=amqp_config['virtual_host'],
- insist=False)
- chan = conn.channel()
- chan.queue_declare(queue=amqp_config['queue'], durable=True, exclusive=True, auto_delete=False)
- chan.exchange_declare(exchange=amqp_config['exchange'], type="direct", durable=True, auto_delete=False,)
- chan.queue_bind(queue=amqp_config['queue'],
- exchange=amqp_config['exchange'],
- routing_key=amqp_config['routing_key'])
-
- chan.basic_consume(queue=amqp_config['queue'],
- no_ack=True,
- callback=recv_callback,
- consumer_tag="testtag")
- while True:
- chan.wait()
- chan.basic_cancel("testtag")
- chan.close()
- conn.close()
-
-if __name__ == '__main__':
- main()
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/amqp_publisher.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/amqp_publisher.py Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,87 @@
+'''
+This script gets barcode data from a barcode scanner using serial communication
+and sends the state represented by the barcode scanner & the barcode string
+to the Galaxy LIMS RabbitMQ server. The message is sent in XML which has 2 tags,
+barcode & state. The state of the scanner should be set in the galaxy_amq.ini
+file as a configuration variable.
+'''
+
+from amqplib import client_0_8 as amqp
+import ConfigParser
+import sys, os
+import serial
+import array
+import time
+import optparse
+
+
+xml = \
+''' <sample>
+ <barcode>%(BARCODE)s</barcode>
+ <state>%(STATE)s</state>
+ </sample>'''
+
+
+def handle_scan(states, amqp_config, barcode):
+ if states.get(barcode[:2], None):
+ values = dict( BARCODE=barcode[2:],
+ STATE=states.get(barcode[:2]) )
+ print values
+ data = xml % values
+ print data
+ conn = amqp.Connection(host=amqp_config['host']+":"+amqp_config['port'],
+ userid=amqp_config['userid'],
+ password=amqp_config['password'],
+ virtual_host=amqp_config['virtual_host'],
+ insist=False)
+ chan = conn.channel()
+ msg = amqp.Message(data)
+ msg.properties["delivery_mode"] = 2
+ chan.basic_publish(msg,
+ exchange=amqp_config['exchange'],
+ routing_key=amqp_config['routing_key'])
+ chan.close()
+ conn.close()
+
+def recv_data(states, amqp_config, s):
+ while True:
+ bytes = s.inWaiting()
+ if bytes:
+ print '%i bytes recvd' % bytes
+ msg = s.read(bytes)
+ print msg
+ handle_scan(states, amqp_config, msg.strip())
+
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('-c', '--config-file', help='config file with all the AMQP config parameters',
+ dest='config_file', action='store')
+ parser.add_option('-p', '--port', help='Name of the port where the scanner is connected',
+ dest='port', action='store')
+ (opts, args) = parser.parse_args()
+ config = ConfigParser.ConfigParser()
+ config.read(opts.config_file)
+ amqp_config = {}
+ states = {}
+ for option in config.options("galaxy:amqp"):
+ amqp_config[option] = config.get("galaxy:amqp", option)
+ count = 1
+ while True:
+ section = 'scanner%i' % count
+ if config.has_section(section):
+ states[config.get(section, 'prefix')] = config.get(section, 'state')
+ count = count + 1
+ else:
+ break
+ print amqp_config
+ print states
+ s = serial.Serial(int(opts.port))
+ print 'Port %s is open: %s' %( opts.port, s.isOpen())
+ recv_data(states, amqp_config, s)
+ s.close()
+ print 'Port %s is open: %s' %( opts.port, s.isOpen())
+
+
+if __name__ == '__main__':
+ main()
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/galaxy_amq.ini.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/galaxy_amq.ini.sample Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,32 @@
+# Galaxy Message Queue
+# Galaxy uses AMQ protocol to receive messages from external sources like
+# bar code scanners. Galaxy has been tested against RabbitMQ AMQP implementation.
+# For Galaxy to receive messages from a message queue the RabbitMQ server has
+# to be set up with a user account and other parameters listed below. The 'host'
+# and 'port' fields should point to where the RabbitMQ server is running.
+
+#[galaxy:amqp]
+#host = 127.0.0.1
+#port = 5672
+#userid = galaxy
+#password = galaxy
+#virtual_host = galaxy_messaging_engine
+#queue = galaxy_queue
+#exchange = galaxy_exchange
+#routing_key = bar_code_scanner
+
+# The following 'scanner#' section(s) specify the state of the
+# sample each scanner represents. This state name should be one of the
+# possible states created for this request type in Galaxy.
+# If there are multiple scanners attached to this host, then add as many
+# "scanner#" sections below, each with the name & prefix of the bar code
+# scanner and the state it represents.
+#[scanner1]
+#name =
+#state =
+#prefix =
+
+#[scanner2]
+#name =
+#state =
+#prefix =
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/report.bat.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/report.bat.sample Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,1 @@
+python scanner.py -p 2 -c galaxy_amq.ini -r
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/scan.bat.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/scan.bat.sample Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,1 @@
+python amqp_publisher.py -p 2 -c galaxy_amq.ini
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/scan.sh.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/scan.sh.sample Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,1 @@
+python amqp_publisher.py -p 3 -c galaxy_amq.ini
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/scanner.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/scanner.py Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,92 @@
+import sys, os
+import serial
+import array
+import time
+import optparse
+import ConfigParser, logging
+from scanner_interface import ScannerInterface
+
+logging.basicConfig(level=logging.DEBUG)
+log = logging.getLogger( 'Scanner' )
+
+# command prefix: SYN M CR
+cmd = [22, 77, 13]
+response = { 6: 'ACK', 5: 'ENQ', 21: 'NAK' }
+image_scanner_report = 'RPTSCN.'
+get_prefix1 = 'PREBK2?.'
+get_prefix2 = ':4820:PREBK2?.'
+set_prefix = 'PREBK2995859.'
+clear_prefix = 'PRECA2.'
+
+def get_prefix_cmd(name):
+ return ':' + name + ':' + 'PREBK2?.'
+
+def set_prefix_cmd(name, prefix):
+ prefix_str = ''
+ for c in prefix:
+ prefix_str = prefix_str + hex(ord(c))[2:]
+ return ':' + name + ':' + 'PREBK299' + prefix_str + '!'
+
+def read_config_file(config_file):
+ config = ConfigParser.ConfigParser()
+ config.read(config_file)
+ count = 1
+ scanners_list = []
+ while True:
+ section = 'scanner%i' % count
+ if config.has_section(section):
+ scanner = dict(name=config.get(section, 'name'),
+ prefix=config.get(section, 'prefix'),
+ state=config.get(section, 'state'))
+ scanners_list.append(scanner)
+ count = count + 1
+ else:
+ return scanners_list
+
+def main():
+ usage = "python %s -p PORT -c CONFIG_FILE [ OPTION ]" % sys.argv[0]
+ parser = optparse.OptionParser(usage=usage)
+ parser.add_option('-p', '--port', help='Name of the port where the scanner is connected',
+ dest='port', action='store')
+ parser.add_option('-c', '--config-file', help='config file with all the AMQP config parameters',
+ dest='config_file', action='store')
+ parser.add_option('-r', '--report', help='scanner report',
+ dest='report', action='store_true', default=False)
+ parser.add_option('-i', '--install', help='install the scanners',
+ dest='install', action='store_true', default=False)
+ (opts, args) = parser.parse_args()
+ # validate
+ if not opts.port:
+ parser.print_help()
+ sys.exit(0)
+ if ( opts.report or opts.install ) and not opts.config_file:
+ parser.print_help()
+ sys.exit(0)
+
+ # create the scanner interface
+ si = ScannerInterface(opts.port)
+ if opts.install:
+ scanners_list = read_config_file(opts.config_file)
+ for scanner in scanners_list:
+ msg = set_prefix_cmd(scanner['name'], scanner['prefix'])
+ si.send(msg)
+ response = si.recv()
+ if not response:
+ log.error("Scanner %s could not be installed." % scanner['name'])
+ elif opts.report:
+ si.send(image_scanner_report)
+ rep = si.recv()
+ log.info(rep)
+ scanners_list = read_config_file(opts.config_file)
+ for scanner in scanners_list:
+ msg = get_prefix_cmd(scanner['name'])
+ si.send(msg)
+ response = si.recv()
+ if response:
+ log.info('PREFIX for scanner %s: %s' % (scanner['name'], chr(int(response[8:12][:2], 16))+chr(int(response[8:12][2:], 16)) ))
+ si.close()
+
+
+
+if __name__ == "__main__":
+ main()
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/client/scanner_interface.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/client/scanner_interface.py Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,76 @@
+import sys, os
+import serial
+import array
+import time
+import optparse
+import ConfigParser
+import logging
+
+logging.basicConfig(level=logging.INFO)
+log = logging.getLogger( 'ScannerInterface' )
+
+class ScannerInterface( object ):
+ cmdprefix = [22, 77, 13]
+ response = { 6: 'ACK', 5: 'ENQ', 21: 'NAK' }
+
+ def __init__( self, port ):
+ if os.name in ['posix', 'mac']:
+ self.port = port
+ elif os.name == 'nt':
+ self.port = int(port)
+ if self.port:
+ self.open()
+
+ def open(self):
+ try:
+ self.serial_conn = serial.Serial(self.port)
+ except serial.SerialException:
+ log.exception('Unable to open port: %s' % str(self.port))
+ sys.exit(1)
+ log.debug('Port %s is open: %s' %( str(self.port), self.serial_conn.isOpen() ) )
+
+ def is_open(self):
+ return self.serial_conn.isOpen()
+
+ def close(self):
+ self.serial_conn.close()
+ log.debug('Port %s is open: %s' %( str(self.port), self.serial_conn.isOpen() ) )
+
+ def send(self, msg):
+ message = self.cmdprefix + map(ord, msg)
+ byte_array = array.array('B', message)
+ log.debug('Sending message to %s: %s' % ( str(self.port), message) )
+ bytes = self.serial_conn.write( byte_array.tostring() )
+ log.debug('%i bytes out of %i bytes sent to the scanner' % ( bytes, len(message) ) )
+
+ def recv(self):
+ time.sleep(1)
+ self.serial_conn.flush()
+ nbytes = self.serial_conn.inWaiting()
+ log.debug('%i bytes received' % nbytes)
+ if nbytes:
+ msg = self.serial_conn.read(nbytes)
+ byte_array = map(ord, msg)
+ log.debug('Message received [%s]: %s' % (self.response.get(byte_array[len(byte_array)-2], byte_array[len(byte_array)-2]),
+ msg))
+ return msg
+ else:
+ log.error('Error!')
+ return None
+
+ def setup_recv(self, callback):
+ self.recv_callback = callback
+
+ def wait(self):
+ nbytes = self.serial_conn.inWaiting()
+ if nbytes:
+ msg = self.serial_conn.read(nbytes)
+ byte_array = map(ord, msg)
+ log.debug('Message received [%s]: %s' % (self.response.get(byte_array[len(byte_array)-2], byte_array[len(byte_array)-2],
+ msg)))
+ if self.recv_callback:
+ self.recv_callback(msg)
+ return
+
+
+
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/galaxydb_interface.py
--- a/scripts/galaxy_messaging/galaxydb_interface.py Wed Nov 04 09:32:09 2009 -0500
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,151 +0,0 @@
-#/usr/bin/python
-
-from datetime import datetime, timedelta
-import sys
-import optparse
-import os
-import time
-import logging
-
-assert sys.version_info[:2] >= ( 2, 4 )
-new_path = [ os.path.join( os.getcwd(), "lib" ) ]
-new_path.extend( sys.path[1:] ) # remove scripts/ from the path
-sys.path = new_path
-from galaxy import eggs
-import pkg_resources
-pkg_resources.require( "psycopg2" )
-import psycopg2
-pkg_resources.require( "SQLAlchemy >= 0.4" )
-from sqlalchemy import *
-from sqlalchemy.orm import sessionmaker
-
-logging.basicConfig(level=logging.DEBUG)
-log = logging.getLogger( 'GalaxyDbInterface' )
-
-class GalaxyDbInterface(object):
-
- def __init__(self, dbstr):
- self.dbstr = dbstr
- self.db_engine = create_engine(self.dbstr)
-# self.db_engine.echo = True
- self.metadata = MetaData(self.db_engine)
- self.session = sessionmaker(bind=self.db_engine)
- self.event_table = Table('sample_event', self.metadata, autoload=True )
- self.sample_table = Table('sample', self.metadata, autoload=True )
- self.request_table = Table('request', self.metadata, autoload=True )
- self.state_table = Table('sample_state', self.metadata, autoload=True )
-
- def get_sample_id(self, field_name='bar_code', value=None):
- if not value:
- return -1
- sample_id = -1
- if field_name =='name':
- stmt = select(columns=[self.sample_table.c.id],
- whereclause=self.sample_table.c.name==value)
- result = stmt.execute()
- sample_id = result.fetchone()[0]
- elif field_name == 'bar_code':
- stmt = select(columns=[self.sample_table.c.id],
- whereclause=self.sample_table.c.bar_code==value)
- result = stmt.execute()
- x = result.fetchone()
- if x:
- sample_id = x[0]
- log.debug('Sample ID: %i' % sample_id)
- return sample_id
- log.warning('This sample %s %s does not belong to any sample in the database.' % (field_name, value))
- return -1
-
- def current_state(self, sample_id):
- '''
- This method returns the current state of the sample for the given sample_id
- '''
- stmt = select(columns=[self.event_table.c.sample_state_id],
- whereclause=self.event_table.c.sample_id==sample_id,
- order_by=self.event_table.c.update_time.desc())
- result = stmt.execute()
- all_states = result.fetchall()
- current_state_id = all_states[0][0]
- return current_state_id
-
- def all_possible_states(self, sample_id):
- subsubquery = select(columns=[self.sample_table.c.request_id],
- whereclause=self.sample_table.c.id==sample_id)
- self.request_id = subsubquery.execute().fetchall()[0][0]
- log.debug('REQUESTID: %i' % self.request_id)
- subquery = select(columns=[self.request_table.c.request_type_id],
- whereclause=self.request_table.c.id==self.request_id)
- request_type_id = subquery.execute().fetchall()[0][0]
- log.debug('REQUESTTYPEID: %i' % request_type_id)
- query = select(columns=[self.state_table.c.id, self.state_table.c.name],
- whereclause=self.state_table.c.request_type_id==request_type_id,
- order_by=self.state_table.c.id.asc())
- states = query.execute().fetchall()
- log.debug('POSSIBLESTATES: '+ str(states))
- return states
-
- def change_state(self, sample_id, new_state=None):
- '''
- This method changes the state of the sample to the the 'new_state'
- '''
- if not new_state:
- return
- new_state_id = -1
- # find the state_id for this new state in the list of possible states
- possible_states = self.all_possible_states(sample_id)
- for state_id, state_name in possible_states:
- if new_state == state_name:
- new_state_id = state_id
- if new_state_id == -1:
- return
- log.debug('Updating sample_id %i state to %s' % (sample_id, new_state))
- d = timedelta(hours=4)
- i = self.event_table.insert()
- i.execute(update_time=datetime.now()+d,
- create_time=datetime.now()+d,
- sample_id=sample_id,
- sample_state_id=int(new_state_id),
- comment='bar code scanner')
- # if all the samples for this request are in the final state
- # then change the request state to 'Complete'
- result = select(columns=[self.sample_table.c.id],
- whereclause=self.sample_table.c.request_id==self.request_id).execute()
- sample_id_list = result.fetchall()
- request_complete = True
- for sid in sample_id_list:
- current_state_id = self.current_state(sid[0])
- if current_state_id != possible_states[-1][0]:
- request_complete = False
- break
- if request_complete:
- request_state = 'Complete'
- else:
- request_state = 'Submitted'
- log.debug('Updating request_id %i state to "%s"' % (self.request_id, request_state))
- d = timedelta(hours=4)
- i = self.request_table.update(whereclause=self.request_table.c.id==self.request_id,
- values={self.request_table.c.state: request_state})
- i.execute()
-
-
-
-if __name__ == '__main__':
- print '''This file should not be run directly. To start the Galaxy AMQP Listener:
- %sh run_galaxy_listener.sh'''
-# dbstr = 'postgres://postgres:postgres@localhost/galaxy_ft'
-#
-# parser = optparse.OptionParser()
-# parser.add_option('-n', '--name', help='name of the sample field', dest='name', \
-# action='store', default='bar_code')
-# parser.add_option('-v', '--value', help='value of the sample field', dest='value', \
-# action='store')
-# parser.add_option('-s', '--state', help='new state of the sample', dest='state', \
-# action='store')
-# (opts, args) = parser.parse_args()
-#
-# gs = GalaxyDbInterface(dbstr)
-# sample_id = gs.get_sample_id(field_name=opts.name, value=opts.value)
-# gs.change_state(sample_id, opts.state)
-
-
-
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/server/amqp_consumer.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/server/amqp_consumer.py Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,94 @@
+'''
+Galaxy Messaging with AMQP (RabbitMQ)
+Galaxy uses AMQ protocol to receive messages from external sources like
+bar code scanners. Galaxy has been tested against RabbitMQ AMQP implementation.
+For Galaxy to receive messages from a message queue, the RabbitMQ server has
+to be set up with a user account and other parameters listed in the [galaxy:amqp]
+section of the universe_wsgi.ini config file.
+Once the RabbitMQ server has been set up and started with the given parameters,
+this script can be run to receive messages and update the Galaxy database accordingly.
+'''
+
+import ConfigParser
+import sys, os
+import optparse
+import xml.dom.minidom
+from galaxydb_interface import GalaxyDbInterface
+
+assert sys.version_info[:2] >= ( 2, 4 )
+new_path = [ os.path.join( os.getcwd(), "lib" ) ]
+new_path.extend( sys.path[1:] ) # remove scripts/ from the path
+sys.path = new_path
+
+from galaxy import eggs
+import pkg_resources
+pkg_resources.require( "amqplib" )
+
+from amqplib import client_0_8 as amqp
+
+import logging
+logging.basicConfig(level=logging.DEBUG)
+log = logging.getLogger( 'GalaxyAMQP' )
+
+
+galaxy_config_file = 'universe_wsgi.ini'
+global dbconnstr
+
+def get_value(dom, tag_name):
+ '''
+ This method extracts the tag value from the xml message
+ '''
+ nodelist = dom.getElementsByTagName(tag_name)[0].childNodes
+ rc = ""
+ for node in nodelist:
+ if node.nodeType == node.TEXT_NODE:
+ rc = rc + node.data
+ return rc
+
+def recv_callback(msg):
+ dom = xml.dom.minidom.parseString(msg.body)
+ barcode = get_value(dom, 'barcode')
+ state = get_value(dom, 'state')
+ log.debug('Barcode: '+barcode)
+ log.debug('State: '+state)
+ # update the galaxy db
+ galaxy = GalaxyDbInterface(dbconnstr)
+ sample_id = galaxy.get_sample_id(field_name='bar_code', value=barcode)
+ if sample_id == -1:
+ log.debug('Invalid barcode.')
+ return
+ galaxy.change_state(sample_id, state)
+
+def main():
+ config = ConfigParser.ConfigParser()
+ config.read(galaxy_config_file)
+ global dbconnstr
+ dbconnstr = config.get("app:main", "database_connection")
+ amqp_config = {}
+ for option in config.options("galaxy:amqp"):
+ amqp_config[option] = config.get("galaxy:amqp", option)
+ log.debug(str(amqp_config))
+ conn = amqp.Connection(host=amqp_config['host']+":"+amqp_config['port'],
+ userid=amqp_config['userid'],
+ password=amqp_config['password'],
+ virtual_host=amqp_config['virtual_host'],
+ insist=False)
+ chan = conn.channel()
+ chan.queue_declare(queue=amqp_config['queue'], durable=True, exclusive=True, auto_delete=False)
+ chan.exchange_declare(exchange=amqp_config['exchange'], type="direct", durable=True, auto_delete=False,)
+ chan.queue_bind(queue=amqp_config['queue'],
+ exchange=amqp_config['exchange'],
+ routing_key=amqp_config['routing_key'])
+
+ chan.basic_consume(queue=amqp_config['queue'],
+ no_ack=True,
+ callback=recv_callback,
+ consumer_tag="testtag")
+ while True:
+ chan.wait()
+ chan.basic_cancel("testtag")
+ chan.close()
+ conn.close()
+
+if __name__ == '__main__':
+ main()
\ No newline at end of file
diff -r 984b1eb6c428 -r 5b2d593d9aed scripts/galaxy_messaging/server/galaxydb_interface.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/galaxy_messaging/server/galaxydb_interface.py Wed Nov 04 10:04:51 2009 -0500
@@ -0,0 +1,149 @@
+#!/usr/bin/python
+
+from datetime import datetime
+import sys
+import optparse
+import os
+import time
+import logging
+
+assert sys.version_info[:2] >= ( 2, 4 )
+new_path = [ os.path.join( os.getcwd(), "lib" ) ]
+new_path.extend( sys.path[1:] ) # remove scripts/ from the path
+sys.path = new_path
+from galaxy import eggs
+import pkg_resources
+pkg_resources.require( "psycopg2" )
+import psycopg2
+pkg_resources.require( "SQLAlchemy >= 0.4" )
+from sqlalchemy import *
+from sqlalchemy.orm import sessionmaker
+
+logging.basicConfig(level=logging.DEBUG)
+log = logging.getLogger( 'GalaxyDbInterface' )
+
+class GalaxyDbInterface(object):
+
+ def __init__(self, dbstr):
+ self.dbstr = dbstr
+ self.db_engine = create_engine(self.dbstr)
+# self.db_engine.echo = True
+ self.metadata = MetaData(self.db_engine)
+ self.session = sessionmaker(bind=self.db_engine)
+ self.event_table = Table('sample_event', self.metadata, autoload=True )
+ self.sample_table = Table('sample', self.metadata, autoload=True )
+ self.request_table = Table('request', self.metadata, autoload=True )
+ self.state_table = Table('sample_state', self.metadata, autoload=True )
+
+ def get_sample_id(self, field_name='bar_code', value=None):
+ if not value:
+ return -1
+ sample_id = -1
+ if field_name =='name':
+ stmt = select(columns=[self.sample_table.c.id],
+ whereclause=self.sample_table.c.name==value)
+ result = stmt.execute()
+ sample_id = result.fetchone()[0]
+ elif field_name == 'bar_code':
+ stmt = select(columns=[self.sample_table.c.id],
+ whereclause=self.sample_table.c.bar_code==value)
+ result = stmt.execute()
+ x = result.fetchone()
+ if x:
+ sample_id = x[0]
+ log.debug('Sample ID: %i' % sample_id)
+ return sample_id
+ log.warning('This sample %s %s does not belong to any sample in the database.' % (field_name, value))
+ return -1
+
+ def current_state(self, sample_id):
+ '''
+ This method returns the current state of the sample for the given sample_id
+ '''
+ stmt = select(columns=[self.event_table.c.sample_state_id],
+ whereclause=self.event_table.c.sample_id==sample_id,
+ order_by=self.event_table.c.update_time.desc())
+ result = stmt.execute()
+ all_states = result.fetchall()
+ current_state_id = all_states[0][0]
+ return current_state_id
+
+ def all_possible_states(self, sample_id):
+ subsubquery = select(columns=[self.sample_table.c.request_id],
+ whereclause=self.sample_table.c.id==sample_id)
+ self.request_id = subsubquery.execute().fetchall()[0][0]
+ log.debug('REQUESTID: %i' % self.request_id)
+ subquery = select(columns=[self.request_table.c.request_type_id],
+ whereclause=self.request_table.c.id==self.request_id)
+ request_type_id = subquery.execute().fetchall()[0][0]
+ log.debug('REQUESTTYPEID: %i' % request_type_id)
+ query = select(columns=[self.state_table.c.id, self.state_table.c.name],
+ whereclause=self.state_table.c.request_type_id==request_type_id,
+ order_by=self.state_table.c.id.asc())
+ states = query.execute().fetchall()
+ log.debug('POSSIBLESTATES: '+ str(states))
+ return states
+
+ def change_state(self, sample_id, new_state=None):
+ '''
+        This method changes the state of the sample to the 'new_state'
+ '''
+ if not new_state:
+ return
+ new_state_id = -1
+ # find the state_id for this new state in the list of possible states
+ possible_states = self.all_possible_states(sample_id)
+ for state_id, state_name in possible_states:
+ if new_state == state_name:
+ new_state_id = state_id
+ if new_state_id == -1:
+ return
+ log.debug('Updating sample_id %i state to %s' % (sample_id, new_state))
+ i = self.event_table.insert()
+ i.execute(update_time=datetime.utcnow(),
+ create_time=datetime.utcnow(),
+ sample_id=sample_id,
+ sample_state_id=int(new_state_id),
+ comment='bar code scanner')
+ # if all the samples for this request are in the final state
+ # then change the request state to 'Complete'
+ result = select(columns=[self.sample_table.c.id],
+ whereclause=self.sample_table.c.request_id==self.request_id).execute()
+ sample_id_list = result.fetchall()
+ request_complete = True
+ for sid in sample_id_list:
+ current_state_id = self.current_state(sid[0])
+ if current_state_id != possible_states[-1][0]:
+ request_complete = False
+ break
+ if request_complete:
+ request_state = 'Complete'
+ else:
+ request_state = 'Submitted'
+ log.debug('Updating request_id %i state to "%s"' % (self.request_id, request_state))
+ i = self.request_table.update(whereclause=self.request_table.c.id==self.request_id,
+ values={self.request_table.c.state: request_state})
+ i.execute()
+
+
+
+if __name__ == '__main__':
+ print '''This file should not be run directly. To start the Galaxy AMQP Listener:
+ %sh run_galaxy_listener.sh'''
+ dbstr = 'postgres://postgres:postgres@localhost/galaxy_uft'
+
+ parser = optparse.OptionParser()
+ parser.add_option('-n', '--name', help='name of the sample field', dest='name', \
+ action='store', default='bar_code')
+ parser.add_option('-v', '--value', help='value of the sample field', dest='value', \
+ action='store')
+ parser.add_option('-s', '--state', help='new state of the sample', dest='state', \
+ action='store')
+ (opts, args) = parser.parse_args()
+
+ gs = GalaxyDbInterface(dbstr)
+ sample_id = gs.get_sample_id(field_name=opts.name, value=opts.value)
+ gs.change_state(sample_id, opts.state)
+
+
+
1
0

07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/6491acd0bef2
changeset: 2952:6491acd0bef2
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Nov 03 15:39:51 2009 -0500
description:
Fix a bug in streaming library tarballs. Quoth James:
This bears repeating (heh, again... -nate) -- NEVER use a mutable object (like
a dict) as a function parameter default. The default is evaluated once (at def
time) and then used for every call to the function.
This is really bad since every call, across multiple thread, are writing to
that one dict. In this case, it resulted in the values of parameters that have
dependencies being persisted across requests. Took me three solid days to
find!
diffstat:
lib/galaxy/util/streamball.py | 6 ++++--
1 files changed, 4 insertions(+), 2 deletions(-)
diffs (17 lines):
diff -r 2300a80d80e5 -r 6491acd0bef2 lib/galaxy/util/streamball.py
--- a/lib/galaxy/util/streamball.py Tue Nov 03 13:04:35 2009 -0500
+++ b/lib/galaxy/util/streamball.py Tue Nov 03 15:39:51 2009 -0500
@@ -7,9 +7,11 @@
log = logging.getLogger( __name__ )
class StreamBall( object ):
- def __init__( self, mode, members={} ):
+ def __init__( self, mode, members=None ):
+ self.members = members
+ if members is None:
+ self.members = {}
self.mode = mode
- self.members = members
self.wsgi_status = None
self.wsgi_headeritems = None
def add( self, file, relpath ):
1
0

07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/984b1eb6c428
changeset: 2956:984b1eb6c428
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Wed Nov 04 09:32:09 2009 -0500
description:
When reseting password, if an email has not yet been provided, make text box empty and not contain 'None'
diffstat:
lib/galaxy/web/controllers/user.py | 2 ++
1 files changed, 2 insertions(+), 0 deletions(-)
diffs (12 lines):
diff -r d7c66019de13 -r 984b1eb6c428 lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Tue Nov 03 19:16:01 2009 -0500
+++ b/lib/galaxy/web/controllers/user.py Wed Nov 04 09:32:09 2009 -0500
@@ -571,6 +571,8 @@
return trans.show_ok_message( "Password has been reset and emailed to: %s. <a href='%s'>Click here</a> to return to the login form." % ( email, web.url_for( action='login' ) ) )
elif email != None:
error = "The specified user does not exist"
+ elif email is None:
+ email = ""
return trans.show_form(
web.FormBuilder( web.url_for(), "Reset Password", submit_text="Submit" )
.add_text( "email", "Email", value=email, error=error ) )
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/de764999c5af
changeset: 2948:de764999c5af
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Tue Nov 03 13:00:36 2009 -0500
description:
Pack recently modified scripts.
diffstat:
static/scripts/packed/autocomplete_tagging.js | 2 +-
static/scripts/packed/trackster.js | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diffs (16 lines):
diff -r f776fa6045ba -r de764999c5af static/scripts/packed/autocomplete_tagging.js
--- a/static/scripts/packed/autocomplete_tagging.js Tue Nov 03 12:58:13 2009 -0500
+++ b/static/scripts/packed/autocomplete_tagging.js Tue Nov 03 13:00:36 2009 -0500
@@ -1,1 +1,1 @@
-var ac_tag_area_id_gen=1;jQuery.fn.autocomplete_tagging=function(c){var e={get_toggle_link_text_fn:function(u){var w="";var v=o(u);if(v!=0){w=v+(v!=0?" Tags":" Tag")}else{w="Add tags"}return w},tag_click_fn:function(u,v){},editable:true,input_size:20,in_form:false,tags:{},use_toggle_link:true,item_id:"",add_tag_img:"",add_tag_img_rollover:"",delete_tag_img:"",ajax_autocomplete_tag_url:"",ajax_retag_url:"",ajax_delete_tag_url:"",ajax_add_tag_url:""};var p=jQuery.extend(e,c);var k="tag-area-"+(ac_tag_area_id_gen)++;var m=$("<div>").attr("id",k).addClass("tag-area");this.append(m);var o=function(u){if(u.length){return u.length}var v=0;for(element in u){v++}return v};var b=function(){var u=p.get_toggle_link_text_fn(p.tags);var v=$("<a href='/history/tags'>").text(u).addClass("toggle-link");v.click(function(){var w=(m.css("display")=="none");var x;if(w){x=function(){var y=o(p.tags);if(y==0){m.click()}}}else{x=function(){m.blur()}}m.slideToggle("fast",x);return false});return v};v
ar s=b();if(p.use_toggle_link){this.prepend(s)}var t=function(u){var v=new Array();for(key in u){v[v.length]=key+"-->"+u[key]}return"{"+v.join(",")+"}"};var a=function(v,u){return v+((u!=""&&u)?":"+u:"")};var h=function(u){return u.split(":")};var i=function(u){var v=$("<img src='"+p.add_tag_img+"' rollover='"+p.add_tag_img_rollover+"'/>").addClass("add-tag-button");v.click(function(){$(this).hide();m.click();return false});return v};var j=function(u){var v=$("<img src='"+p.delete_tag_img+"'/>").addClass("delete-tag-img");v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)});v.click(function(){var D=$(this).parent();var C=D.find(".tag-name").eq(0);var B=C.text();var z=h(B);var F=z[0];var y=z[1];var E=D.prev();D.remove();delete p.tags[F];var A=p.get_toggle_link_text_fn(p.tags);s.text(A);$.ajax({url:p.ajax_delete_tag_url,data:{tag_name:F},error:function(){p.tags[F]=y;if(E.hasClass("tag-button")){E
.after(D)}else{m.prepend(D)}var G=p.get_toggle_link_text_fn(p.tags);alert("Remove tag failed");s.text(G);v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)})},success:function(){}});return true});var w=$("<span>").text(u).addClass("tag-name");w.click(function(){tag_name_and_value=u.split(":");p.tag_click_fn(tag_name_and_value[0],tag_name_and_value[1]);return true});var x=$("<span></span>").addClass("tag-button");x.append(w);if(p.editable){x.append(v)}return x};var d=function(v){var u;if(p.in_form){u=$("<textarea id='history-tag-input' rows='1' cols='"+p.input_size+"' value='"+escape(v)+"'></textarea>")}else{u=$("<input id='history-tag-input' type='text' size='"+p.input_size+"' value='"+escape(v)+"'></input>")}u.keyup(function(D){if(D.keyCode==27){$(this).trigger("blur")}else{if((D.keyCode==13)||(D.keyCode==188)||(D.keyCode==32)){new_value=this.value;if(return_key_pressed_for_autocomplete==true)
{return_key_pressed_for_autocomplete=false;return false}if(new_value.indexOf(": ",new_value.length-2)!=-1){this.value=new_value.substring(0,new_value.length-1);return false}if((D.keyCode==188)||(D.keyCode==32)){new_value=new_value.substring(0,new_value.length-1)}new_value=new_value.replace(/^\s+|\s+$/g,"");if(new_value.length<3){return false}this.value="";var A=j(new_value);var z=m.children(".tag-button");if(z.length!=0){var E=z.slice(z.length-1);E.after(A)}else{m.prepend(A)}var y=new_value.split(":");p.tags[y[0]]=y[1];var B=p.get_toggle_link_text_fn(p.tags);s.text(B);var C=$(this);$.ajax({url:p.ajax_add_tag_url,data:{new_tag:new_value},error:function(){A.remove();delete p.tags[y[0]];var F=p.get_toggle_link_text_fn(p.tags);s.text(F);alert("Add tag failed")},success:function(){C.flushCache()}});return false}}});var w=function(A,z,y,C,B){tag_name_and_value=C.split(":");return(tag_name_and_value.length==1?tag_name_and_value[0]:tag_name_and_value[1])};var x={selectFirst:false,fo
rmatItem:w,autoFill:false,highlight:false};u.autocomplete(p.ajax_autocomplete_tag_url,x);u.addClass("tag-input");return u};for(tag_name in p.tags){var q=p.tags[tag_name];var l=a(tag_name,q);var g=j(l,s,p.tags);m.append(g)}var n=d("");var f=i(n);m.blur(function(u){r=o(p.tags);if(r!=0){f.show();n.hide();m.removeClass("active-tag-area")}else{}});if(p.editable){m.append(f);m.append(n);n.hide();m.click(function(w){var v=$(this).hasClass("active-tag-area");if($(w.target).hasClass("delete-tag-img")&&!v){return false}if($(w.target).hasClass("tag-name")&&!v){return false}$(this).addClass("active-tag-area");f.hide();n.show();n.focus();var u=function(y){var x=m.attr("id");if(($(y.target).attr("id")!=x)&&($(y.target).parents().filter(x).length==0)){m.blur();$(document).unbind("click",u)}};$(window).click(u);return false})}if(p.use_toggle_link){m.hide()}else{var r=o(p.tags);if(r==0){f.hide();n.show()}}return this.addClass("tag-element")};
\ No newline at end of file
+var ac_tag_area_id_gen=1;jQuery.fn.autocomplete_tagging=function(c){var e={get_toggle_link_text_fn:function(u){var w="";var v=o(u);if(v!=0){w=v+(v!=0?" Tags":" Tag")}else{w="Add tags"}return w},tag_click_fn:function(u,v){},editable:true,input_size:20,in_form:false,tags:{},use_toggle_link:true,item_id:"",add_tag_img:"",add_tag_img_rollover:"",delete_tag_img:"",ajax_autocomplete_tag_url:"",ajax_retag_url:"",ajax_delete_tag_url:"",ajax_add_tag_url:""};var p=jQuery.extend(e,c);var k="tag-area-"+(ac_tag_area_id_gen)++;var m=$("<div>").attr("id",k).addClass("tag-area");this.append(m);var o=function(u){if(u.length){return u.length}var v=0;for(element in u){v++}return v};var b=function(){var u=p.get_toggle_link_text_fn(p.tags);var v=$("<a href='/history/tags'>").text(u).addClass("toggle-link");v.click(function(){var w=(m.css("display")=="none");var x;if(w){x=function(){var y=o(p.tags);if(y==0){m.click()}}}else{x=function(){m.blur()}}m.slideToggle("fast",x);return false});return v};v
ar s=b();if(p.use_toggle_link){this.prepend(s)}var t=function(u){var v=new Array();for(key in u){v[v.length]=key+"-->"+u[key]}return"{"+v.join(",")+"}"};var a=function(v,u){return v+((u!=""&&u)?":"+u:"")};var h=function(u){return u.split(":")};var i=function(u){var v=$("<img src='"+p.add_tag_img+"' rollover='"+p.add_tag_img_rollover+"'/>").addClass("add-tag-button");v.click(function(){$(this).hide();m.click();return false});return v};var j=function(u){var v=$("<img src='"+p.delete_tag_img+"'/>").addClass("delete-tag-img");v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)});v.click(function(){var D=$(this).parent();var C=D.find(".tag-name").eq(0);var B=C.text();var z=h(B);var F=z[0];var y=z[1];var E=D.prev();D.remove();delete p.tags[F];var A=p.get_toggle_link_text_fn(p.tags);s.text(A);$.ajax({url:p.ajax_delete_tag_url,data:{tag_name:F},error:function(){p.tags[F]=y;if(E.hasClass("tag-button")){E
.after(D)}else{m.prepend(D)}var G=p.get_toggle_link_text_fn(p.tags);alert("Remove tag failed");s.text(G);v.mouseenter(function(){$(this).attr("src",p.delete_tag_img_rollover)});v.mouseleave(function(){$(this).attr("src",p.delete_tag_img)})},success:function(){}});return true});var w=$("<span>").text(u).addClass("tag-name");w.click(function(){tag_name_and_value=u.split(":");p.tag_click_fn(tag_name_and_value[0],tag_name_and_value[1]);return true});var x=$("<span></span>").addClass("tag-button");x.append(w);if(p.editable){x.append(v)}return x};var d=function(v){var u;if(p.in_form){u=$("<textarea id='history-tag-input' rows='1' cols='"+p.input_size+"' value='"+escape(v)+"'></textarea>")}else{u=$("<input id='history-tag-input' type='text' size='"+p.input_size+"' value='"+escape(v)+"'></input>")}u.keyup(function(D){if(D.keyCode==27){$(this).trigger("blur")}else{if((D.keyCode==13)||(D.keyCode==188)||(D.keyCode==32)){new_value=this.value;if(return_key_pressed_for_autocomplete==true)
{return_key_pressed_for_autocomplete=false;return false}if(new_value.indexOf(": ",new_value.length-2)!=-1){this.value=new_value.substring(0,new_value.length-1);return false}if((D.keyCode==188)||(D.keyCode==32)){new_value=new_value.substring(0,new_value.length-1)}new_value=new_value.replace(/^\s+|\s+$/g,"");if(new_value.length<2){return false}this.value="";var A=j(new_value);var z=m.children(".tag-button");if(z.length!=0){var E=z.slice(z.length-1);E.after(A)}else{m.prepend(A)}var y=new_value.split(":");p.tags[y[0]]=y[1];var B=p.get_toggle_link_text_fn(p.tags);s.text(B);var C=$(this);$.ajax({url:p.ajax_add_tag_url,data:{new_tag:new_value},error:function(){A.remove();delete p.tags[y[0]];var F=p.get_toggle_link_text_fn(p.tags);s.text(F);alert("Add tag failed")},success:function(){C.flushCache()}});return false}}});var w=function(A,z,y,C,B){tag_name_and_value=C.split(":");return(tag_name_and_value.length==1?tag_name_and_value[0]:tag_name_and_value[1])};var x={selectFirst:false,fo
rmatItem:w,autoFill:false,highlight:false};u.autocomplete(p.ajax_autocomplete_tag_url,x);u.addClass("tag-input");return u};for(tag_name in p.tags){var q=p.tags[tag_name];var l=a(tag_name,q);var g=j(l,s,p.tags);m.append(g)}var n=d("");var f=i(n);m.blur(function(u){r=o(p.tags);if(r!=0){f.show();n.hide();m.removeClass("active-tag-area")}else{}});if(p.editable){m.append(f);m.append(n);n.hide();m.click(function(w){var v=$(this).hasClass("active-tag-area");if($(w.target).hasClass("delete-tag-img")&&!v){return false}if($(w.target).hasClass("tag-name")&&!v){return false}$(this).addClass("active-tag-area");f.hide();n.show();n.focus();var u=function(y){var x=m.attr("id");if(($(y.target).attr("id")!=x)&&($(y.target).parents().filter(x).length==0)){m.blur();$(document).unbind("click",u)}};$(window).click(u);return false})}if(p.use_toggle_link){m.hide()}else{var r=o(p.tags);if(r==0){f.hide();n.show()}}return this.addClass("tag-element")};
\ No newline at end of file
diff -r f776fa6045ba -r de764999c5af static/scripts/packed/trackster.js
--- a/static/scripts/packed/trackster.js Tue Nov 03 12:58:13 2009 -0500
+++ b/static/scripts/packed/trackster.js Tue Nov 03 13:00:36 2009 -0500
@@ -1,1 +1,1 @@
-var DENSITY=1000,DATA_ERROR="There was an error in indexing this dataset.",DATA_NONE="No data for this chrom/contig.",DATA_PENDING="Currently indexing... please wait",DATA_LOADING="Loading data...",CACHED_TILES=5,CACHED_DATA=20,CONTEXT=$("<canvas></canvas>").get(0).getContext("2d"),RIGHT_STRAND,LEFT_STRAND;var right_img=new Image();right_img.src="../images/visualization/strand_right.png";right_img.onload=function(){RIGHT_STRAND=CONTEXT.createPattern(right_img,"repeat")};var left_img=new Image();left_img.src="../images/visualization/strand_left.png";left_img.onload=function(){LEFT_STRAND=CONTEXT.createPattern(left_img,"repeat")};var right_img_inv=new Image();right_img_inv.src="../images/visualization/strand_right_inv.png";right_img_inv.onload=function(){RIGHT_STRAND_INV=CONTEXT.createPattern(right_img_inv,"repeat")};var left_img_inv=new Image();left_img_inv.src="../images/visualization/strand_left_inv.png";left_img_inv.onload=function(){LEFT_STRAND_INV=CONTEXT.createPattern(l
eft_img_inv,"repeat")};function commatize(b){b+="";var a=/(\d+)(\d{3})/;while(a.test(b)){b=b.replace(a,"$1,$2")}return b}var Cache=function(a){this.num_elements=a;this.obj_cache={};this.key_ary=[]};$.extend(Cache.prototype,{get:function(b){var a=this.key_ary.indexOf(b);if(a!=-1){this.key_ary.splice(a,1);this.key_ary.push(b)}return this.obj_cache[b]},set:function(b,c){if(!this.obj_cache[b]){if(this.key_ary.length>=this.num_elements){var a=this.key_ary.shift();delete this.obj_cache[a]}this.key_ary.push(b)}this.obj_cache[b]=c;return c}});var View=function(b,a){this.chrom=b;this.tracks=[];this.max_low=0;this.max_high=a;this.center=(this.max_high-this.max_low)/2;this.span=this.max_high-this.max_low;this.zoom_factor=2;this.zoom_level=0};$.extend(View.prototype,{add_track:function(a){a.view=this;this.tracks.push(a);if(a.init){a.init()}},redraw:function(){var d=this.span/Math.pow(this.zoom_factor,this.zoom_level),b=this.center-(d/2),e=b+d;if(b<0){b=0;e=b+d}else{if(e>this.max_high){e
=this.max_high;b=e-d}}this.low=Math.floor(b);this.high=Math.ceil(e);this.center=Math.round(this.low+(this.high-this.low)/2);$("#overview-box").css({left:(this.low/this.span)*$("#overview-viewport").width(),width:Math.max(12,((this.high-this.low)/this.span)*$("#overview-viewport").width())}).show();$("#low").val(commatize(this.low));$("#high").val(commatize(this.high));for(var c=0,a=this.tracks.length;c<a;c++){this.tracks[c].draw()}$("#bottom-spacer").remove();$("#viewport").append('<div id="bottom-spacer" style="height: 200px;"></div>')},zoom_in:function(a){if(this.max_high===0||this.high-this.low<30){return}if(a){this.center=a/$(document).width()*(this.high-this.low)+this.low}this.zoom_level+=1;this.redraw()},zoom_out:function(){if(this.max_high===0){return}if(this.zoom_level<=0){this.zoom_level=0;return}this.zoom_level-=1;this.redraw()}});var Track=function(a,b){this.name=a;this.parent_element=b;this.make_container()};$.extend(Track.prototype,{make_container:function(){thi
s.header_div=$("<div class='track-header'>").text(this.name);this.content_div=$("<div class='track-content'>");this.container_div=$("<div class='track'></div>").append(this.header_div).append(this.content_div);this.parent_element.append(this.container_div)}});var TiledTrack=function(){this.tile_cache=new Cache(CACHED_TILES)};$.extend(TiledTrack.prototype,Track.prototype,{draw:function(){var h=this.view.low,d=this.view.high,e=d-h;var c=Math.pow(10,Math.ceil(Math.log(e/DENSITY)/Math.log(10)));c=Math.max(c,0.1);c=Math.min(c,1000000);var j=$("<div style='position: relative;'></div>");this.content_div.children(":first").remove();this.content_div.append(j);var k=this.content_div.width()/e;var g;var a=Math.floor(h/c/DENSITY);while((a*DENSITY*c)<d){var i=this.view.zoom_level+"_"+a;var b=this.tile_cache.get(i);if(b){var f=a*DENSITY*c;b.css({left:(f-this.view.low)*k});j.append(b)}else{g=this.draw_tile(c,a,j,k)}if(g){this.tile_cache.set(i,g)}a+=1}}});var LabelTrack=function(a){Track.ca
ll(this,null,a);this.container_div.addClass("label-track")};$.extend(LabelTrack.prototype,Track.prototype,{draw:function(){var c=this.view,d=c.high-c.low,g=Math.floor(Math.pow(10,Math.floor(Math.log(d)/Math.log(10)))),a=Math.floor(c.low/g)*g,e=this.content_div.width(),b=$("<div style='position: relative; height: 1.3em;'></div>");while(a<c.high){var f=(a-c.low)/d*e;b.append($("<div class='label'>"+commatize(a)+"</div>").css({position:"absolute",left:f-1}));a+=g}this.content_div.children(":first").remove();this.content_div.append(b)}});var LineTrack=function(c,b,a){Track.call(this,c,$("#viewport"));TiledTrack.call(this);this.track_type="line";this.height_px=(a?a:100);this.container_div.addClass("line-track");this.dataset_id=b;this.cache=new Cache(CACHED_DATA)};$.extend(LineTrack.prototype,TiledTrack.prototype,{init:function(){var a=this;a.content_div.text(DATA_LOADING);$.getJSON(data_url,{stats:true,track_type:a.track_type,chrom:a.view.chrom,low:null,high:null,dataset_id:a.dat
aset_id},function(c){if(!c||c=="error"){a.container_div.addClass("error");a.content_div.text(DATA_ERROR)}else{if(c=="no data"){a.container_div.addClass("nodata");a.content_div.text(DATA_NONE)}else{if(c=="pending"){a.container_div.addClass("pending");a.content_div.text(DATA_PENDING);setTimeout(function(){a.init()},5000)}else{a.content_div.text("");a.content_div.css("height",a.height_px+"px");a.min_value=c.min;a.max_value=c.max;a.vertical_range=a.max_value-a.min_value;var d=$("<div class='yaxislabel'>"+a.min_value+"</div>");var b=$("<div class='yaxislabel'>"+a.max_value+"</div>");b.css({position:"relative",top:"35px"});b.prependTo(a.container_div);d.css({position:"relative",top:a.height_px+32+"px",});d.prependTo(a.container_div);a.draw()}}}})},get_data:function(d,b){var c=this,a=b*DENSITY*d,f=(b+1)*DENSITY*d,e=d+"_"+b;$.getJSON(data_url,{track_type:this.track_type,chrom:this.view.chrom,low:a,high:f,dataset_id:this.dataset_id},function(g){c.cache[e]=g;$(document).trigger("redra
w")})},draw_tile:function(d,a,m,o){if(!this.vertical_range){return}var h=a*DENSITY*d,b=DENSITY*d,c=$("<canvas class='tile'></canvas>"),l=d+"_"+a;if(!this.cache[l]){this.get_data(d,a);return}var g=this.cache[l];c.css({position:"absolute",top:0,left:(h-this.view.low)*o});c.get(0).width=Math.ceil(b*o);c.get(0).height=this.height_px;var n=c.get(0).getContext("2d");var e=false;n.beginPath();for(var f=0;f<g.length-1;f++){var k=g[f][0]-h;var j=g[f][1];if(isNaN(j)){e=false}else{k=k*o;j=(j-this.min_value)/this.vertical_range*this.height_px;if(e){n.lineTo(k,j)}else{n.moveTo(k,j);e=true}}}n.stroke();m.append(c);return c}});var FeatureTrack=function(c,b,a){Track.call(this,c,$("#viewport"));TiledTrack.call(this);this.track_type="feature";this.height_px=(a?a:100);this.container_div.addClass("feature-track");this.dataset_id=b;this.zo_slots={};this.show_labels_scale=0.001;this.showing_labels=false;this.vertical_gap=10;this.base_color="#2C3143"};$.extend(FeatureTrack.prototype,TiledTrack.pro
totype,{init:function(){var a=this;a.content_div.text(DATA_LOADING);$.getJSON(data_url,{track_type:a.track_type,low:a.view.max_low,high:a.view.max_high,dataset_id:a.dataset_id,chrom:a.view.chrom},function(b){if(b=="error"){a.container_div.addClass("error");a.content_div.text(DATA_ERROR)}else{if(b.length===0||b=="no data"){a.container_div.addClass("nodata");a.content_div.text(DATA_NONE)}else{if(b=="pending"){a.container_div.addClass("pending");a.content_div.text(DATA_PENDING);setTimeout(function(){a.init()},5000)}else{a.content_div.text("");a.content_div.css("height",a.height_px+"px");a.values=b;a.calc_slots();a.slots=a.zo_slots;a.draw()}}}})},calc_slots:function(o){var c=[],b=this.container_div.width()/(this.view.high-this.view.low),g=this.show_labels_scale,a=this.view.max_high,e=this.view.max_low;if(o){this.zi_slots={}}var m=$("<canvas></canvas>").get(0).getContext("2d");for(var f=0,h=this.values.length;f<h;f++){var k,l,n=this.values[f];if(o){k=Math.floor(Math.max(e,(n.star
t-e)*g));k-=m.measureText(n.name).width;l=Math.ceil(Math.min(a,(n.end-e)*g))}else{k=Math.floor(Math.max(e,(n.start-e)*b));l=Math.ceil(Math.min(a,(n.end-e)*b))}var d=0;while(true){if(c[d]===undefined||c[d]<k){c[d]=l;if(o){this.zi_slots[n.name]=d}else{this.zo_slots[n.name]=d}break}d++}}this.height_px=c.length*this.vertical_gap+15;this.content_div.css("height",this.height_px+"px")},draw_tile:function(w,B,g,n){if(!this.values){return null}if(n>this.show_labels_scale&&!this.showing_labels){this.showing_labels=true;if(!this.zi_slots){this.calc_slots(true)}this.slots=this.zi_slots}else{if(n<=this.show_labels_scale&&this.showing_labels){this.showing_labels=false;this.slots=this.zo_slots}}var C=B*DENSITY*w,c=(B+1)*DENSITY*w,q=DENSITY*w;var u=Math.ceil(q*n),t=this.height_px,s=$("<canvas class='tile'></canvas>");s.css({position:"absolute",top:0,left:(C-this.view.low)*n});s.get(0).width=u;s.get(0).height=t;var v=s.get(0).getContext("2d");v.fillStyle=this.base_color;v.font="10px monospac
e";v.textAlign="right";var y=0;for(var z=0,A=this.values.length;z<A;z++){var f=this.values[z];if(f.start<=c&&f.end>=C){var e=Math.floor(Math.max(0,(f.start-C)*n)),h=Math.ceil(Math.min(u,(f.end-C)*n)),d=this.slots[f.name]*this.vertical_gap;var a,G,b=null,o=null;if(f.thick_start&&f.thick_end){b=Math.floor(Math.max(0,(f.thick_start-C)*n));o=Math.ceil(Math.min(u,(f.thick_end-C)*n))}if(!this.showing_labels){v.fillRect(e,d+5,h-e,1)}else{if(v.fillText){v.fillText(f.name,e-1,d+8)}var E=f.blocks;if(E){if(f.strand){if(f.strand=="+"){v.fillStyle=RIGHT_STRAND}else{if(f.strand=="-"){v.fillStyle=LEFT_STRAND}}v.fillRect(e,d,h-e,10);v.fillStyle=this.base_color}for(var x=0,F=E.length;x<F;x++){var m=E[x],l=Math.floor(Math.max(0,(m[0]-C)*n)),D=Math.ceil(Math.min(u,(m[1]-C)*n));a=5;G=3;v.fillRect(l,d+G,D-l,a);if(b&&(l<o||D>b)){a=9;G=1;var r=Math.max(l,b),p=Math.min(D,o);v.fillRect(r,d+G,p-r,a)}}}else{a=9;G=1;v.fillRect(e,d+G,h-e,a);if(f.strand){if(f.strand=="+"){v.fillStyle=RIGHT_STRAND_INV}els
e{if(f.strand=="-"){v.fillStyle=LEFT_STRAND_INV}}v.fillRect(e,d,h-e,10);v.fillStyle=this.base_color}}}y++}}g.append(s);return s}});
\ No newline at end of file
+var DENSITY=1000,DATA_ERROR="There was an error in indexing this dataset.",DATA_NONE="No data for this chrom/contig.",DATA_PENDING="Currently indexing... please wait",DATA_LOADING="Loading data...",CACHED_TILES=10,CACHED_DATA=20,CONTEXT=$("<canvas></canvas>").get(0).getContext("2d"),RIGHT_STRAND,LEFT_STRAND;var right_img=new Image();right_img.src="../images/visualization/strand_right.png";right_img.onload=function(){RIGHT_STRAND=CONTEXT.createPattern(right_img,"repeat")};var left_img=new Image();left_img.src="../images/visualization/strand_left.png";left_img.onload=function(){LEFT_STRAND=CONTEXT.createPattern(left_img,"repeat")};var right_img_inv=new Image();right_img_inv.src="../images/visualization/strand_right_inv.png";right_img_inv.onload=function(){RIGHT_STRAND_INV=CONTEXT.createPattern(right_img_inv,"repeat")};var left_img_inv=new Image();left_img_inv.src="../images/visualization/strand_left_inv.png";left_img_inv.onload=function(){LEFT_STRAND_INV=CONTEXT.createPattern(
left_img_inv,"repeat")};function commatize(b){b+="";var a=/(\d+)(\d{3})/;while(a.test(b)){b=b.replace(a,"$1,$2")}return b}var Cache=function(a){this.num_elements=a;this.obj_cache={};this.key_ary=[]};$.extend(Cache.prototype,{get:function(b){var a=this.key_ary.indexOf(b);if(a!=-1){this.key_ary.splice(a,1);this.key_ary.push(b)}return this.obj_cache[b]},set:function(b,c){if(!this.obj_cache[b]){if(this.key_ary.length>=this.num_elements){var a=this.key_ary.shift();delete this.obj_cache[a]}this.key_ary.push(b)}this.obj_cache[b]=c;return c}});var View=function(b,a){this.chrom=b;this.tracks=[];this.max_low=0;this.max_high=a;this.center=(this.max_high-this.max_low)/2;this.span=this.max_high-this.max_low;this.zoom_factor=2;this.zoom_level=0};$.extend(View.prototype,{add_track:function(a){a.view=this;this.tracks.push(a);if(a.init){a.init()}},redraw:function(){var d=this.span/Math.pow(this.zoom_factor,this.zoom_level),b=this.center-(d/2),e=b+d;if(b<0){b=0;e=b+d}else{if(e>this.max_high){
e=this.max_high;b=e-d}}this.low=Math.floor(b);this.high=Math.ceil(e);this.center=Math.round(this.low+(this.high-this.low)/2);$("#overview-box").css({left:(this.low/this.span)*$("#overview-viewport").width(),width:Math.max(12,((this.high-this.low)/this.span)*$("#overview-viewport").width())}).show();$("#low").val(commatize(this.low));$("#high").val(commatize(this.high));for(var c=0,a=this.tracks.length;c<a;c++){this.tracks[c].draw()}$("#bottom-spacer").remove();$("#viewport").append('<div id="bottom-spacer" style="height: 200px;"></div>')},zoom_in:function(a){if(this.max_high===0||this.high-this.low<30){return}if(a){this.center=a/$(document).width()*(this.high-this.low)+this.low}this.zoom_level+=1;this.redraw()},zoom_out:function(){if(this.max_high===0){return}if(this.zoom_level<=0){this.zoom_level=0;return}this.zoom_level-=1;this.redraw()}});var Track=function(a,b){this.name=a;this.parent_element=b;this.make_container()};$.extend(Track.prototype,{make_container:function(){th
is.header_div=$("<div class='track-header'>").text(this.name);this.content_div=$("<div class='track-content'>");this.container_div=$("<div class='track'></div>").append(this.header_div).append(this.content_div);this.parent_element.append(this.container_div)}});var TiledTrack=function(){this.tile_cache=new Cache(CACHED_TILES)};$.extend(TiledTrack.prototype,Track.prototype,{draw:function(){var h=this.view.low,d=this.view.high,e=d-h;var c=Math.pow(10,Math.ceil(Math.log(e/DENSITY)/Math.log(10)));c=Math.max(c,0.1);c=Math.min(c,1000000);var j=$("<div style='position: relative;'></div>");this.content_div.children(":first").remove();this.content_div.append(j);var k=this.content_div.width()/e;var g;var a=Math.floor(h/c/DENSITY);while((a*DENSITY*c)<d){var i=this.content_div.width()+"_"+this.view.zoom_level+"_"+a;var b=this.tile_cache.get(i);if(b){var f=a*DENSITY*c;b.css({left:(f-this.view.low)*k});j.append(b)}else{g=this.draw_tile(c,a,j,k);if(g){this.tile_cache.set(i,g)}}a+=1}}});var
LabelTrack=function(a){Track.call(this,null,a);this.container_div.addClass("label-track")};$.extend(LabelTrack.prototype,Track.prototype,{draw:function(){var c=this.view,d=c.high-c.low,g=Math.floor(Math.pow(10,Math.floor(Math.log(d)/Math.log(10)))),a=Math.floor(c.low/g)*g,e=this.content_div.width(),b=$("<div style='position: relative; height: 1.3em;'></div>");while(a<c.high){var f=(a-c.low)/d*e;b.append($("<div class='label'>"+commatize(a)+"</div>").css({position:"absolute",left:f-1}));a+=g}this.content_div.children(":first").remove();this.content_div.append(b)}});var LineTrack=function(c,b,a){Track.call(this,c,$("#viewport"));TiledTrack.call(this);this.track_type="line";this.height_px=(a?a:100);this.container_div.addClass("line-track");this.dataset_id=b;this.cache=new Cache(CACHED_DATA)};$.extend(LineTrack.prototype,TiledTrack.prototype,{init:function(){var a=this;a.content_div.text(DATA_LOADING);$.getJSON(data_url,{stats:true,track_type:a.track_type,chrom:a.view.chrom,low:
null,high:null,dataset_id:a.dataset_id},function(c){if(!c||c=="error"){a.container_div.addClass("error");a.content_div.text(DATA_ERROR)}else{if(c=="no data"){a.container_div.addClass("nodata");a.content_div.text(DATA_NONE)}else{if(c=="pending"){a.container_div.addClass("pending");a.content_div.text(DATA_PENDING);setTimeout(function(){a.init()},5000)}else{a.content_div.text("");a.content_div.css("height",a.height_px+"px");a.min_value=c.min;a.max_value=c.max;a.vertical_range=a.max_value-a.min_value;var d=$("<div class='yaxislabel'>"+a.min_value+"</div>");var b=$("<div class='yaxislabel'>"+a.max_value+"</div>");b.css({position:"relative",top:"35px"});b.prependTo(a.container_div);d.css({position:"relative",top:a.height_px+32+"px",});d.prependTo(a.container_div);a.draw()}}}})},get_data:function(d,b){var c=this,a=b*DENSITY*d,f=(b+1)*DENSITY*d,e=d+"_"+b;$.getJSON(data_url,{track_type:this.track_type,chrom:this.view.chrom,low:a,high:f,dataset_id:this.dataset_id},function(g){c.cache[
e]=g;$(document).trigger("redraw")})},draw_tile:function(d,a,m,o){if(!this.vertical_range){return}var h=a*DENSITY*d,b=DENSITY*d,c=$("<canvas class='tile'></canvas>"),l=d+"_"+a;if(!this.cache[l]){this.get_data(d,a);return}var g=this.cache[l];c.css({position:"absolute",top:0,left:(h-this.view.low)*o});c.get(0).width=Math.ceil(b*o);c.get(0).height=this.height_px;var n=c.get(0).getContext("2d");var e=false;n.beginPath();for(var f=0;f<g.length-1;f++){var k=g[f][0]-h;var j=g[f][1];if(isNaN(j)){e=false}else{k=k*o;j=(j-this.min_value)/this.vertical_range*this.height_px;if(e){n.lineTo(k,j)}else{n.moveTo(k,j);e=true}}}n.stroke();m.append(c);return c}});var FeatureTrack=function(c,b,a){Track.call(this,c,$("#viewport"));TiledTrack.call(this);this.track_type="feature";this.height_px=(a?a:100);this.container_div.addClass("feature-track");this.dataset_id=b;this.zo_slots={};this.show_labels_scale=0.001;this.showing_labels=false;this.vertical_gap=10;this.base_color="#2C3143"};$.extend(Featur
eTrack.prototype,TiledTrack.prototype,{init:function(){var a=this;a.content_div.text(DATA_LOADING);$.getJSON(data_url,{track_type:a.track_type,low:a.view.max_low,high:a.view.max_high,dataset_id:a.dataset_id,chrom:a.view.chrom},function(b){if(b=="error"){a.container_div.addClass("error");a.content_div.text(DATA_ERROR)}else{if(b.length===0||b=="no data"){a.container_div.addClass("nodata");a.content_div.text(DATA_NONE)}else{if(b=="pending"){a.container_div.addClass("pending");a.content_div.text(DATA_PENDING);setTimeout(function(){a.init()},5000)}else{a.content_div.text("");a.content_div.css("height",a.height_px+"px");a.values=b;a.calc_slots();a.slots=a.zo_slots;a.draw()}}}})},calc_slots:function(o){var c=[],b=this.content_div.width()/(this.view.high-this.view.low),g=this.show_labels_scale,a=this.view.max_high,e=this.view.max_low;if(o){this.zi_slots={}}var m=$("<canvas></canvas>").get(0).getContext("2d");for(var f=0,h=this.values.length;f<h;f++){var k,l,n=this.values[f];if(o){k=
Math.floor((n.start-e)*g);k-=m.measureText(n.name).width;l=Math.ceil((n.end-e)*g)}else{k=Math.floor((n.start-e)*b);l=Math.ceil((n.end-e)*b)}var d=0;while(true){if(c[d]===undefined||c[d]<k){c[d]=l;if(o){this.zi_slots[n.name]=d}else{this.zo_slots[n.name]=d}break}d++}}this.height_px=c.length*this.vertical_gap+15;this.content_div.css("height",this.height_px+"px")},draw_tile:function(w,B,g,n){if(!this.values){return null}if(n>this.show_labels_scale&&!this.showing_labels){this.showing_labels=true;if(!this.zi_slots){this.calc_slots(true)}this.slots=this.zi_slots}else{if(n<=this.show_labels_scale&&this.showing_labels){this.showing_labels=false;this.slots=this.zo_slots}}var C=B*DENSITY*w,c=(B+1)*DENSITY*w,q=DENSITY*w;var u=Math.ceil(q*n),t=this.height_px,s=$("<canvas class='tile'></canvas>");s.css({position:"absolute",top:0,left:(C-this.view.low)*n});s.get(0).width=u;s.get(0).height=t;var v=s.get(0).getContext("2d");v.fillStyle=this.base_color;v.font="10px monospace";v.textAlign="rig
ht";var y=0;for(var z=0,A=this.values.length;z<A;z++){var f=this.values[z];if(f.start<=c&&f.end>=C){var e=Math.floor(Math.max(0,(f.start-C)*n)),h=Math.ceil(Math.min(u,(f.end-C)*n)),d=this.slots[f.name]*this.vertical_gap;var a,G,b=null,o=null;if(f.thick_start&&f.thick_end){b=Math.floor(Math.max(0,(f.thick_start-C)*n));o=Math.ceil(Math.min(u,(f.thick_end-C)*n))}if(!this.showing_labels){v.fillRect(e,d+5,h-e,1)}else{if(v.fillText){v.fillText(f.name,e-1,d+8)}var E=f.blocks;if(E){if(f.strand){if(f.strand=="+"){v.fillStyle=RIGHT_STRAND}else{if(f.strand=="-"){v.fillStyle=LEFT_STRAND}}v.fillRect(e,d,h-e,10);v.fillStyle=this.base_color}for(var x=0,F=E.length;x<F;x++){var m=E[x],l=Math.floor(Math.max(0,(m[0]-C)*n)),D=Math.ceil(Math.min(u,(m[1]-C)*n));a=5;G=3;v.fillRect(l,d+G,D-l,a);if(b&&(l<o||D>b)){a=9;G=1;var r=Math.max(l,b),p=Math.min(D,o);v.fillRect(r,d+G,p-r,a)}}}else{a=9;G=1;v.fillRect(e,d+G,h-e,a);if(f.strand){if(f.strand=="+"){v.fillStyle=RIGHT_STRAND_INV}else{if(f.strand=="-")
{v.fillStyle=LEFT_STRAND_INV}}v.fillRect(e,d,h-e,10);v.fillStyle=this.base_color}}}y++}}g.append(s);return s}});
\ No newline at end of file
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/133252175425
changeset: 2949:133252175425
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Nov 03 12:52:00 2009 -0500
description:
imported patch alchemy05_fixes_01
diffstat:
eggs.ini | 10 +-
lib/galaxy/model/mapping.py | 2 +-
lib/galaxy/model/migrate/check.py | 2 +-
lib/galaxy/model/migrate/versions/0003_security_and_libraries.py | 4 +-
lib/galaxy/model/migrate/versions/0004_indexes_and_defaults.py | 2 +-
lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py | 4 +-
lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py | 2 +-
lib/galaxy/model/migrate/versions/0008_galaxy_forms.py | 4 +-
lib/galaxy/model/migrate/versions/0009_request_table.py | 2 +-
lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py | 4 +-
lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py | 4 +-
lib/galaxy/model/migrate/versions/0012_user_address.py | 4 +-
lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py | 2 +-
lib/galaxy/model/migrate/versions/0017_library_item_indexes.py | 2 +-
lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py | 2 +-
lib/galaxy/model/migrate/versions/0019_request_library_folder.py | 4 +-
lib/galaxy/model/migrate/versions/0020_library_upload_job.py | 4 +-
lib/galaxy/model/orm/ext/assignmapper.py | 38 ++++++---
lib/galaxy/web/controllers/forms.py | 2 +-
lib/galaxy/web/controllers/history.py | 2 +-
lib/galaxy/web/controllers/page.py | 4 +-
lib/galaxy/web/controllers/workflow.py | 22 ++--
lib/galaxy/web/framework/helpers/grids.py | 4 +-
23 files changed, 73 insertions(+), 57 deletions(-)
diffs (502 lines):
diff -r 80915982fdb2 -r 133252175425 eggs.ini
--- a/eggs.ini Tue Nov 03 11:28:34 2009 -0500
+++ b/eggs.ini Tue Nov 03 12:52:00 2009 -0500
@@ -28,6 +28,7 @@
[eggs:noplatform]
amqplib = 0.6.1
Beaker = 1.4
+decorator = 3.1.2
docutils = 0.4
elementtree = 1.2.6_20050316
lrucache = 0.2
@@ -41,8 +42,8 @@
PasteScript = 1.3.6
Routes = 1.6.3
simplejson = 1.5
-SQLAlchemy = 0.4.7p1
-sqlalchemy_migrate = 0.4.5
+SQLAlchemy = 0.5.6
+sqlalchemy_migrate = 0.5.4
Tempita = 0.1
twill = 0.9
WebError = 0.8a
@@ -77,6 +78,7 @@
guppy = http://pypi.python.org/packages/source/g/guppy/guppy-0.1.8.tar.gz
amqplib = http://py-amqplib.googlecode.com/files/amqplib-0.6.1.tgz
Beaker = http://cheeseshop.python.org/packages/source/B/Beaker/Beaker-1.4.tar.gz
+decorator = http://pypi.python.org/packages/source/d/decorator/decorator-3.1.2.tar.gz
docutils = http://downloads.sourceforge.net/docutils/docutils-0.4.tar.gz
elementtree = http://effbot.org/downloads/elementtree-1.2.6-20050316.tar.gz
lrucache = http://evan.prodromou.name/lrucache/lrucache-0.2.tar.gz
@@ -90,8 +92,8 @@
PSI = http://pypi.python.org/packages/source/P/PSI/PSI-0.3b1.1.tar.gz
Routes = http://pypi.python.org/packages/source/R/Routes/Routes-1.6.3.tar.gz
simplejson = http://cheeseshop.python.org/packages/source/s/simplejson/simplejson-1.5.ta…
-SQLAlchemy = http://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-0.4.7p1.tar.…
-sqlalchemy_migrate = http://pypi.python.org/packages/source/s/sqlalchemy-migrate/sqlalchemy-migr…
+SQLAlchemy = http://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-0.5.6.tar.gz
+sqlalchemy_migrate = http://pypi.python.org/packages/source/s/sqlalchemy-migrate/sqlalchemy-migr…
Tempita = http://pypi.python.org/packages/source/T/Tempita/Tempita-0.1.tar.gz
twill = http://darcs.idyll.org/~t/projects/twill-0.9.tar.gz
WebError = http://pypi.python.org/packages/source/W/WebError/WebError-0.8a.tar.gz
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/mapping.py Tue Nov 03 12:52:00 2009 -0500
@@ -18,7 +18,7 @@
from sqlalchemy.ext.associationproxy import association_proxy
metadata = MetaData()
-context = Session = scoped_session( sessionmaker( autoflush=False, transactional=False ) )
+context = Session = scoped_session( sessionmaker( autoflush=False, autocommit=True ) )
# For backward compatibility with "context.current"
context.current = Session
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/check.py
--- a/lib/galaxy/model/migrate/check.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/check.py Tue Nov 03 12:52:00 2009 -0500
@@ -7,7 +7,7 @@
from migrate.versioning import repository, schema
from sqlalchemy import *
-from sqlalchemy.exceptions import NoSuchTableError
+from sqlalchemy.exc import NoSuchTableError
log = logging.getLogger( __name__ )
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0003_security_and_libraries.py
--- a/lib/galaxy/model/migrate/versions/0003_security_and_libraries.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0003_security_and_libraries.py Tue Nov 03 12:52:00 2009 -0500
@@ -1,6 +1,6 @@
from sqlalchemy import *
from sqlalchemy.orm import *
-from sqlalchemy.exceptions import *
+from sqlalchemy.exc import *
from migrate import *
from migrate.changeset import *
@@ -20,7 +20,7 @@
from galaxy.model.custom_types import *
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
if migrate_engine.name == 'postgres':
# http://blog.pythonisito.com/2008/01/cascading-drop-table-with-sqlalchemy.ht…
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0004_indexes_and_defaults.py
--- a/lib/galaxy/model/migrate/versions/0004_indexes_and_defaults.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0004_indexes_and_defaults.py Tue Nov 03 12:52:00 2009 -0500
@@ -12,7 +12,7 @@
log.addHandler( handler )
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
User_table = Table( "galaxy_user", metadata, autoload=True )
HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata, autoload=True )
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py
--- a/lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py Tue Nov 03 12:52:00 2009 -0500
@@ -23,7 +23,7 @@
metadata = MetaData( migrate_engine )
-context = scoped_session( sessionmaker( autoflush=False, transactional=False ) )
+context = scoped_session( sessionmaker( autoflush=False, autocommit=True ) )
## classes
@@ -662,7 +662,7 @@
log.debug( "Fixing a discrepancy concerning deleted shared history items." )
affected_items = 0
start_time = time.time()
- for dataset in context.query( Dataset ).filter( and_( Dataset.c.deleted == True, Dataset.c.purged == False ) ):
+ for dataset in context.query( Dataset ).filter( and_( Dataset.deleted == True, Dataset.purged == False ) ):
for dataset_instance in dataset.history_associations + dataset.library_associations:
if not dataset_instance.deleted:
dataset.deleted = False
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py
--- a/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py Tue Nov 03 12:52:00 2009 -0500
@@ -16,7 +16,7 @@
log.addHandler( handler )
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
def display_migration_details():
print "========================================"
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0008_galaxy_forms.py
--- a/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py Tue Nov 03 12:52:00 2009 -0500
@@ -11,7 +11,7 @@
"""
from sqlalchemy import *
from sqlalchemy.orm import *
-from sqlalchemy.exceptions import *
+from sqlalchemy.exc import *
from migrate import *
from migrate.changeset import *
@@ -31,7 +31,7 @@
from galaxy.model.custom_types import *
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
def display_migration_details():
print "========================================"
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0009_request_table.py
--- a/lib/galaxy/model/migrate/versions/0009_request_table.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0009_request_table.py Tue Nov 03 12:52:00 2009 -0500
@@ -9,7 +9,7 @@
from migrate.changeset import *
import sys, logging
from galaxy.model.custom_types import *
-from sqlalchemy.exceptions import *
+from sqlalchemy.exc import *
log = logging.getLogger( __name__ )
log.setLevel(logging.DEBUG)
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py
--- a/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py Tue Nov 03 12:52:00 2009 -0500
@@ -10,7 +10,7 @@
"""
from sqlalchemy import *
from sqlalchemy.orm import *
-from sqlalchemy.exceptions import *
+from sqlalchemy.exc import *
from migrate import *
from migrate.changeset import *
@@ -30,7 +30,7 @@
from galaxy.model.custom_types import *
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
def display_migration_details():
print "========================================"
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py
--- a/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py Tue Nov 03 12:52:00 2009 -0500
@@ -5,7 +5,7 @@
"""
from sqlalchemy import *
from sqlalchemy.orm import *
-from sqlalchemy.exceptions import *
+from sqlalchemy.exc import *
from migrate import *
from migrate.changeset import *
@@ -25,7 +25,7 @@
from galaxy.model.custom_types import *
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
def display_migration_details():
print "========================================"
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0012_user_address.py
--- a/lib/galaxy/model/migrate/versions/0012_user_address.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0012_user_address.py Tue Nov 03 12:52:00 2009 -0500
@@ -6,7 +6,7 @@
"""
from sqlalchemy import *
from sqlalchemy.orm import *
-from sqlalchemy.exceptions import *
+from sqlalchemy.exc import *
from migrate import *
from migrate.changeset import *
import datetime
@@ -24,7 +24,7 @@
log.addHandler( handler )
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
def display_migration_details():
print "========================================"
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py
--- a/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py Tue Nov 03 12:52:00 2009 -0500
@@ -17,7 +17,7 @@
"""
from sqlalchemy import *
from sqlalchemy.orm import *
-from sqlalchemy.exceptions import *
+from sqlalchemy.exc import *
from migrate import *
from migrate.changeset import *
import sys, logging
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0017_library_item_indexes.py
--- a/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py Tue Nov 03 12:52:00 2009 -0500
@@ -16,7 +16,7 @@
log.addHandler( handler )
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
LibraryFolder_table = Table( "library_folder", metadata, autoload=True )
LibraryDatasetDatasetAssociation_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
LibraryDataset_table = Table( "library_dataset", metadata, autoload=True )
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
--- a/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py Tue Nov 03 12:52:00 2009 -0500
@@ -5,7 +5,7 @@
from sqlalchemy import *
from sqlalchemy.orm import *
-from sqlalchemy.exceptions import *
+from sqlalchemy.exc import *
from migrate import *
import migrate.changeset
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0019_request_library_folder.py
--- a/lib/galaxy/model/migrate/versions/0019_request_library_folder.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0019_request_library_folder.py Tue Nov 03 12:52:00 2009 -0500
@@ -1,6 +1,6 @@
from sqlalchemy import *
from sqlalchemy.orm import *
-from sqlalchemy.exceptions import *
+from sqlalchemy.exc import *
from migrate import *
from migrate.changeset import *
import datetime
@@ -18,7 +18,7 @@
log.addHandler( handler )
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
def display_migration_details():
print "========================================"
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/migrate/versions/0020_library_upload_job.py
--- a/lib/galaxy/model/migrate/versions/0020_library_upload_job.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0020_library_upload_job.py Tue Nov 03 12:52:00 2009 -0500
@@ -1,6 +1,6 @@
from sqlalchemy import *
from sqlalchemy.orm import *
-from sqlalchemy.exceptions import *
+from sqlalchemy.exc import *
from migrate import *
from migrate.changeset import *
import datetime
@@ -18,7 +18,7 @@
log.addHandler( handler )
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
def display_migration_details():
print ""
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/model/orm/ext/assignmapper.py
--- a/lib/galaxy/model/orm/ext/assignmapper.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/model/orm/ext/assignmapper.py Tue Nov 03 12:52:00 2009 -0500
@@ -15,22 +15,36 @@
from sqlalchemy import util, exceptions
import types
-from sqlalchemy.orm import mapper, Query
+from sqlalchemy.orm import Query
+from sqlalchemy.orm import mapper as sqla_mapper
-def _monkeypatch_session_method(name, session, class_, make_list=False):
- def do(self, *args, **kwargs):
- if make_list:
- self = [ self ]
- return getattr(session, name)( self, *args, **kwargs )
+def _monkeypatch_session_method( name, session, class_ ):
+ # TODO: eliminate this method by fixing the session flushes
+ def do( self, *args, **kwargs ):
+ if self not in session.deleted:
+ session.add( self )
+ return session.flush()
try:
do.__name__ = name
except:
pass
- if not hasattr(class_, name):
- setattr(class_, name, do)
-
+ if not hasattr( class_, name ):
+ setattr( class_, name, do )
+def session_mapper( scoped_session, class_, *args, **kwargs ):
+ def mapper( cls, *arg, **kw ):
+ validate = kw.pop( 'validate', False )
+ if cls.__init__ is object.__init__:
+ def __init__( self, **kwargs ):
+ for key, value in kwargs.items():
+ if validate:
+ if not cls_mapper.has_property( key ):
+ raise TypeError( "Invalid __init__ argument: '%s'" % key )
+ setattr( self, key, value )
+ cls.__init__ = __init__
+ cls.query = scoped_session.query_property()
+ _monkeypatch_session_method( 'flush', scoped_session, cls )
+ return sqla_mapper( cls, *arg, **kw )
+ return mapper( class_, *args, **kwargs )
def assign_mapper( session, class_, *args, **kwargs ):
- m = class_.mapper = session.mapper( class_, *args, **kwargs )
- for name in ( 'flush', ):
- _monkeypatch_session_method( name, session, class_, make_list=True )
+ m = class_.mapper = session_mapper( session, class_, *args, **kwargs )
return m
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/web/controllers/forms.py
--- a/lib/galaxy/web/controllers/forms.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/web/controllers/forms.py Tue Nov 03 12:52:00 2009 -0500
@@ -450,7 +450,7 @@
# create corresponding row in the form_definition_current table
fd.form_definition_current = fdc
fdc.latest_form = fd
- trans.sa_session.save_or_update( fdc )
+ trans.sa_session.add( fdc )
trans.sa_session.flush()
msg = "The new form named '%s' has been created. " % (fd.name)
return fd, msg
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/web/controllers/history.py Tue Nov 03 12:52:00 2009 -0500
@@ -726,7 +726,7 @@
share.history = history
share.user = send_to_user
session = trans.sa_session
- session.save_or_update( share )
+ session.add( share )
session.flush()
if history not in shared_histories:
shared_histories.append( history )
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/web/controllers/page.py
--- a/lib/galaxy/web/controllers/page.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/web/controllers/page.py Tue Nov 03 12:52:00 2009 -0500
@@ -135,7 +135,7 @@
page_revision.content = ""
# Persist
session = trans.sa_session
- session.save_or_update( page )
+ session.add( page )
session.flush()
# Display the management page
## trans.set_message( "Page '%s' created" % page.title )
@@ -240,4 +240,4 @@
raise web.httpexceptions.HTTPNotFound()
return trans.fill_template( "page/display.mako", page=page )
-
\ No newline at end of file
+
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/web/controllers/workflow.py Tue Nov 03 12:52:00 2009 -0500
@@ -30,7 +30,7 @@
user = trans.get_user()
workflows = trans.sa_session.query( model.StoredWorkflow ) \
.filter_by( user=user, deleted=False ) \
- .order_by( desc( model.StoredWorkflow.c.update_time ) ) \
+ .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
.all()
shared_by_others = trans.sa_session \
.query( model.StoredWorkflowUserShareAssociation ) \
@@ -53,13 +53,13 @@
user = trans.get_user()
workflows = trans.sa_session.query( model.StoredWorkflow ) \
.filter_by( user=user, deleted=False ) \
- .order_by( desc( model.StoredWorkflow.c.update_time ) ) \
+ .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
.all()
shared_by_others = trans.sa_session \
.query( model.StoredWorkflowUserShareAssociation ) \
.filter_by( user=user ) \
- .filter( model.StoredWorkflow.c.deleted == False ) \
- .order_by( desc( model.StoredWorkflow.c.update_time ) ) \
+ .filter( model.StoredWorkflow.deleted == False ) \
+ .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
.all()
return trans.fill_template( "workflow/list_for_run.mako",
workflows = workflows,
@@ -91,7 +91,7 @@
share.stored_workflow = stored
share.user = other
session = trans.sa_session
- session.save_or_update( share )
+ session.add( share )
session.flush()
trans.set_message( "Workflow '%s' shared with user '%s'" % ( stored.name, other.email ) )
return trans.response.send_redirect( url_for( controller='workflow', action='sharing', id=id ) )
@@ -142,7 +142,7 @@
share.stored_workflow = stored
share.user = trans.user
session = trans.sa_session
- session.save_or_update( share )
+ session.add( share )
session.flush()
# Redirect to load galaxy frames.
return trans.response.send_redirect( url_for( controller='workflow' ) )
@@ -180,7 +180,7 @@
new_stored.user = user
# Persist
session = trans.sa_session
- session.save_or_update( new_stored )
+ session.add( new_stored )
session.flush()
# Display the management page
trans.set_message( 'Clone created with name "%s"' % new_stored.name )
@@ -205,7 +205,7 @@
stored_workflow.latest_workflow = workflow
# Persist
session = trans.sa_session
- session.save_or_update( stored_workflow )
+ session.add( stored_workflow )
session.flush()
# Display the management page
trans.set_message( "Workflow '%s' created" % stored_workflow.name )
@@ -514,7 +514,7 @@
stored.name = workflow_name
workflow.stored_workflow = stored
stored.latest_workflow = workflow
- trans.sa_session.save_or_update( stored )
+ trans.sa_session.add( stored )
trans.sa_session.flush()
# Index page with message
return trans.show_message( "Workflow '%s' created from current history." % workflow_name )
@@ -656,12 +656,12 @@
ids_in_menu = set( [ x.stored_workflow_id for x in user.stored_workflow_menu_entries ] )
workflows = trans.sa_session.query( model.StoredWorkflow ) \
.filter_by( user=user, deleted=False ) \
- .order_by( desc( model.StoredWorkflow.c.update_time ) ) \
+ .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
.all()
shared_by_others = trans.sa_session \
.query( model.StoredWorkflowUserShareAssociation ) \
.filter_by( user=user ) \
- .filter( model.StoredWorkflow.c.deleted == False ) \
+ .filter( model.StoredWorkflow.deleted == False ) \
.all()
return trans.fill_template( "workflow/configure_menu.mako",
workflows=workflows,
diff -r 80915982fdb2 -r 133252175425 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/web/framework/helpers/grids.py Tue Nov 03 12:52:00 2009 -0500
@@ -111,10 +111,10 @@
if sort_key.startswith( "-" ):
sort_key = sort_key[1:]
sort_order = 'desc'
- query = query.order_by( self.model_class.c.get( sort_key ).desc() )
+ query = query.order_by( self.model_class.table.c.get( sort_key ).desc() )
else:
sort_order = 'asc'
- query = query.order_by( self.model_class.c.get( sort_key ).asc() )
+ query = query.order_by( self.model_class.table.c.get( sort_key ).asc() )
extra_url_args['sort'] = encoded_sort_key
# There might be a current row
1
0

07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/f776fa6045ba
changeset: 2947:f776fa6045ba
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Tue Nov 03 12:58:13 2009 -0500
description:
Improved search functionality for history grid: (a) generic free text search box and (b) advanced search using name, tag, deleted status, and shared status.
diffstat:
lib/galaxy/tags/tag_handler.py | 8 +-
lib/galaxy/web/controllers/history.py | 145 +++++++++++++++++---
lib/galaxy/web/framework/helpers/grids.py | 1 +
static/scripts/autocomplete_tagging.js | 2 +-
templates/history/grid.mako | 250 +++++++++++++++++++---------------
test/base/twilltestcase.py | 18 +-
test/functional/test_history_functions.py | 2 +-
7 files changed, 278 insertions(+), 148 deletions(-)
diffs (649 lines):
diff -r 80915982fdb2 -r f776fa6045ba lib/galaxy/tags/tag_handler.py
--- a/lib/galaxy/tags/tag_handler.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/tags/tag_handler.py Tue Nov 03 12:58:13 2009 -0500
@@ -3,6 +3,12 @@
class TagHandler( object ):
+ # Minimum tag length.
+ min_tag_len = 2
+
+ # Maximum tag length.
+ max_tag_len = 255
+
# Tag separator.
tag_separators = ',;'
@@ -215,7 +221,7 @@
scrubbed_name = scrubbed_name[1:]
# If name is too short or too long, return None.
- if len(scrubbed_name) < 3 or len(scrubbed_name) > 255:
+ if len(scrubbed_name) < self.min_tag_len or len(scrubbed_name) > self.max_tag_len:
return None
return scrubbed_name
diff -r 80915982fdb2 -r f776fa6045ba lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/web/controllers/history.py Tue Nov 03 12:58:13 2009 -0500
@@ -5,6 +5,7 @@
from galaxy.model import History
from galaxy.model.orm import *
from galaxy.util.json import *
+from galaxy.util.odict import odict
from galaxy.tags.tag_handler import TagHandler
from sqlalchemy.sql.expression import ClauseElement
import webhelpers, logging, operator
@@ -19,12 +20,29 @@
class HistoryListGrid( grids.Grid ):
# Custom column types
class NameColumn( grids.GridColumn ):
- def __init( self, key, link, attach_popup ):
+ def __init( self, key, link, attach_popup, filterable ):
grids.GridColumn.__init__(self, key, link, attach_popup)
def get_value( self, trans, grid, history ):
return history.get_display_name()
+ def filter( self, db_session, query, column_filter ):
+ """ Modify query to filter histories by name. """
+ if column_filter == "All":
+ pass
+ elif column_filter:
+ query = query.filter( func.lower( History.name ).like( "%" + column_filter.lower() + "%" ) )
+ return query
+ def get_accepted_filters( self ):
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = odict()
+ accepted_filter_labels_and_vals["FREETEXT"] = "FREETEXT"
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.iteritems():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
+
class DatasetsByStateColumn( grids.GridColumn ):
def get_value( self, trans, grid, history ):
rval = []
@@ -48,6 +66,7 @@
if item.users_shared_with or item.importable:
return dict( operation="sharing" )
return None
+
class TagsColumn( grids.GridColumn ):
def __init__( self, col_name, key, filterable ):
grids.GridColumn.__init__(self, col_name, key=key, filterable=filterable)
@@ -61,7 +80,7 @@
return div_elt + trans.fill_template( "/tagging_common.mako", trans=trans, tagged_item=history,
elt_id = elt_id, in_form="true", input_size="20", tag_click_fn="add_tag_to_grid_filter" )
def filter( self, db_session, query, column_filter ):
- """ Modify query to include only histories with tags in column_filter. """
+ """ Modify query to filter histories by tag. """
if column_filter == "All":
pass
elif column_filter:
@@ -69,52 +88,115 @@
tag_handler = TagHandler()
raw_tags = tag_handler.parse_tags( column_filter.encode("utf-8") )
for name, value in raw_tags.items():
- tag = tag_handler.get_tag_by_name( db_session, name )
- if tag:
- query = query.filter( History.tags.any( tag_id=tag.id ) )
+ if name:
+ # Search for tag names.
+ query = query.filter( History.tags.any( func.lower( model.HistoryTagAssociation.user_tname ).like( "%" + name.lower() + "%" ) ) )
if value:
- query = query.filter( History.tags.any( value=value.lower() ) )
- else:
- # Tag doesn't exist; unclear what to do here, but the literal thing to do is add the criterion, which
- # will then yield a query that returns no results.
- query = query.filter( History.tags.any( user_tname=name ) )
+ # Search for tag values.
+ query = query.filter( History.tags.any( func.lower( model.HistoryTagAssociation.user_value ).like( "%" + value.lower() + "%" ) ) )
return query
def get_accepted_filters( self ):
- """ Returns a list of accepted filters for this column. """
- accepted_filter_labels_and_vals = { "All": "All" }
- accepted_filters = []
- for label, val in accepted_filter_labels_and_vals.items():
- args = { self.key: val }
- accepted_filters.append( grids.GridColumnFilter( label, args) )
- return accepted_filters
-
-
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = odict()
+ accepted_filter_labels_and_vals["FREETEXT"] = "FREETEXT"
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.iteritems():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
+
class DeletedColumn( grids.GridColumn ):
def get_accepted_filters( self ):
""" Returns a list of accepted filters for this column. """
- accepted_filter_labels_and_vals = { "Active" : "False", "Deleted" : "True", "All": "All" }
+ accepted_filter_labels_and_vals = { "active" : "False", "deleted" : "True", "all": "All" }
accepted_filters = []
for label, val in accepted_filter_labels_and_vals.items():
args = { self.key: val }
accepted_filters.append( grids.GridColumnFilter( label, args) )
return accepted_filters
+
+ class SharingColumn( grids.GridColumn ):
+ def filter( self, db_session, query, column_filter ):
+ """ Modify query to filter histories by sharing status. """
+ if column_filter == "All":
+ pass
+ elif column_filter:
+ if column_filter == "private":
+ query = query.filter( History.users_shared_with == None )
+ query = query.filter( History.importable == False )
+ elif column_filter == "shared":
+ query = query.filter( History.users_shared_with != None )
+ elif column_filter == "importable":
+ query = query.filter( History.importable == True )
+ return query
+ def get_accepted_filters( self ):
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = odict()
+ accepted_filter_labels_and_vals["private"] = "private"
+ accepted_filter_labels_and_vals["shared"] = "shared"
+ accepted_filter_labels_and_vals["importable"] = "importable"
+ accepted_filter_labels_and_vals["all"] = "All"
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.items():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
+
+ class FreeTextSearchColumn( grids.GridColumn ):
+ def filter( self, db_session, query, column_filter ):
+ """ Modify query to search tags and history names. """
+ if column_filter == "All":
+ pass
+ elif column_filter:
+ # Build tags filter.
+ tag_handler = TagHandler()
+ raw_tags = tag_handler.parse_tags( column_filter.encode("utf-8") )
+ tags_filter = None
+ for name, value in raw_tags.items():
+ if name:
+ # Search for tag names.
+ tags_filter = History.tags.any( func.lower( model.HistoryTagAssociation.user_tname ).like( "%" + name.lower() + "%" ) )
+ if value:
+ # Search for tag values.
+ tags_filter = and_( tags_filter, func.lower( History.tags.any( model.HistoryTagAssociation.user_value ).like( "%" + value.lower() + "%" ) ) )
+
+ # Build history name filter.
+ history_name_filter = func.lower( History.name ).like( "%" + column_filter.lower() + "%" )
+
+ # Apply filters to query.
+ if tags_filter:
+ query = query.filter( or_( tags_filter, history_name_filter ) )
+ else:
+ query = query.filter( history_name_filter )
+ return query
+ def get_accepted_filters( self ):
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = odict()
+ accepted_filter_labels_and_vals["FREETEXT"] = "FREETEXT"
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.iteritems():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
# Grid definition
- title = "Stored histories"
+ title = "Saved Histories"
model_class = model.History
template='/history/grid.mako'
default_sort_key = "-create_time"
columns = [
NameColumn( "Name", key="name",
link=( lambda history: iff( history.deleted, None, dict( operation="switch", id=history.id ) ) ),
- attach_popup=True ),
+ attach_popup=True, filterable=True ),
DatasetsByStateColumn( "Datasets (by state)", ncells=4 ),
TagsColumn( "Tags", key="tags", filterable=True),
StatusColumn( "Status", attach_popup=False ),
grids.GridColumn( "Created", key="create_time", format=time_ago ),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
- # Valid for filtering but invisible
- DeletedColumn( "Status", key="deleted", visible=False, filterable=True )
+ # Columns that are valid for filtering but are not visible.
+ DeletedColumn( "Deleted", key="deleted", visible=False, filterable=True ),
+ SharingColumn( "Shared", key="shared", visible=False, filterable=True ),
+ FreeTextSearchColumn( "Search", key="free-text-search", visible=False ) # Not filterable because it's the default search.
]
operations = [
grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
@@ -131,7 +213,7 @@
grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
grids.GridColumnFilter( "All", args=dict( deleted='All' ) ),
]
- default_filter = dict( deleted="False", tags="All" )
+ default_filter = dict( name="All", deleted="False", tags="All", shared="All" )
num_rows_per_page = 50
preserve_state = False
use_paging = True
@@ -160,6 +242,7 @@
template='/history/grid.mako'
model_class = model.History
default_sort_key = "-update_time"
+ default_filter = {}
columns = [
grids.GridColumn( "Name", key="name" ),
DatasetsByStateColumn( "Datasets (by state)", ncells=4 ),
@@ -374,6 +457,18 @@
trans.sa_session.flush()
@web.expose
+ def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
+ """Return autocomplete data for history names"""
+ user = trans.get_user()
+ if not user:
+ return
+
+ ac_data = ""
+ for history in trans.sa_session.query( History ).filter_by( user=user ).filter( func.lower( History.name ) .like(q.lower() + "%") ):
+ ac_data = ac_data + history.name + "\n"
+ return ac_data
+
+ @web.expose
def imp( self, trans, id=None, confirm=False, **kwd ):
"""Import another user's history via a shared URL"""
msg = ""
diff -r 80915982fdb2 -r f776fa6045ba lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py Tue Nov 03 11:28:34 2009 -0500
+++ b/lib/galaxy/web/framework/helpers/grids.py Tue Nov 03 12:58:13 2009 -0500
@@ -183,6 +183,7 @@
query=query,
cur_page_num = page_num,
num_pages = num_pages,
+ default_filter_dict=self.default_filter,
cur_filter_dict=cur_filter_dict,
sort_key=sort_key,
encoded_sort_key=encoded_sort_key,
diff -r 80915982fdb2 -r f776fa6045ba static/scripts/autocomplete_tagging.js
--- a/static/scripts/autocomplete_tagging.js Tue Nov 03 11:28:34 2009 -0500
+++ b/static/scripts/autocomplete_tagging.js Tue Nov 03 12:58:13 2009 -0500
@@ -309,7 +309,7 @@
new_value = new_value.replace(/^\s+|\s+$/g,"");
// Too short?
- if (new_value.length < 3)
+ if (new_value.length < 2)
return false;
//
diff -r 80915982fdb2 -r f776fa6045ba templates/history/grid.mako
--- a/templates/history/grid.mako Tue Nov 03 11:28:34 2009 -0500
+++ b/templates/history/grid.mako Tue Nov 03 12:58:13 2009 -0500
@@ -29,71 +29,20 @@
});
// Set up autocomplete for tag filter input.
- var t = $("#input-tag-filter");
- t.keyup( function( e )
- {
- if ( e.keyCode == 27 )
- {
- // Escape key
- $(this).trigger( "blur" );
- } else if (
- ( e.keyCode == 13 ) || // Return Key
- ( e.keyCode == 188 ) || // Comma
- ( e.keyCode == 32 ) // Space
- )
- {
- //
- // Check input.
- //
-
- new_value = this.value;
-
- // Do nothing if return key was used to autocomplete.
- if (return_key_pressed_for_autocomplete == true)
- {
- return_key_pressed_for_autocomplete = false;
- return false;
- }
-
- // Suppress space after a ":"
- if ( new_value.indexOf(": ", new_value.length - 2) != -1)
- {
- this.value = new_value.substring(0, new_value.length-1);
- return false;
- }
-
- // Remove trigger keys from input.
- if ( (e.keyCode == 188) || (e.keyCode == 32) )
- new_value = new_value.substring( 0 , new_value.length - 1 );
-
- // Trim whitespace.
- new_value = new_value.replace(/^\s+|\s+$/g,"");
-
- // Too short?
- if (new_value.length < 3)
- return false;
-
- //
- // New tag OK.
- //
- }
- });
+ var t = $("#input-tags-filter");
- // Add autocomplete to input.
- var format_item_func = function(key, row_position, num_rows, value, search_term)
- {
- tag_name_and_value = value.split(":");
- return (tag_name_and_value.length == 1 ? tag_name_and_value[0] :tag_name_and_value[1]);
- //var array = new Array(key, value, row_position, num_rows,
- //search_term ); return "\"" + array.join("*") + "\"";
- }
var autocomplete_options =
- { selectFirst: false, formatItem : format_item_func, autoFill: false, highlight: false, mustMatch: true };
+ { selectFirst: false, autoFill: false, highlight: false, mustMatch: false };
t.autocomplete("${h.url_for( controller='tag', action='tag_autocomplete_data', item_class='History' )}", autocomplete_options);
-
- $("#page-select").change(navigate_to_page);
+ // Set up autocomplete for name filter input.
+ var t2 = $("#input-name-filter");
+
+ var autocomplete_options =
+ { selectFirst: false, autoFill: false, highlight: false, mustMatch: false };
+
+ t2.autocomplete("${h.url_for( controller='history', action='name_autocomplete_data' )}", autocomplete_options);
});
## Can this be moved into base.mako?
%if refresh_frames:
@@ -125,21 +74,52 @@
%endif
%endif
+ // Filter and sort args for grid.
+ var filter_args = ${h.to_json_string(cur_filter_dict)};
+ var sort_key = "${sort_key}";
+
//
- // Add a tag to the current grid filter; this adds the tag to the filter and then issues a request to refresh the grid.
+ // Add tag to grid filter.
//
function add_tag_to_grid_filter(tag_name, tag_value)
{
- // Use tag as a filter: replace TAGNAME with tag_name and issue query.
- <%
- url_args = {}
- if "tags" in cur_filter_dict and cur_filter_dict["tags"] != "All":
- url_args["f-tags"] = cur_filter_dict["tags"].encode("utf-8") + ", TAGNAME"
- else:
- url_args["f-tags"] = "TAGNAME"
- %>
- var url_base = "${url( url_args )}";
- var url = url_base.replace("TAGNAME", tag_name);
+ // Put tag name and value together.
+ var tag = tag_name + (tag_value != null && tag_value != "" ? ":" + tag_value : "");
+ add_condition_to_grid_filter("tags", tag, true);
+ }
+
+ //
+ // Add a filter to the current grid filter; this adds the filter and then issues a request to refresh the grid.
+ //
+ function add_condition_to_grid_filter(name, value, append)
+ {
+ // Update filter arg with new condition.
+ if (append)
+ {
+ // Append value.
+ var cur_val = filter_args[name];
+ if (cur_val != "All")
+ cur_val = cur_val + ", " + value;
+ else
+ cur_val = value;
+ filter_args[name] = cur_val;
+ }
+ else
+ {
+ // Replace value.
+ filter_args[name] = value;
+ }
+
+ // Build URL with filter args, sort key.
+ var filter_arg_value_strs = new Array();
+ var i = 0;
+ for (arg in filter_args)
+ {
+ filter_arg_value_strs[i++] = "f-" + arg + "=" + filter_args[arg];
+ }
+ var filter_str = filter_arg_value_strs.join("&");
+ var url_base = "${h.url_for( controller='history', action='list')}";
+ var url = url_base + "?" + filter_str + "&sort=" + sort_key;
self.location = url;
}
@@ -154,7 +134,7 @@
var url = url_base.replace("PAGE", page_num);
self.location = url;
}
-
+
</script>
</%def>
@@ -175,47 +155,95 @@
<div class="grid-header">
<h2>${grid.title}</h2>
-
- ## Print grid filter.
- <form name="history_actions" action="javascript:add_tag_to_grid_filter($('#input-tag-filter').attr('value'))" method="get" >
- <strong>Filter: </strong>
- %for column in grid.columns:
- %if column.filterable:
- <span> by ${column.label.lower()}:</span>
- ## For now, include special case to handle tags.
- %if column.key == "tags":
- %if cur_filter_dict[column.key] != "All":
- <span class="filter" "style='font-style: italic'">
- ${cur_filter_dict[column.key]}
- </span>
- <span>|</span>
+
+ ## Search box and more options filter at top of grid.
+ <div>
+ ## Grid search. TODO: use more elegant way to get free text search column.
+ <% column = grid.columns[-1] %>
+ <% use_form = False %>
+ %for i, filter in enumerate( column.get_accepted_filters() ):
+ %if i > 0:
+ <span>|</span>
+ %endif
+ %if column.key in cur_filter_dict and cur_filter_dict[column.key] == filter.args[column.key]:
+ <span class="filter" "style='font-style: italic'">${filter.label}</span>
+ %elif filter.label == "FREETEXT":
+ <form name="history_actions"
+ action="javascript:add_condition_to_grid_filter($('#input-${column.key}-filter').attr('name'),$('#input-${column.key}-filter').attr('value'),false)"
+ method="get" >
+ ${column.label}:
+ %if column.key in cur_filter_dict and cur_filter_dict[column.key] != "All":
+ <span style="font-style: italic">${cur_filter_dict[column.key]}</span>
+ <% filter_all = GridColumnFilter( "", { column.key : "All" } ) %>
+ <a href="${url( filter_all.get_url_args() )}"><img src="${h.url_for('/static/images/delete_tag_icon_gray.png')}"/></a>
+ |
%endif
- <input id="input-tag-filter" name="f-tags" type="text" value="" size="15"/>
- <span>|</span>
- %endif
-
- ## Handle other columns.
- %for i, filter in enumerate( column.get_accepted_filters() ):
- %if i > 0:
- <span>|</span>
- %endif
- %if cur_filter_dict[column.key] == filter.args[column.key]:
- <span class="filter" "style='font-style: italic'">${filter.label}</span>
- %else:
- <span class="filter"><a href="${url( filter.get_url_args() )}">${filter.label}</a></span>
- %endif
- %endfor
- <span> </span>
+ <span><input id="input-${column.key}-filter" name="${column.key}" type="text" value="" size="15"/></span>
+ <% use_form = True %>
+ %else:
+ <span class="filter"><a href="${url( filter.get_url_args() )}">${filter.label}</a></span>
%endif
%endfor
-
- ## Link to clear all filters. TODO: this should be the default filter or an empty filter.
- <%
- args = { "deleted" : "False", "tags" : "All" }
- no_filter = GridColumnFilter("Clear Filter", args)
- %>
- <span><a href="${url( no_filter.get_url_args() )}">${no_filter.label}</a></span>
- </form>
+ | <a href="" onclick="javascript:$('#more-search-options').slideToggle('fast');return false;">Advanced Search</a>
+ %if use_form:
+ </form>
+ %endif
+ </div>
+
+ ## Advanced Search
+ <div id="more-search-options" style="display: none; padding-top: 5px">
+ <table style="border: 1px solid gray;">
+ <tr><td style="text-align: left" colspan="100">
+ Advanced Search |
+ <a href=""# onclick="javascript:$('#more-search-options').slideToggle('fast');return false;">Close</a> |
+ ## Link to clear all filters.
+ <%
+ no_filter = GridColumnFilter("Clear All", default_filter_dict)
+ %>
+ <a href="${url( no_filter.get_url_args() )}">${no_filter.label}</a>
+ </td></tr>
+ %for column in grid.columns:
+ %if column.filterable:
+ <tr>
+ ## Show div if current filter has value that is different from the default filter.
+ %if cur_filter_dict[column.key] != default_filter_dict[column.key]:
+ <script type="text/javascript">
+ $('#more-search-options').css("display", "block");
+ </script>
+ %endif
+ <td style="padding-left: 10px">${column.label.lower()}:</td>
+ <td>
+ <% use_form = False %>
+ %for i, filter in enumerate( column.get_accepted_filters() ):
+ %if i > 0:
+ <span>|</span>
+ %endif
+ %if cur_filter_dict[column.key] == filter.args[column.key]:
+ <span class="filter" style="font-style: italic">${filter.label}</span>
+ %elif filter.label == "FREETEXT":
+ <form name="history_actions" action="javascript:add_condition_to_grid_filter($('#input-${column.key}-filter').attr('name'),$('#input-${column.key}-filter').attr('value'),true)"
+ method="get" >
+ %if column.key in cur_filter_dict and cur_filter_dict[column.key] != "All":
+ <span style="font-style: italic">${cur_filter_dict[column.key]}</span>
+ <% filter_all = GridColumnFilter( "", { column.key : "All" } ) %>
+ <a href="${url( filter_all.get_url_args() )}"><img src="${h.url_for('/static/images/delete_tag_icon_gray.png')}"/></a>
+ |
+ %endif
+ <span><input id="input-${column.key}-filter" name="${column.key}" type="text" value="" size="15"/></span>
+ <% use_form = True %>
+ %else:
+ <span class="filter"><a href="${url( filter.get_url_args() )}">${filter.label}</a></span>
+ %endif
+ %endfor
+ %if use_form:
+ </form>
+ %endif
+ </td>
+ </tr>
+ %endif
+ %endfor
+ </table>
+ </div>
</div>
<form name="history_actions" action="${url()}" method="post" >
<input type="hidden" name="page" value="${cur_page_num}">
@@ -291,7 +319,7 @@
extra = ""
%>
%if href:
- <td><div class="menubutton split" style="float: left;"><a class="label" href="${href}">${v}${extra}</a> </td>
+ <td><div class="menubutton split" style="float: left;"><a class="label" href="${href}">${v}</a>${extra}</td>
%else:
<td >${v}${extra}</td>
%endif
diff -r 80915982fdb2 -r f776fa6045ba test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Tue Nov 03 11:28:34 2009 -0500
+++ b/test/base/twilltestcase.py Tue Nov 03 12:58:13 2009 -0500
@@ -1,7 +1,7 @@
import pkg_resources
pkg_resources.require( "twill==0.9" )
-import StringIO, os, sys, random, filecmp, time, unittest, urllib, logging, difflib, zipfile, tempfile
+import StringIO, os, sys, random, filecmp, time, unittest, urllib, logging, difflib, zipfile, tempfile, re
from itertools import *
import twill
@@ -311,20 +311,20 @@
def view_stored_active_histories( self, check_str='' ):
self.home()
self.visit_page( "history/list" )
- self.check_page_for_string( 'Stored histories' )
+ self.check_page_for_string( 'Saved Histories' )
self.check_page_for_string( '<input type="checkbox" name="id" value=' )
- self.check_page_for_string( 'operation=Rename&id' )
- self.check_page_for_string( 'operation=Switch&id' )
- self.check_page_for_string( 'operation=Delete&id' )
+ self.check_page_for_string( 'operation=Rename' )
+ self.check_page_for_string( 'operation=Switch' )
+ self.check_page_for_string( 'operation=Delete' )
if check_str:
self.check_page_for_string( check_str )
self.home()
def view_stored_deleted_histories( self, check_str='' ):
self.home()
self.visit_page( "history/list?f-deleted=True" )
- self.check_page_for_string( 'Stored histories' )
+ self.check_page_for_string( 'Saved Histories' )
self.check_page_for_string( '<input type="checkbox" name="id" value=' )
- self.check_page_for_string( 'operation=Undelete&id' )
+ self.check_page_for_string( 'operation=Undelete' )
if check_str:
self.check_page_for_string( check_str )
self.home()
@@ -723,14 +723,14 @@
# Functions associated with browsers, cookies, HTML forms and page visits
def check_page_for_string( self, patt ):
- """Looks for 'patt' in the current browser page"""
+ """Looks for 'patt' in the current browser page"""
page = self.last_page()
for subpatt in patt.split():
if page.find( patt ) == -1:
fname = self.write_temp_file( page )
errmsg = "no match to '%s'\npage content written to '%s'" % ( patt, fname )
raise AssertionError( errmsg )
-
+
def write_temp_file( self, content ):
fd, fname = tempfile.mkstemp( suffix='.html', prefix='twilltestcase-' )
f = os.fdopen( fd, "w" )
diff -r 80915982fdb2 -r f776fa6045ba test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Tue Nov 03 11:28:34 2009 -0500
+++ b/test/functional/test_history_functions.py Tue Nov 03 12:58:13 2009 -0500
@@ -179,7 +179,7 @@
self.share_current_history( regular_user1.email,
check_str=history3.name )
# Check out list of histories to make sure history3 was shared
- self.view_stored_active_histories( check_str='operation=sharing">shared' )
+ self.view_stored_active_histories( check_str='operation=sharing' )
# Enable importing history3 via a URL
self.enable_import_via_link( self.security.encode_id( history3.id ),
check_str='Unshare',
1
0

07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/80915982fdb2
changeset: 2946:80915982fdb2
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Nov 03 11:28:34 2009 -0500
description:
Fix new functional tests script return code (for buildbot)
diffstat:
scripts/functional_tests.py | 11 +++++++----
1 files changed, 7 insertions(+), 4 deletions(-)
diffs (34 lines):
diff -r c96e886f883f -r 80915982fdb2 scripts/functional_tests.py
--- a/scripts/functional_tests.py Tue Nov 03 10:26:42 2009 -0500
+++ b/scripts/functional_tests.py Tue Nov 03 11:28:34 2009 -0500
@@ -155,7 +155,7 @@
log.info( "Functional tests will be run against %s:%s" % ( galaxy_test_host, galaxy_test_port ) )
- rval = False
+ success = False
try:
@@ -186,7 +186,7 @@
result = test_runner.run( tests )
- rval = result.wasSuccessful()
+ success = result.wasSuccessful()
except:
log.exception( "Failure running tests" )
@@ -206,7 +206,10 @@
app = None
log.info( "Embedded Universe application stopped" )
- return rval
+ if success:
+ return 0
+ else:
+ return 1
if __name__ == "__main__":
- main()
+ sys.exit( main() )
1
0

07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/c96e886f883f
changeset: 2945:c96e886f883f
user: rc
date: Tue Nov 03 10:26:42 2009 -0500
description:
Fixed a bug in the user selectbox on the new requests page
diffstat:
lib/galaxy/web/controllers/requests_admin.py | 9 +++------
1 files changed, 3 insertions(+), 6 deletions(-)
diffs (34 lines):
diff -r 7594fc81bdfc -r c96e886f883f lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Tue Nov 03 07:58:24 2009 -0500
+++ b/lib/galaxy/web/controllers/requests_admin.py Tue Nov 03 10:26:42 2009 -0500
@@ -377,8 +377,10 @@
msg=msg,
messagetype=messagetype)
def __select_user(self, trans, userid):
+ user_list = trans.sa_session.query( trans.app.model.User )\
+ .order_by( trans.app.model.User.email.asc() )
user_ids = ['none']
- for user in trans.sa_session.query( trans.app.model.User ):
+ for user in user_list:
if not user.deleted:
user_ids.append(str(user.id))
select_user = SelectField('select_user',
@@ -388,10 +390,6 @@
select_user.add_option('Select one', 'none', selected=True)
else:
select_user.add_option('Select one', 'none')
- def __get_email(user):
- return user.email
- user_list = trans.sa_session.query( trans.app.model.User )
- #user_list.sort(key=__get_email)
for user in user_list:
if not user.deleted:
if userid == str(user.id):
@@ -399,7 +397,6 @@
else:
select_user.add_option(user.email, user.id)
return select_user
-
def __library_ui(self, trans, user, request=None, **kwd):
'''
This method creates the data library & folder selectbox for new &
1
0

07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/7594fc81bdfc
changeset: 2944:7594fc81bdfc
user: rc
date: Tue Nov 03 07:58:24 2009 -0500
description:
Changed the test user emails in the user_info functional tests
diffstat:
test/functional/test_user_info.py | 18 +++++++++---------
1 files changed, 9 insertions(+), 9 deletions(-)
diffs (60 lines):
diff -r 6e657b4aa837 -r 7594fc81bdfc test/functional/test_user_info.py
--- a/test/functional/test_user_info.py Mon Nov 02 16:52:32 2009 -0500
+++ b/test/functional/test_user_info.py Tue Nov 03 07:58:24 2009 -0500
@@ -75,7 +75,7 @@
# user a new user with 'Student' user info form
form_one = get_latest_form(form_one_name)
user_info_values=['Educational', 'Penn State']
- self.create_user_with_info( 'test1(a)bx.psu.edu', 'testuser', 'test1',
+ self.create_user_with_info( 'test11(a)bx.psu.edu', 'testuser', 'test11',
user_info_forms='multiple',
user_info_form_id=form_one.id,
user_info_values=user_info_values )
@@ -98,7 +98,7 @@
# user a new user with 'Student' user info form
form_one = get_latest_form(form_one_name)
user_info_values=['Educational', 'Penn State']
- self.create_user_with_info( 'test2(a)bx.psu.edu', 'testuser', 'test2',
+ self.create_user_with_info( 'test12(a)bx.psu.edu', 'testuser', 'test12',
user_info_forms='single',
user_info_form_id=form_one.id,
user_info_values=user_info_values )
@@ -110,31 +110,31 @@
def test_015_edit_user_info( self ):
"""Testing editing user info as a regular user"""
self.logout()
- self.login( 'test1(a)bx.psu.edu' )
+ self.login( 'test11(a)bx.psu.edu' )
user = sa_session.query( galaxy.model.User ) \
- .filter( and_( galaxy.model.User.table.c.email=='test1(a)bx.psu.edu' ) ).first()
- self.edit_login_info( new_email='test1_new(a)bx.psu.edu', new_username='test1_new' )
+ .filter( and_( galaxy.model.User.table.c.email=='test11(a)bx.psu.edu' ) ).first()
+ self.edit_login_info( new_email='test11_new(a)bx.psu.edu', new_username='test11_new' )
self.change_password('testuser', 'new_testuser')
self.edit_user_info( ['Research', 'PSU'] )
def test_020_create_user_as_admin( self ):
''' Testing creating users as an admin '''
self.logout()
- self.login( 'test2(a)bx.psu.edu' )
+ self.login( 'test(a)bx.psu.edu' )
form_one = get_latest_form(form_one_name)
user_info_values=['Educational', 'Penn State']
- self.create_user_with_info( 'test3(a)bx.psu.edu', 'testuser', 'test3',
+ self.create_user_with_info( 'test13(a)bx.psu.edu', 'testuser', 'test13',
user_info_forms='single',
user_info_form_id=form_one.id,
user_info_values=user_info_values )
self.logout()
self.login( 'test(a)bx.psu.edu' )
user = sa_session.query( galaxy.model.User ) \
- .filter( and_( galaxy.model.User.table.c.email=='test3(a)bx.psu.edu' ) ).first()
+ .filter( and_( galaxy.model.User.table.c.email=='test13(a)bx.psu.edu' ) ).first()
self.home()
page = "admin/users?id=%s&operation=information&f-deleted=False" % self.security.encode_id( user.id )
self.visit_page( page )
self.check_page_for_string( 'Manage User Information' )
- self.check_page_for_string( 'test3(a)bx.psu.edu' )
+ self.check_page_for_string( 'test13(a)bx.psu.edu' )
for value in user_info_values:
self.check_page_for_string( value )
# lets delete the 'Student' user info form
1
0