galaxy-dev
Threads by month
- ----- 2026 -----
- January
- ----- 2025 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2009 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2008 -----
- December
- November
- October
- September
- August
- 10009 discussions
07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/0d534da7f0ff
changeset: 2964:0d534da7f0ff
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Wed Nov 04 13:09:37 2009 -0500
description:
Eliminate printing PYTHONPATH to stderr in the cleanup_datasets.py script.
diffstat:
scripts/cleanup_datasets/cleanup_datasets.py | 1 -
1 files changed, 0 insertions(+), 1 deletions(-)
diffs (11 lines):
diff -r fdf07565eedf -r 0d534da7f0ff scripts/cleanup_datasets/cleanup_datasets.py
--- a/scripts/cleanup_datasets/cleanup_datasets.py Wed Nov 04 12:58:48 2009 -0500
+++ b/scripts/cleanup_datasets/cleanup_datasets.py Wed Nov 04 13:09:37 2009 -0500
@@ -387,7 +387,6 @@
class CleanupDatasetsApplication( object ):
"""Encapsulates the state of a Universe application"""
def __init__( self, database_connection=None, file_path=None ):
- print >> sys.stderr, "python path is: " + ", ".join( sys.path )
if database_connection is None:
raise Exception( "CleanupDatasetsApplication requires a database_connection value" )
if file_path is None:
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/adfcd8bb13d1
changeset: 2961:adfcd8bb13d1
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Wed Nov 04 12:21:13 2009 -0500
description:
Quick fix for maf_utilities.src_split.
diffstat:
lib/galaxy/tools/util/maf_utilities.py | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (12 lines):
diff -r 91a3bbb52d7a -r adfcd8bb13d1 lib/galaxy/tools/util/maf_utilities.py
--- a/lib/galaxy/tools/util/maf_utilities.py Wed Nov 04 12:15:53 2009 -0500
+++ b/lib/galaxy/tools/util/maf_utilities.py Wed Nov 04 12:21:13 2009 -0500
@@ -23,7 +23,7 @@
fields = src.split( SRC_SPLIT_CHAR, 1 )
spec = fields.pop( 0 )
if fields:
- chrom = fields
+ chrom = fields.pop( 0 )
else:
chrom = spec
return spec, chrom
1
0
07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/92d395c8614b
changeset: 2962:92d395c8614b
user: rc
date: Wed Nov 04 12:30:36 2009 -0500
description:
Fixed a bug in user info - adding new address
diffstat:
lib/galaxy/web/controllers/user.py | 114 +++++++++++++++++++++++---------------
templates/user/edit_address.mako | 2 +-
templates/user/info.mako | 2 +-
templates/user/new_address.mako | 33 +++++++++++
4 files changed, 104 insertions(+), 47 deletions(-)
diffs (200 lines):
diff -r adfcd8bb13d1 -r 92d395c8614b lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Wed Nov 04 12:21:13 2009 -0500
+++ b/lib/galaxy/web/controllers/user.py Wed Nov 04 12:30:36 2009 -0500
@@ -272,7 +272,7 @@
user_address.country = util.restore_text(params.get('field_%i_country' % index, ''))
user_address.phone = util.restore_text(params.get('field_%i_phone' % index, ''))
user_address.flush()
- trans.user.refresh()
+ trans.sa_session.refresh( user )
values.append(int(user_address.id))
elif value == unicode('none'):
values.append('')
@@ -618,57 +618,81 @@
# User not logged in, history group must be only public
return trans.show_error_message( "You must be logged in to change your default permitted actions." )
@web.expose
- def new_address( self, trans, short_desc='', name='', institution='', address1='',
- address2='', city='', state='', postal_code='', country='', phone='' ):
- if trans.app.config.require_login:
- refresh_frames = [ 'masthead', 'history', 'tools' ]
- else:
- refresh_frames = [ 'masthead', 'history' ]
+ def new_address( self, trans, **kwd ):
+ params = util.Params( kwd )
+ msg = util.restore_text( params.get( 'msg', '' ) )
+ messagetype = params.get( 'messagetype', 'done' )
+ admin_view = params.get( 'admin_view', 'False' )
+ error = ''
+ user = trans.sa_session.query( trans.app.model.User ).get( int( params.get( 'user_id', None ) ) )
if not trans.app.config.allow_user_creation and not trans.user_is_admin():
return trans.show_error_message( 'User registration is disabled. Please contact your Galaxy administrator for an account.' )
- short_desc_error = name_error = institution_error = address1_error = city_error = None
- address2_error = state_error = postal_code_error = country_error = phone_error = None
- if short_desc:
- if not len( short_desc ):
- short_desc_error = 'Enter a short description for this address'
- elif not len( name ):
- name_error = 'Enter the full name'
- elif not len( institution ):
- institution_error = 'Enter the institution associated with the user'
- elif not len ( address1 ):
- address1_error = 'Enter the address'
- elif not len( city ):
- city_error = 'Enter the city'
- elif not len( state ):
- state_error = 'Enter the state/province/region'
- elif not len( postal_code ):
- postal_code_error = 'Enter the postal code'
- elif not len( country ):
- country_error = 'Enter the country'
+ if params.get( 'save_new_address_button', None ) == 'Save':
+ if not len( util.restore_text( params.get( 'short_desc', '' ) ) ):
+ error = 'Enter a short description for this address'
+ elif not len( util.restore_text( params.get( 'name', '' ) ) ):
+ error = 'Enter the full name'
+ elif not len( util.restore_text( params.get( 'institution', '' ) ) ):
+ error = 'Enter the institution associated with the user'
+ elif not len ( util.restore_text( params.get( 'address1', '' ) ) ):
+ error = 'Enter the address'
+ elif not len( util.restore_text( params.get( 'city', '' ) ) ):
+ error = 'Enter the city'
+ elif not len( util.restore_text( params.get( 'state', '' ) ) ):
+ error = 'Enter the state/province/region'
+ elif not len( util.restore_text( params.get( 'postal_code', '' ) ) ):
+ error = 'Enter the postal code'
+ elif not len( util.restore_text( params.get( 'country', '' ) ) ):
+ error = 'Enter the country'
else:
- user_address = trans.app.model.UserAddress( user=trans.user, desc=short_desc,
- name=name, institution=institution,
- address=address1+' '+address2, city=city,
- state=state, postal_code=postal_code,
- country=country, phone=phone)
+ user_address = trans.app.model.UserAddress( user=user )
+ user_address.desc = util.restore_text( params.get( 'short_desc', '' ) )
+ user_address.name = util.restore_text( params.get( 'name', '' ) )
+ user_address.institution = util.restore_text( params.get( 'institution', '' ) )
+ user_address.address = util.restore_text( params.get( 'address1', '' ) )+' '+util.restore_text( params.get( 'address2', '' ) )
+ user_address.city = util.restore_text( params.get( 'city', '' ) )
+ user_address.state = util.restore_text( params.get( 'state', '' ) )
+ user_address.postal_code = util.restore_text( params.get( 'postal_code', '' ) )
+ user_address.country = util.restore_text( params.get( 'country', '' ) )
+ user_address.phone = util.restore_text( params.get( 'phone', '' ) )
user_address.flush()
+ msg = 'Address <b>%s</b> has been added' % user_address.desc
+ if admin_view == 'True':
+ return trans.response.send_redirect( web.url_for( controller='user',
+ action='show_info',
+ admin_view=True,
+ user_id=user.id,
+ msg=msg,
+ messagetype='done') )
return trans.response.send_redirect( web.url_for( controller='user',
action='show_info',
- msg='Address <b>%s</b> has been added' % user_address.desc,
+ msg=msg,
messagetype='done') )
-
- return trans.show_form(
- web.FormBuilder( web.url_for(), "New address", submit_text="Save" )
- .add_text( "short_desc", "Short address description", value=short_desc, error=short_desc_error )
- .add_text( "name", "Name", value=name, error=name_error )
- .add_text( "institution", "Institution", value=institution, error=institution_error )
- .add_text( "address1", "Address Line 1", value=address1, error=address1_error )
- .add_text( "address2", "Address Line 2", value=address2, error=address2_error )
- .add_text( "city", "City", value=city, error=city_error )
- .add_text( "state", "State/Province/Region", value=state, error=state_error )
- .add_text( "postal_code", "Postal Code", value=postal_code, error=postal_code_error )
- .add_text( "country", "Country", value=country, error=country_error )
- .add_text( "phone", "Phone", value=phone, error=phone_error ) )
+ else:
+ # show the address form with the current values filled in
+ # create the widgets for each address field
+ widgets = []
+ widgets.append(dict(label='Short description',
+ widget=TextField( 'short_desc', 40, '' ) ) )
+ widgets.append(dict(label='Name',
+ widget=TextField( 'name', 40, '' ) ) )
+ widgets.append(dict(label='Institution',
+ widget=TextField( 'institution', 40, '' ) ) )
+ widgets.append(dict(label='Address Line 1',
+ widget=TextField( 'address1', 40, '' ) ) )
+ widgets.append(dict(label='City',
+ widget=TextField( 'city', 40, '' ) ) )
+ widgets.append(dict(label='State',
+ widget=TextField( 'state', 40, '' ) ) )
+ widgets.append(dict(label='Postal Code',
+ widget=TextField( 'postal_code', 40, '' ) ) )
+ widgets.append(dict(label='Country',
+ widget=TextField( 'country', 40, '' ) ) )
+ widgets.append(dict(label='Phone',
+ widget=TextField( 'phone', 40, '' ) ) )
+ return trans.fill_template( 'user/new_address.mako', user=user,
+ admin_view=admin_view,
+ widgets=widgets, msg=msg, messagetype=messagetype)
@web.expose
def edit_address( self, trans, **kwd ):
params = util.Params( kwd )
diff -r adfcd8bb13d1 -r 92d395c8614b templates/user/edit_address.mako
--- a/templates/user/edit_address.mako Wed Nov 04 12:21:13 2009 -0500
+++ b/templates/user/edit_address.mako Wed Nov 04 12:30:36 2009 -0500
@@ -11,7 +11,7 @@
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='user', action='show_info')}">
+ <a class="action-button" href="${h.url_for( controller='user', action='show_info', admin_view=admin_view, user_id=user.id)}">
<span>Manage User Information</span></a>
</li>
</ul>
diff -r adfcd8bb13d1 -r 92d395c8614b templates/user/info.mako
--- a/templates/user/info.mako Wed Nov 04 12:21:13 2009 -0500
+++ b/templates/user/info.mako Wed Nov 04 12:30:36 2009 -0500
@@ -99,7 +99,7 @@
</div>
</form>
%endif
- <form name="user_info" id="user_info" action="${h.url_for( controller='user', action='new_address' )}" method="post" >
+ <form name="user_info" id="user_info" action="${h.url_for( controller='user', action='new_address', user_id=user.id, admin_view=admin_view )}" method="post" >
<div class="toolFormTitle">User Addresses</div>
<div class="toolFormBody">
%if user.addresses:
diff -r adfcd8bb13d1 -r 92d395c8614b templates/user/new_address.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/user/new_address.mako Wed Nov 04 12:30:36 2009 -0500
@@ -0,0 +1,33 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+
+%if msg:
+ ${render_msg( msg, messagetype )}
+%endif
+</br>
+</br>
+<h3>New address</h3>
+
+<ul class="manage-table-actions">
+ <li>
+ <a class="action-button" href="${h.url_for( controller='user', action='show_info', admin_view=admin_view, user_id=user.id)}">
+ <span>Manage User Information</span></a>
+ </li>
+</ul>
+<div class="toolForm">
+<form name="login_info" id="login_info" action="${h.url_for( controller='user', action='new_address', admin_view=admin_view, user_id=user.id )}" method="post" >
+ <div class="toolFormTitle">New address</div>
+ <div class="toolFormBody">
+ %for field in widgets:
+ <div class="form-row">
+ <label>${field[ 'label' ]}</label>
+ ${field[ 'widget' ].get_html()}
+ </div>
+ %endfor
+ <div class="form-row">
+ <input type="submit" name="save_new_address_button" value="Save">
+ </div>
+ </div>
+</form>
+</div>
\ No newline at end of file
1
0
07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/fdf07565eedf
changeset: 2963:fdf07565eedf
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Nov 04 12:58:48 2009 -0500
description:
Add the GeneTrack egg. Also includes some enhancements and cleanup of the eggs/fetch/scramble code.
diffstat:
dist-eggs.ini | 4 +
eggs.ini | 5 +-
lib/galaxy/config.py | 13 +++
lib/galaxy/eggs/__init__.py | 149 ++++++++++++++----------------------
scripts/fetch_eggs.py | 36 ++++----
scripts/scramble.py | 28 +++---
scripts/scramble/scripts/GeneTrack.py | 52 +++++++++++++
7 files changed, 164 insertions(+), 123 deletions(-)
diffs (490 lines):
diff -r 92d395c8614b -r fdf07565eedf dist-eggs.ini
--- a/dist-eggs.ini Wed Nov 04 12:30:36 2009 -0500
+++ b/dist-eggs.ini Wed Nov 04 12:58:48 2009 -0500
@@ -57,3 +57,7 @@
all = py2.4-all py2.5-all py2.6-all
; default hosts for platform-inspecific eggs
noplatform = py2.4-linux-i686-ucs4 py2.5-linux-i686-ucs4 py2.6-linux-i686-ucs4
+
+[ignore]
+; Don't build these eggs on these platforms:
+GeneTrack = py2.4-noplatform
diff -r 92d395c8614b -r fdf07565eedf eggs.ini
--- a/eggs.ini Wed Nov 04 12:30:36 2009 -0500
+++ b/eggs.ini Wed Nov 04 12:58:48 2009 -0500
@@ -9,7 +9,7 @@
[general]
repository = http://eggs.g2.bx.psu.edu
; these eggs must be scrambled for your local environment
-no_download = pbs_python DRMAA_python
+no_auto = pbs_python DRMAA_python
[eggs:platform]
bx_python = 0.5.0
@@ -31,6 +31,7 @@
decorator = 3.1.2
docutils = 0.4
elementtree = 1.2.6_20050316
+GeneTrack = 2.0.0_beta_1
lrucache = 0.2
;lsprof - james
Mako = 0.2.5
@@ -61,6 +62,7 @@
python_lzo = _static
flup = .dev_r2311
bx_python = _dev_r4bf1f32e6b76
+GeneTrack = _dev_raa786e9fc131d998e532a1aef39d108850c9e93d
; nose = .dev_r7156749efc58
; source location, necessary for scrambling
@@ -81,6 +83,7 @@
decorator = http://pypi.python.org/packages/source/d/decorator/decorator-3.1.2.tar.gz
docutils = http://downloads.sourceforge.net/docutils/docutils-0.4.tar.gz
elementtree = http://effbot.org/downloads/elementtree-1.2.6-20050316.tar.gz
GeneTrack = http://github.com/ialbert/genetrack-central/tarball/aa786e9fc131d998e532a1aef39d108850c9e93d
lrucache = http://evan.prodromou.name/lrucache/lrucache-0.2.tar.gz
Mako = http://www.makotemplates.org/downloads/Mako-0.2.5.tar.gz
MyghtyUtils = http://cheeseshop.python.org/packages/source/M/MyghtyUtils/MyghtyUtils-0.52.tar.gz
diff -r 92d395c8614b -r fdf07565eedf lib/galaxy/config.py
--- a/lib/galaxy/config.py Wed Nov 04 12:30:36 2009 -0500
+++ b/lib/galaxy/config.py Wed Nov 04 12:58:48 2009 -0500
@@ -8,6 +8,9 @@
import ConfigParser
from galaxy.util import string_as_bool
+from galaxy import eggs
+import pkg_resources
+
log = logging.getLogger( __name__ )
def resolve_path( path, root ):
@@ -126,6 +129,16 @@
for path in self.tool_config, self.datatypes_config:
if not os.path.isfile(path):
raise ConfigurationError("File not found: %s" % path )
+ # Check job runners so the admin can scramble dependent egg.
+ if self.start_job_runners is not None:
+ runner_to_egg = dict( pbs = 'pbs_python', sge = 'DRMAA_python' )
+ for runner in self.start_job_runners.split( ',' ):
+ try:
+ pkg_resources.require( runner_to_egg[runner] )
+ except eggs.EggNotFetchable, e:
+ raise eggs.EggNotFetchable( 'You must scramble the %s egg to use the %s job runner. Instructions are available at:\n http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster' % ( runner_to_egg[runner], runner ) )
+ except KeyError:
+ raise Exception( 'No such job runner: %s. Please double-check the value of start_job_runners in universe_wsgi.ini' % runner )
def is_admin_user( self,user ):
"""
diff -r 92d395c8614b -r fdf07565eedf lib/galaxy/eggs/__init__.py
--- a/lib/galaxy/eggs/__init__.py Wed Nov 04 12:30:36 2009 -0500
+++ b/lib/galaxy/eggs/__init__.py Wed Nov 04 12:58:48 2009 -0500
@@ -14,11 +14,12 @@
# within tools. i don't know of any way around this. -ndc
galaxy_dir = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "..", "..", ".." ) )
-class NewEgg( Exception ):
- pass
-
class EggNotFetchable( Exception ):
- pass
+ def __init__( self, eggs ):
+ if type( eggs ) in ( list, tuple ):
+ self.eggs = eggs
+ else:
+ self.eggs = [ eggs ]
class PlatformNotSupported( Exception ):
pass
@@ -157,6 +158,14 @@
# scramble helper methods
def get_archive_path( self, url ):
return os.path.join( Egg.archive_dir, (url.rsplit( '/', 1 ))[1] )
+ def get_tld( self, names ):
+ tld = names[0].split( os.path.sep, 1 )[0]
+ for name in names:
+ try:
+ assert tld == name.split( os.path.sep, 1 )[0]
+ except:
+ raise Exception( "get_tld(): Archive contains multiple top-level directories!" )
+ return tld
def fetch_source( self ):
if not os.access( Egg.archive_dir, os.F_OK ):
os.makedirs( Egg.archive_dir )
@@ -195,7 +204,7 @@
log.warning( " %s" % self.buildpath )
def unpack_zip( self, source_path, unpack_path ):
z = zipfile.ZipFile( source_path, "r" )
- tld = ( z.namelist()[0].split( os.path.sep, 1 ) )[0]
+ tld = self.get_tld( z.namelist() )
cur = os.getcwd()
os.chdir( unpack_path )
for fn in z.namelist():
@@ -211,12 +220,12 @@
os.chdir( cur )
def unpack_tar( self, source_path, unpack_path ):
t = tarfile.open( source_path, "r" )
- tld = ( t.getnames()[0].split( os.path.sep, 1 ) )[0]
+ members = filter( lambda x: "ez_setup" not in x.name and "pax_global_header" != x.name, t.getmembers() )
+ tld = self.get_tld( [ x.name for x in members ] )
cur = os.getcwd()
os.chdir( unpack_path )
- for member in t.getmembers():
- if "ez_setup" not in member.name:
- t.extract( member )
+ for member in members:
+ t.extract( member )
t.close()
os.rename( tld, self.name )
os.chdir( cur )
@@ -265,7 +274,7 @@
self.eggs = {}
self.config = CSConfigParser()
self.repo = None
- self.no_download = []
+ self.no_auto = []
self.platform = { 'peak' : get_platform( platform=True, peak=True ), 'galaxy' : get_platform( platform=True, peak=False ) }
self.noplatform = { 'peak' : get_platform( platform=False, peak=True ), 'galaxy' : get_platform( platform=False, peak=False ) }
def parse( self ):
@@ -273,7 +282,7 @@
raise Exception( "unable to read egg config from %s" % Crate.config_file )
try:
self.repo = self.config.get( "general", "repository" )
- self.no_download = self.config.get( "general", "no_download" ).split()
+ self.no_auto = self.config.get( "general", "no_auto" ).split()
except ConfigParser.NoSectionError:
raise Exception( "eggs.ini is missing required section [general]" )
#except ConfigParser.NoOptionError:
@@ -316,19 +325,19 @@
return True
def fetch( self, ignore=[] ):
"""
- Fetch all eggs in the crate (ignoring any that you want to
- ignore). If your platform isn't available, it'll attempt to
- download all the noplatform eggs before failing.
+ Fetch all eggs in the crate (ignoring any that you want to
+ ignore). If your platform isn't available, it'll attempt to
+ download all the noplatform eggs before failing.
"""
skip_platform = False
- ignore.extend( self.no_download )
+ ignore.extend( self.no_auto )
+ missing = []
try:
f = urllib2.urlopen( "%s/%s" % ( self.repo, self.platform['galaxy'] ) )
f.close()
except urllib2.HTTPError, e:
if e.code == 404:
skip_platform = True
- missing = []
for egg in self.eggs.itervalues():
if ignore is not None:
if egg.name in ignore:
@@ -336,11 +345,18 @@
if skip_platform and egg.platform['galaxy'] == self.platform['galaxy']:
missing.append( egg.name )
continue
- egg.fetch()
+ try:
+ egg.fetch()
+ except EggNotFetchable:
+ missing.append( egg.name )
if skip_platform:
raise PlatformNotSupported( self.platform['galaxy'] )
+ if missing:
+ raise EggNotFetchable( missing )
return True
def scramble( self, ignore=None ):
+ # Crate-scrambling the no_auto eggs makes no sense
+ ignore.extend( self.no_auto )
for egg in self.eggs.itervalues():
if ignore is not None:
if egg.name in ignore:
@@ -379,21 +395,14 @@
if self.config.read( DistCrate.dist_config_file ) == []:
raise Exception( "unable to read dist egg config from %s" % DistCrate.dist_config_file )
try:
- self.hosts = self.dictize_list_of_tuples( self.config.items( "hosts" ) )
- self.groups = self.dictize_list_of_tuples( self.config.items( "groups" ) )
+ self.hosts = dict( self.config.items( "hosts" ) )
+ self.groups = dict( self.config.items( "groups" ) )
+ self.ignore = dict( self.config.items( "ignore" ) )
except ConfigParser.NoSectionError, e:
raise Exception( "eggs.ini is missing required section: %s" % e )
self.platforms = self.get_platforms( self.build_on )
self.noplatforms = self.get_platforms( 'noplatform' )
Crate.parse( self )
- def dictize_list_of_tuples( self, lot ):
- """
- Makes a list of 2-value tuples into a dict.
- """
- d = {}
- for k, v in lot:
- d[k] = v
- return d
def get_platforms( self, wanted ):
# find all the members of a group and process them
if self.groups.has_key( wanted ):
@@ -409,8 +418,8 @@
raise Exception( "unknown platform: %s" % wanted )
def parse_egg_section( self, eggs, type ):
"""
- Overrides the base class's method. Here we use the third arg
- to find out what type of egg we'll be building.
+ Overrides the base class's method. Here we use the third arg
+ to find out what type of egg we'll be building.
"""
if type == "platform":
platforms = self.platforms
@@ -418,14 +427,16 @@
platforms = self.noplatforms
for name, version in eggs:
for platform in platforms:
- # can't use the regular methods here because we're not
- # actually ON the target platform
+ # can't use the regular methods here because we're not
+ # actually ON the target platform
if type == "platform":
gplat = platform
pplat = platform.rsplit('-', 1)[0]
elif type == "noplatform":
gplat = "%s-noplatform" % platform.split('-', 1)[0]
pplat = platform.split('-', 1)[0]
+ if name in self.ignore and gplat in self.ignore[name].split():
+ continue
egg = Egg()
try:
egg.tag = self.config.get( "tags", name )
@@ -448,75 +459,31 @@
class GalaxyConfig:
config_file = os.path.join( galaxy_dir, "universe_wsgi.ini" )
+ always_conditional = ( 'GeneTrack', )
def __init__( self ):
self.config = ConfigParser.ConfigParser()
if self.config.read( GalaxyConfig.config_file ) == []:
raise Exception( "error: unable to read Galaxy config from %s" % GalaxyConfig.config_file )
# TODO: conditionals should really be handled better than this
def check_conditional( self, egg_name ):
- if egg_name == "psycopg2":
+ if egg_name == "pysqlite":
+ # SQLite is different since it can be specified in two config vars and defaults to True
try:
- if self.config.get( "app:main", "database_connection" ).startswith( "postgres://" ):
- return True
- else:
- return False
+ return self.config.get( "app:main", "database_connection" ).startswith( "sqlite://" )
+ except:
+ return True
+ else:
+ try:
+ return { "psycopg2": lambda: self.config.get( "app:main", "database_connection" ).startswith( "postgres://" ),
+ "MySQL_python": lambda: self.config.get( "app:main", "database_connection" ).startswith( "mysql://" ),
+ "DRMAA_python": lambda: "sge" in self.config.get( "app:main", "start_job_runners" ).split(","),
+ "pbs_python": lambda: "pbs" in self.config.get( "app:main", "start_job_runners" ).split(","),
+ "threadframe": lambda: self.config.get( "app:main", "use_heartbeat" ),
+ "guppy": lambda: self.config.get( "app:main", "use_memdump" ),
+ "GeneTrack": lambda: sys.version_info[:2] >= ( 2, 5 ),
+ }.get( egg_name, lambda: True )()
except:
return False
- elif egg_name == "pysqlite":
- try:
- # database connection is the sqlite alchemy dialect (not really
- # a documented usage in Galaxy, but it would work)
- if self.config.get( "app:main", "database_connection" ).startswith( "sqlite://" ):
- return True
- else:
- return False
- # database connection is unset, so sqlite is the default
- except:
- return True
- elif egg_name == "DRMAA_python":
- try:
- runners = self.config.get( "app:main", "start_job_runners" ).split(",")
- if "sge" in runners:
- return True
- else:
- return False
- except:
- return False
- elif egg_name == "pbs_python":
- try:
- runners = self.config.get( "app:main", "start_job_runners" ).split(",")
- if "pbs" in runners:
- return True
- else:
- return False
- except:
- return False
- elif egg_name == "threadframe":
- try:
- if self.config.get( "app:main", "use_heartbeat" ):
- return True
- else:
- return False
- except:
- return False
- elif egg_name == "guppy":
- try:
- if self.config.get( "app:main", "use_memdump" ):
- return True
- else:
- return False
- except:
- return False
- elif egg_name == "MySQL_python":
- try:
- if self.config.get( "app:main", "database_connection" ).startswith( "mysql://" ):
- return True
- else:
- return False
- except:
- return False
- else:
- return True
def require( pkg ):
# add the egg dirs to sys.path if they're not already there
diff -r 92d395c8614b -r fdf07565eedf scripts/fetch_eggs.py
--- a/scripts/fetch_eggs.py Wed Nov 04 12:30:36 2009 -0500
+++ b/scripts/fetch_eggs.py Wed Nov 04 12:58:48 2009 -0500
@@ -25,26 +25,28 @@
c.platform = { 'peak' : sys.argv[2].rsplit('-',1)[0], 'galaxy' : sys.argv[2] }
c.parse()
try:
+ galaxy_config = GalaxyConfig()
+ names = []
if len( sys.argv ) == 1:
- galaxy_config = GalaxyConfig()
- ignore = []
- for name in c.get_names():
- if not galaxy_config.check_conditional( name ):
- ignore.append( name )
- c.fetch( ignore=ignore )
+ names = c.get_names()
+ elif sys.argv[1] == 'all':
+ names = galaxy_config.always_conditional
else:
- if sys.argv[1] == 'all':
- c.fetch()
- else:
- egg = c.get( sys.argv[1] )
- if egg is None:
- print "error: %s not in eggs.ini" % sys.argv[1]
- sys.exit( 1 )
- egg.fetch()
+ # Fetch a specific egg
+ egg = c.get( sys.argv[1] )
+ if egg is None:
+ print "error: %s not in eggs.ini" % sys.argv[1]
+ sys.exit( 1 )
+ egg.fetch()
+ sys.exit( 0 )
+ ignore = filter( lambda x: not galaxy_config.check_conditional( x ), list( names ) )
+ c.fetch( ignore )
except EggNotFetchable, e:
- print "One of the python eggs necessary to run Galaxy couldn't be downloaded"
- print "automatically. You may want to try building it by hand with:"
- print " python scripts/scramble.py %s" % e
+ print "One or more of the python eggs necessary to run Galaxy couldn't be"
+ print "downloaded automatically. You may want to try building them by"
+ print "hand with:"
+ for egg in e.eggs:
+ print " python scripts/scramble.py %s" % egg
sys.exit( 1 )
except PlatformNotSupported, e:
print "Your platform (%s) is not supported." % e
diff -r 92d395c8614b -r fdf07565eedf scripts/scramble.py
--- a/scripts/scramble.py Wed Nov 04 12:30:36 2009 -0500
+++ b/scripts/scramble.py Wed Nov 04 12:58:48 2009 -0500
@@ -18,19 +18,19 @@
c = Crate()
c.parse()
+galaxy_config = GalaxyConfig()
+names = []
if len( sys.argv ) == 1:
- galaxy_config = GalaxyConfig()
- ignore = []
- for name in c.get_names():
- if not galaxy_config.check_conditional( name ):
- ignore.append( name )
- c.scramble( ignore=ignore )
+ names = c.get_names()
+elif sys.argv[1] == 'all':
+ names = galaxy_config.always_conditional
else:
- if sys.argv[1] == 'all':
- c.scramble()
- else:
- egg = c.get( sys.argv[1] )
- if egg is None:
- print "error: %s not in eggs.ini" % sys.argv[1]
- sys.exit( 1 )
- egg.scramble()
+# Scramble a specific egg
+ egg = c.get( sys.argv[1] )
+ if egg is None:
+ print "error: %s not in eggs.ini" % sys.argv[1]
+ sys.exit( 1 )
+ egg.scramble()
+ sys.exit( 0 )
+ignore = filter( lambda x: not galaxy_config.check_conditional( x ), list( names ) )
+c.scramble( ignore=ignore )
diff -r 92d395c8614b -r fdf07565eedf scripts/scramble/scripts/GeneTrack.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/scramble/scripts/GeneTrack.py Wed Nov 04 12:58:48 2009 -0500
@@ -0,0 +1,52 @@
+import os, sys, shutil
+
+# change back to the build dir
+if os.path.dirname( sys.argv[0] ) != "":
+ os.chdir( os.path.dirname( sys.argv[0] ) )
+
+# find setuptools
+scramble_lib = os.path.join( "..", "..", "..", "lib" )
+sys.path.append( scramble_lib )
+from ez_setup import use_setuptools
+use_setuptools( download_delay=8, to_dir=scramble_lib )
+from setuptools import *
+
+# get the tag
+if os.access( ".galaxy_tag", os.F_OK ):
+ tagfile = open( ".galaxy_tag", "r" )
+ tag = tagfile.readline().strip()
+else:
+ tag = None
+
+# in case you're running this by hand from a dirty module source dir
+for dir in [ "build", "dist" ]:
+ if os.access( dir, os.F_OK ):
+ print "scramble.py: removing dir:", dir
+ shutil.rmtree( dir )
+
+# reset args for distutils
+me = sys.argv[0]
+sys.argv = [ me ]
+sys.argv.append( "egg_info" )
+if tag is not None:
+ #sys.argv.append( "egg_info" )
+ sys.argv.append( "--tag-build=%s" %tag )
+# svn revision (if any) is handled directly in tag-build
+sys.argv.append( "--no-svn-revision" )
+sys.argv.append( "bdist_egg" )
+
+print "scramble.py: Creating setup.py for GeneTrack"
+setup_py = """from setuptools import setup, find_packages
+setup(
+ name = "GeneTrack",
+ version = "2.0.0-beta-1",
+ package_data = {'':["*.*"]},
+ packages = find_packages(),
+ zip_safe = True,
+)
+"""
+open( 'setup.py', 'w' ).write( setup_py )
+
+
+# do it
+execfile( "setup.py", globals(), locals() )
1
0
07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/91a3bbb52d7a
changeset: 2960:91a3bbb52d7a
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Wed Nov 04 12:15:53 2009 -0500
description:
Fix maf_utilities.src_split. Should resolve tickets #200, #215 and #220.
Note: Do not use bx.align.src_split in Galaxy.
diffstat:
lib/galaxy/datatypes/converters/maf_to_interval_converter.py | 2 +-
lib/galaxy/datatypes/converters/maf_to_interval_converter.xml | 2 +-
lib/galaxy/tools/util/maf_utilities.py | 11 +++++++----
3 files changed, 9 insertions(+), 6 deletions(-)
diffs (49 lines):
diff -r 4bca8f8ed94d -r 91a3bbb52d7a lib/galaxy/datatypes/converters/maf_to_interval_converter.py
--- a/lib/galaxy/datatypes/converters/maf_to_interval_converter.py Wed Nov 04 11:35:50 2009 -0500
+++ b/lib/galaxy/datatypes/converters/maf_to_interval_converter.py Wed Nov 04 12:15:53 2009 -0500
@@ -21,7 +21,7 @@
for block in bx.align.maf.Reader( open( input_name, 'r' ) ):
for c in maf_utilities.iter_components_by_src_start( block, species ):
if c is not None:
- out.write( "%s\t%i\t%i\t%s\n" % ( bx.align.src_split( c.src )[-1], c.get_forward_strand_start(), c.get_forward_strand_end(), c.strand ) )
+ out.write( "%s\t%i\t%i\t%s\n" % ( maf_utilities.src_split( c.src )[-1], c.get_forward_strand_start(), c.get_forward_strand_end(), c.strand ) )
count += 1
except Exception, e:
print >> sys.stderr, "There was a problem processing your input: %s" % e
diff -r 4bca8f8ed94d -r 91a3bbb52d7a lib/galaxy/datatypes/converters/maf_to_interval_converter.xml
--- a/lib/galaxy/datatypes/converters/maf_to_interval_converter.xml Wed Nov 04 11:35:50 2009 -0500
+++ b/lib/galaxy/datatypes/converters/maf_to_interval_converter.xml Wed Nov 04 12:15:53 2009 -0500
@@ -1,4 +1,4 @@
-<tool id="CONVERTER_maf_to_interval_0" name="Convert MAF to Genomic Intervals" version="1.0.1">
+<tool id="CONVERTER_maf_to_interval_0" name="Convert MAF to Genomic Intervals" version="1.0.2">
<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
<command interpreter="python">maf_to_interval_converter.py $output1 $input1 ${input1.metadata.dbkey}</command>
<inputs>
diff -r 4bca8f8ed94d -r 91a3bbb52d7a lib/galaxy/tools/util/maf_utilities.py
--- a/lib/galaxy/tools/util/maf_utilities.py Wed Nov 04 11:35:50 2009 -0500
+++ b/lib/galaxy/tools/util/maf_utilities.py Wed Nov 04 12:15:53 2009 -0500
@@ -20,9 +20,12 @@
SRC_SPLIT_CHAR = '.'
def src_split( src ):
- spec, chrom = bx.align.maf.src_split( src )
- if None in [ spec, chrom ]:
- spec = chrom = src
+ fields = src.split( SRC_SPLIT_CHAR, 1 )
+ spec = fields.pop( 0 )
+ if fields:
+ chrom = fields
+ else:
+ chrom = spec
return spec, chrom
def src_merge( spec, chrom, contig = None ):
@@ -530,7 +533,7 @@
if suffix:
header = "%s%s" % ( header, suffix )
else:
- header = "%s%s" % ( header, bx.align.src_split( component.src )[ 0 ] )
+ header = "%s%s" % ( header, src_split( component.src )[ 0 ] )
return header
def get_attributes_from_fasta_header( header ):
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/0e78d558fe57
changeset: 2958:0e78d558fe57
user: rc
date: Wed Nov 04 10:16:20 2009 -0500
description:
Fixed the ordering in selectboxes
Fixed a bug in editing samples in the request page
diffstat:
lib/galaxy/model/mapping.py | 3 ++-
lib/galaxy/web/controllers/forms.py | 4 +++-
lib/galaxy/web/controllers/requests.py | 9 +++++----
lib/galaxy/web/controllers/requests_admin.py | 22 ++++++++++++----------
templates/admin/requests/show_request.mako | 2 +-
templates/requests/show_request.mako | 4 ----
test/base/twilltestcase.py | 2 +-
7 files changed, 24 insertions(+), 22 deletions(-)
diffs (191 lines):
diff -r 5b2d593d9aed -r 0e78d558fe57 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Wed Nov 04 10:04:51 2009 -0500
+++ b/lib/galaxy/model/mapping.py Wed Nov 04 10:16:20 2009 -0500
@@ -652,7 +652,8 @@
primaryjoin=( Request.table.c.user_id == User.table.c.id ),
backref="requests" ),
samples=relation( Sample,
- primaryjoin=( Request.table.c.id == Sample.table.c.request_id ) ),
+ primaryjoin=( Request.table.c.id == Sample.table.c.request_id ),
+ order_by=asc(Sample.table.c.update_time) ),
folder=relation( LibraryFolder,
primaryjoin=( Request.table.c.folder_id == LibraryFolder.table.c.id ) ),
library=relation( Library,
diff -r 5b2d593d9aed -r 0e78d558fe57 lib/galaxy/web/controllers/forms.py
--- a/lib/galaxy/web/controllers/forms.py Wed Nov 04 10:04:51 2009 -0500
+++ b/lib/galaxy/web/controllers/forms.py Wed Nov 04 10:16:20 2009 -0500
@@ -60,7 +60,9 @@
form_type_selectbox.add_option('Select one', 'none', selected=True)
else:
form_type_selectbox.add_option('Select one', 'none')
- for ft in trans.app.model.FormDefinition.types.items():
+ fd_types = trans.app.model.FormDefinition.types.items()
+ fd_types.sort()
+ for ft in fd_types:
if selected == ft[1]:
form_type_selectbox.add_option(ft[1], ft[1], selected=True)
else:
diff -r 5b2d593d9aed -r 0e78d558fe57 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Wed Nov 04 10:04:51 2009 -0500
+++ b/lib/galaxy/web/controllers/requests.py Wed Nov 04 10:16:20 2009 -0500
@@ -304,8 +304,7 @@
s = trans.app.model.Sample(sample_name, '', request, form_values)
s.flush()
else:
- for index in range(len(current_samples)):
- sample_index = index
+ for sample_index in range(len(current_samples)):
sample_name = current_samples[sample_index][0]
new_sample_name = util.restore_text( params.get( 'sample_%i_name' % sample_index, '' ) )
sample_values = []
@@ -379,8 +378,10 @@
details=details,
edit_mode=edit_mode)
def __select_request_type(self, trans, rtid):
+ requesttype_list = trans.sa_session.query( trans.app.model.RequestType )\
+ .order_by( trans.app.model.RequestType.name.asc() )
rt_ids = ['none']
- for rt in trans.sa_session.query( trans.app.model.RequestType ):
+ for rt in requesttype_list:
if not rt.deleted:
rt_ids.append(str(rt.id))
select_reqtype = SelectField('select_request_type',
@@ -390,7 +391,7 @@
select_reqtype.add_option('Select one', 'none', selected=True)
else:
select_reqtype.add_option('Select one', 'none')
- for rt in trans.sa_session.query( trans.app.model.RequestType ):
+ for rt in requesttype_list:
if not rt.deleted:
if rtid == rt.id:
select_reqtype.add_option(rt.name, rt.id, selected=True)
diff -r 5b2d593d9aed -r 0e78d558fe57 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Wed Nov 04 10:04:51 2009 -0500
+++ b/lib/galaxy/web/controllers/requests_admin.py Wed Nov 04 10:16:20 2009 -0500
@@ -266,8 +266,10 @@
#---- Request Creation ----------------------------------------------------------
#
def __select_request_type(self, trans, rtid):
+ requesttype_list = trans.sa_session.query( trans.app.model.RequestType )\
+ .order_by( trans.app.model.RequestType.name.asc() )
rt_ids = ['none']
- for rt in trans.sa_session.query( trans.app.model.RequestType ):
+ for rt in requesttype_list:
if not rt.deleted:
rt_ids.append(str(rt.id))
select_reqtype = SelectField('select_request_type',
@@ -277,7 +279,7 @@
select_reqtype.add_option('Select one', 'none', selected=True)
else:
select_reqtype.add_option('Select one', 'none')
- for rt in trans.sa_session.query( trans.app.model.RequestType ):
+ for rt in requesttype_list:
if not rt.deleted:
if rtid == rt.id:
select_reqtype.add_option(rt.name, rt.id, selected=True)
@@ -799,7 +801,7 @@
# save all the new/unsaved samples entered by the user
if edit_mode == 'False':
for index in range(len(current_samples)-len(request.samples)):
- sample_index = index + len(request.samples)
+ sample_index = len(request.samples)
sample_name = util.restore_text( params.get( 'sample_%i_name' % sample_index, '' ) )
sample_values = []
for field_index in range(len(request.type.sample_form.fields)):
@@ -992,7 +994,7 @@
bar_code = util.restore_text(params.get('sample_%i_bar_code' % index, ''))
# check for empty bar code
if not bar_code.strip():
- msg = 'Please fill the bar code for sample <b>%s</b>.' % request.samples[index].name
+ msg = 'Please fill the barcode for sample <b>%s</b>.' % request.samples[index].name
break
# check all the unsaved bar codes
count = 0
@@ -1000,8 +1002,8 @@
if bar_code == util.restore_text(params.get('sample_%i_bar_code' % i, '')):
count = count + 1
if count > 1:
- msg = '''The bar code <b>%s</b> of sample <b>%s</b> already belongs
- another sample in this request. The sample bar codes must
+ msg = '''The barcode <b>%s</b> of sample <b>%s</b> belongs
+ another sample in this request. The sample barcodes must
be unique throughout the system''' % \
(bar_code, request.samples[index].name)
break
@@ -1009,7 +1011,7 @@
all_samples = trans.sa_session.query( trans.app.model.Sample )
for sample in all_samples:
if bar_code == sample.bar_code:
- msg = '''The bar code <b>%s</b> of sample <b>%s</b> already
+ msg = '''The bar code <b>%s</b> of sample <b>%s</b>
belongs another sample. The sample bar codes must be
unique throughout the system''' % \
(bar_code, request.samples[index].name)
@@ -1044,7 +1046,7 @@
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='bar_codes',
request_id=request.id,
- msg='Bar codes has been saved for this request',
+ msg='Bar codes have been saved for this request',
messagetype='done'))
def __set_request_state(self, request):
@@ -1162,8 +1164,8 @@
if params.get( 'create', False ):
return trans.fill_template( '/admin/requests/create_request_type.mako',
request_forms=get_all_forms( trans,
- filter=dict(deleted=False),
- form_type=trans.app.model.FormDefinition.types.REQUEST ),
+ filter=dict(deleted=False),
+ form_type=trans.app.model.FormDefinition.types.REQUEST ),
sample_forms=get_all_forms( trans,
filter=dict(deleted=False),
form_type=trans.app.model.FormDefinition.types.SAMPLE ),
diff -r 5b2d593d9aed -r 0e78d558fe57 templates/admin/requests/show_request.mako
--- a/templates/admin/requests/show_request.mako Wed Nov 04 10:04:51 2009 -0500
+++ b/templates/admin/requests/show_request.mako Wed Nov 04 10:16:20 2009 -0500
@@ -195,7 +195,7 @@
<div class="toolForm">
##<div class="toolFormTitle">Samples (${len(request.samples)})</div>
- <form id="edit_form" name="edit_form" action="${h.url_for( controller='requests_admin', action='show_request' )}" enctype="multipart/form-data" method="post" >
+ <form id="show_request" name="show_request" action="${h.url_for( controller='requests_admin', action='show_request', edit_mode=edit_mode )}" enctype="multipart/form-data" method="post" >
<div class="form-row">
%if current_samples:
%if not request.type.sample_form.layout:
diff -r 5b2d593d9aed -r 0e78d558fe57 templates/requests/show_request.mako
--- a/templates/requests/show_request.mako Wed Nov 04 10:04:51 2009 -0500
+++ b/templates/requests/show_request.mako Wed Nov 04 10:16:20 2009 -0500
@@ -224,20 +224,16 @@
%endif
</td>
<td>
- ##<div class="form-row">
<label>Import from csv file</label>
<input type="file" name="file_data" />
<input type="submit" name="import_samples_button" value="Import samples"/>
- ##</div>
</td>
<td>
- ##<div class="form-row">
%if current_samples:
<label>Copy from sample</label>
${sample_copy.get_html()}
%endif
<input type="submit" name="add_sample_button" value="Add New"/>
- ##</div>
</td>
</div>
</tr>
diff -r 5b2d593d9aed -r 0e78d558fe57 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Nov 04 10:04:51 2009 -0500
+++ b/test/base/twilltestcase.py Wed Nov 04 10:16:20 2009 -0500
@@ -1272,7 +1272,7 @@
for index, bar_code in enumerate(bar_codes):
tc.fv( "1", "sample_%i_bar_code" % index, bar_code )
tc.submit( "save_bar_codes" )
- self.check_page_for_string( 'Bar codes has been saved for this request' )
+ self.check_page_for_string( 'Bar codes have been saved for this request' )
def change_sample_state( self, sample_name, sample_id, new_state_id, new_state_name, comment='' ):
self.home()
self.visit_url( "%s/requests_admin/show_events?sample_id=%i" % (self.url, sample_id) )
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/4bca8f8ed94d
changeset: 2959:4bca8f8ed94d
user: James Taylor <james(a)jamestaylor.org>
date: Wed Nov 04 11:35:50 2009 -0500
description:
Fix tool test specification by id
diffstat:
test/functional/test_toolbox.py | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (9 lines):
diff -r 0e78d558fe57 -r 4bca8f8ed94d test/functional/test_toolbox.py
--- a/test/functional/test_toolbox.py Wed Nov 04 10:16:20 2009 -0500
+++ b/test/functional/test_toolbox.py Wed Nov 04 11:35:50 2009 -0500
@@ -128,4 +128,4 @@
for j, testdef in enumerate( tool.tests ):
name = "%s ( %s ) > %s" % ( tool.name, tool.id, testdef.name )
testcase = get_case( testdef, name )
- G[ 'testcase_%d_%d' % ( i, j ) ] = testcase
+ G[ testcase.__name__ ] = testcase
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/14e178f656ba
changeset: 2953:14e178f656ba
user: jeremy goecks <jeremy.goecks(a)emory.edu>
date: Tue Nov 03 16:53:14 2009 -0500
description:
Added 'History' column to HDA grid.
diffstat:
lib/galaxy/web/controllers/dataset.py | 26 +++++++++++++++++++-------
templates/dataset/grid.mako | 11 ++++++++---
templates/root/history_common.mako | 2 +-
3 files changed, 28 insertions(+), 11 deletions(-)
diffs (116 lines):
diff -r 6491acd0bef2 -r 14e178f656ba lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Tue Nov 03 15:39:51 2009 -0500
+++ b/lib/galaxy/web/controllers/dataset.py Tue Nov 03 16:53:14 2009 -0500
@@ -45,6 +45,11 @@
"""
class HistoryDatasetAssociationListGrid( grids.Grid ):
+ # Custom columns for grid.
+ class HistoryColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, hda):
+ return hda.history.name
+
class StatusColumn( grids.GridColumn ):
def get_value( self, trans, grid, hda ):
if hda.deleted:
@@ -52,6 +57,7 @@
return ""
def get_link( self, trans, grid, hda ):
return None
+
class TagsColumn( grids.GridColumn ):
def __init__(self, col_name, key, filterable):
grids.GridColumn.__init__(self, col_name, key=key, filterable=filterable)
@@ -107,14 +113,16 @@
return accepted_filters
# Grid definition
- title = "Stored Datasets"
+ title = "Saved Datasets"
model_class = model.HistoryDatasetAssociation
template='/dataset/grid.mako'
default_sort_key = "-create_time"
columns = [
- grids.GridColumn( "Name", key="name",
- # Link name to dataset's history.
+ grids.GridColumn( "Name", key="name",
+ # Link name to dataset's history.
link=( lambda item: iff( item.history.deleted, None, dict( operation="switch", id=item.id ) ) ) ),
+ HistoryColumn( "History", key="history",
+ link=( lambda item: iff( item.history.deleted, None, dict( operation="switch_history", id=item.id ) ) ) ),
TagsColumn( "Tags", key="tags", filterable=True ),
StatusColumn( "Status", key="deleted", attach_popup=False ),
grids.GridColumn( "Created", key="create_time", format=time_ago ),
@@ -285,8 +293,9 @@
log.warn( "Invalid history_dataset_association id '%r' passed to list", hda_id )
if hdas:
-
- if operation == "switch":
+ if operation == "switch" or operation == "switch_history":
+ # Switch to a history that the HDA resides in.
+
# Convert hda to histories.
histories = []
for hda in hdas:
@@ -295,8 +304,11 @@
# Use history controller to switch the history. TODO: is this reasonable?
status, message = trans.webapp.controllers['history']._list_switch( trans, histories )
- # Current history changed, refresh history frame
- trans.template_context['refresh_frames'] = ['history']
+ # Current history changed, refresh history frame; if switching to a dataset, set hda seek.
+ trans.template_context['refresh_frames'] = ['history']
+ if operation == "switch":
+ hda_ids = [ trans.security.encode_id( hda.id ) for hda in hdas ]
+ trans.template_context[ 'seek_hda_ids' ] = hda_ids
# Render the list view
return self.stored_list_grid( trans, status=status, message=message, **kwargs )
diff -r 6491acd0bef2 -r 14e178f656ba templates/dataset/grid.mako
--- a/templates/dataset/grid.mako Tue Nov 03 15:39:51 2009 -0500
+++ b/templates/dataset/grid.mako Tue Nov 03 16:53:14 2009 -0500
@@ -92,8 +92,6 @@
t.autocomplete("${h.url_for( controller='tag', action='tag_autocomplete_data', item_class='HistoryDatasetAssociation' )}", autocomplete_options);
- //t.addClass("tag-input");
-
return t;
});
## Can this be moved into base.mako?
@@ -110,7 +108,14 @@
%endif
%if 'history' in refresh_frames:
if ( parent.frames && parent.frames.galaxy_history ) {
- parent.frames.galaxy_history.location.href="${h.url_for( controller='root', action='history', hda_id=str(ids) )}";
+ ## If available, include HDA ids to seek to in history request.
+ <%
+ hda_id = None
+ if seek_hda_ids:
+ hda_id = str( seek_hda_ids[0] ) # Use only the first id.
+ print hda_id
+ %>
+ parent.frames.galaxy_history.location.href="${h.url_for( controller='root', action='history', hda_id=hda_id )}";
if ( parent.force_right_panel ) {
parent.force_right_panel( 'show' );
}
diff -r 6491acd0bef2 -r 14e178f656ba templates/root/history_common.mako
--- a/templates/root/history_common.mako Tue Nov 03 15:39:51 2009 -0500
+++ b/templates/root/history_common.mako Tue Nov 03 16:53:14 2009 -0500
@@ -1,6 +1,7 @@
<% _=n_ %>
## Render the dataset `data` as history item, using `hid` as the displayed id
<%def name="render_dataset( data, hid, show_deleted_on_refresh = False, user_owns_dataset = True )">
+ <a name="${trans.security.encode_id( data.id )}"></a>
<%
if data.state in ['no state','',None]:
data_state = "queued"
@@ -21,7 +22,6 @@
%endif
## Header row for history items (name, state, action buttons)
- <a name="${trans.security.encode_id(data.id)}"></a>
<div style="overflow: hidden;" class="historyItemTitleBar">
<div class="historyItemButtons">
%if data_state == "upload":
1
0
07 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/d7c66019de13
changeset: 2955:d7c66019de13
user: Kanwei Li <kanwei(a)gmail.com>
date: Tue Nov 03 19:16:01 2009 -0500
description:
trackster: much greater resolution for line tracks (might still need to tweak BLOCK_SIZE), fixed stacking issue in feature tracks for features sharing same name)
diffstat:
lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py | 4 +-
lib/galaxy/visualization/tracks/data/array_tree.py | 45 ++++++-----
lib/galaxy/visualization/tracks/data/interval_index.py | 6 +-
lib/galaxy/web/controllers/tracks.py | 6 +-
static/scripts/trackster.js | 46 +++++++----
static/trackster.css | 1 +
templates/tracks/browser.mako | 1 +
7 files changed, 65 insertions(+), 44 deletions(-)
diffs (293 lines):
diff -r 46791b5a653b -r d7c66019de13 lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py
--- a/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py Tue Nov 03 17:16:40 2009 -0500
+++ b/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py Tue Nov 03 19:16:01 2009 -0500
@@ -8,6 +8,8 @@
from bx.arrays.array_tree import *
from bx.arrays.wiggle import IntervalReader
+BLOCK_SIZE = 1000
+
def main():
input_fname = sys.argv[1]
@@ -16,7 +18,7 @@
reader = IntervalReader( open( input_fname ) )
# Fill array from wiggle
- d = array_tree_dict_from_wiggle_reader( reader, {} )
+ d = array_tree_dict_from_wiggle_reader( reader, {}, block_size = BLOCK_SIZE )
for value in d.itervalues():
value.root.build_summary()
diff -r 46791b5a653b -r d7c66019de13 lib/galaxy/visualization/tracks/data/array_tree.py
--- a/lib/galaxy/visualization/tracks/data/array_tree.py Tue Nov 03 17:16:40 2009 -0500
+++ b/lib/galaxy/visualization/tracks/data/array_tree.py Tue Nov 03 19:16:01 2009 -0500
@@ -31,11 +31,16 @@
f.close()
return { 'max': float( max(root_summary.maxs) ), 'min': float( min(root_summary.mins) ) }
- def get_data( self, chrom, start, end ):
+ def get_data( self, chrom, start, end, **kwargs ):
start = int( start )
end = int( end )
- level = int( ceil( log( end - start, BLOCK_SIZE ) ) ) - 1
-
+ resolution = max(1, ceil(float(kwargs['resolution'])))
+
+ level = int( floor( log( resolution, BLOCK_SIZE ) ) )
+ level = max( level, 0 )
+ stepsize = BLOCK_SIZE ** level
+ step1 = stepsize * BLOCK_SIZE
+
# Open the file
f = open( self.dataset.file_name )
d = FileArrayTreeDict( f )
@@ -47,22 +52,20 @@
# Is the requested level valid?
assert 0 <= level <= chrom_array_tree.levels
# Calculate the actual start/range/step of the block we're getting
- size = BLOCK_SIZE ** (level+1)
- block_start = ( start // BLOCK_SIZE ) * BLOCK_SIZE
- block_step = size // BLOCK_SIZE
- indexes = range( block_start, block_start + size, block_step )
- # Return either data point or a summary depending on the level
- if level > 0:
- s = chrom_array_tree.get_summary( start, level )
- f.close()
- if s is not None:
- return zip( indexes, map( float, s.sums / s.counts ) )
+
+ results = []
+ for block_start in range( start, end, stepsize * BLOCK_SIZE ):
+ # print block_start
+ # Return either data point or a summary depending on the level
+ indexes = range( block_start, block_start + stepsize * BLOCK_SIZE, stepsize )
+ if level > 0:
+ s = chrom_array_tree.get_summary( block_start, level )
+ if s is not None:
+ results.extend( zip( indexes, map( float, s.sums / s.counts ) ) )
else:
- return None
- else:
- v = chrom_array_tree.get_leaf( start )
- f.close()
- if v is not None:
- return zip( indexes, map( float, v ) )
- else:
- return None
\ No newline at end of file
+ v = chrom_array_tree.get_leaf( block_start )
+ if v is not None:
+ results.extend( zip( indexes, map( float, v ) ) )
+
+ f.close()
+ return results
diff -r 46791b5a653b -r d7c66019de13 lib/galaxy/visualization/tracks/data/interval_index.py
--- a/lib/galaxy/visualization/tracks/data/interval_index.py Tue Nov 03 17:16:40 2009 -0500
+++ b/lib/galaxy/visualization/tracks/data/interval_index.py Tue Nov 03 19:16:01 2009 -0500
@@ -11,17 +11,18 @@
self.original_dataset = original_dataset
self.converted_dataset = converted_dataset
- def get_data( self, chrom, start, end ):
+ def get_data( self, chrom, start, end, **kwargs ):
start, end = int(start), int(end)
chrom = str(chrom)
source = open( self.original_dataset.file_name )
index = Indexes( self.converted_dataset.file_name )
results = []
+ uid = 0
for start, end, offset in index.find(chrom, start, end):
source.seek(offset)
feature = source.readline().split()
- payload = { 'start': start, 'end': end, 'name': feature[3] }
+ payload = { 'uid': uid, 'start': start, 'end': end, 'name': feature[3] }
try:
payload['strand'] = feature[5]
except IndexError:
@@ -41,5 +42,6 @@
pass
results.append(payload)
+ uid += 1
return results
diff -r 46791b5a653b -r d7c66019de13 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py Tue Nov 03 17:16:40 2009 -0500
+++ b/lib/galaxy/web/controllers/tracks.py Tue Nov 03 19:16:01 2009 -0500
@@ -184,7 +184,7 @@
return manifest
@web.json
- def data( self, trans, dataset_id, track_type, chrom, low, high, stats=False ):
+ def data( self, trans, dataset_id, track_type, chrom, low, high, **kwargs ):
"""
Called by the browser to request a block of data
"""
@@ -216,10 +216,10 @@
data_provider = dataset_type_to_data_provider[ converted_dataset_type ]( converted_dataset, dataset )
# Return stats if we need them
- if stats: return data_provider.get_stats( chrom )
+ if 'stats' in kwargs: return data_provider.get_stats( chrom )
# Get the requested chunk of data
- return data_provider.get_data( chrom, low, high )
+ return data_provider.get_data( chrom, low, high, **kwargs )
def __dataset_as_type( self, trans, dataset, type ):
"""
diff -r 46791b5a653b -r d7c66019de13 static/scripts/trackster.js
--- a/static/scripts/trackster.js Tue Nov 03 17:16:40 2009 -0500
+++ b/static/scripts/trackster.js Tue Nov 03 19:16:01 2009 -0500
@@ -1,13 +1,15 @@
/* Trackster
2009, James Taylor, Kanwei Li
*/
+var DEBUG = false;
var DENSITY = 1000,
DATA_ERROR = "There was an error in indexing this dataset.",
DATA_NONE = "No data for this chrom/contig.",
DATA_PENDING = "Currently indexing... please wait",
DATA_LOADING = "Loading data...",
- CACHED_TILES = 10,
+ CACHED_TILES_FEATURE = 10,
+ CACHED_TILES_LINE = 30,
CACHED_DATA = 20,
CONTEXT = $("<canvas></canvas>").get(0).getContext("2d"),
RIGHT_STRAND, LEFT_STRAND;
@@ -104,6 +106,9 @@
this.high = Math.ceil(high);
this.center = Math.round( this.low + (this.high - this.low) / 2 );
+ // 10^log10(range / DENSITY) Close approximation for browser window, assuming DENSITY = window width
+ this.resolution = Math.pow( 10, Math.ceil( Math.log( (this.high - this.low) / DENSITY ) / Math.LN10 ) );
+
// Overview
$("#overview-box").css( {
left: ( this.low / this.span ) * $("#overview-viewport").width(),
@@ -157,18 +162,16 @@
});
var TiledTrack = function() {
- this.tile_cache = new Cache(CACHED_TILES);
- // this.tile_cache = {};
};
$.extend( TiledTrack.prototype, Track.prototype, {
draw: function() {
var low = this.view.low,
high = this.view.high,
- range = high - low;
-
- var resolution = Math.pow( 10, Math.ceil( Math.log( range / DENSITY ) / Math.log( 10 ) ) );
- resolution = Math.max( resolution, 0.1 );
- resolution = Math.min( resolution, 1000000 );
+ range = high - low,
+ resolution = this.view.resolution;
+
+
+ if (DEBUG) { $("#debug").text(resolution); }
var parent_element = $("<div style='position: relative;'></div>");
this.content_div.children( ":first" ).remove();
@@ -187,7 +190,7 @@
// console.log("cached tile " + tile_index);
var tile_low = tile_index * DENSITY * resolution;
cached.css( {
- left: ( tile_low - this.view.low ) * w_scale
+ left: ( tile_low - low ) * w_scale
});
// Our responsibility to move the element to the new parent
parent_element.append( cached );
@@ -229,6 +232,7 @@
});
var LineTrack = function ( name, dataset_id, height ) {
+ this.tile_cache = new Cache(CACHED_TILES_LINE);
Track.call( this, name, $("#viewport") );
TiledTrack.call( this );
@@ -236,6 +240,7 @@
this.height_px = (height ? height : 100);
this.container_div.addClass( "line-track" );
this.dataset_id = dataset_id;
+ this.data_queue = {};
this.cache = new Cache(CACHED_DATA); // We need to cache some data because of
// asynchronous calls
};
@@ -282,11 +287,17 @@
low = position * DENSITY * resolution,
high = ( position + 1 ) * DENSITY * resolution,
key = resolution + "_" + position;
-
- $.getJSON( data_url, { track_type: this.track_type, chrom: this.view.chrom, low: low, high: high, dataset_id: this.dataset_id }, function ( data ) {
- track.cache[key] = data;
- $(document).trigger( "redraw" );
- });
+
+ if (!track.data_queue[key]) {
+ track.data_queue[key] = true;
+ $.getJSON( data_url, { track_type: this.track_type, chrom: this.view.chrom,
+ low: low, high: high, dataset_id: this.dataset_id,
+ resolution: this.view.resolution }, function ( data ) {
+ track.cache[key] = data;
+ delete track.data_queue[key];
+ track.draw();
+ });
+ }
},
draw_tile: function( resolution, tile_index, parent_element, w_scale ) {
if (!this.vertical_range) { // We don't have the necessary information yet
@@ -340,6 +351,7 @@
});
var FeatureTrack = function ( name, dataset_id, height ) {
+ this.tile_cache = new Cache(CACHED_TILES_FEATURE);
Track.call( this, name, $("#viewport") );
TiledTrack.call( this );
@@ -409,9 +421,9 @@
if (end_ary[j] === undefined || end_ary[j] < f_start) {
end_ary[j] = f_end;
if (include_labels) {
- this.zi_slots[feature.name] = j;
+ this.zi_slots[feature.uid] = j;
} else {
- this.zo_slots[feature.name] = j;
+ this.zo_slots[feature.uid] = j;
}
break;
}
@@ -466,7 +478,7 @@
if (feature.start <= tile_high && feature.end >= tile_low) {
var f_start = Math.floor( Math.max(0, (feature.start - tile_low) * w_scale) ),
f_end = Math.ceil( Math.min(width, (feature.end - tile_low) * w_scale) ),
- y_center = this.slots[feature.name] * this.vertical_gap;
+ y_center = this.slots[feature.uid] * this.vertical_gap;
var thickness, y_start, thick_start = null, thick_end = null;
if (feature.thick_start && feature.thick_end) {
diff -r 46791b5a653b -r d7c66019de13 static/trackster.css
--- a/static/trackster.css Tue Nov 03 17:16:40 2009 -0500
+++ b/static/trackster.css Tue Nov 03 19:16:01 2009 -0500
@@ -25,6 +25,7 @@
font-size: 100%;
}
+/*canvas{ border-right: 1px solid red; } /* debugging */
#nav {
padding: 0.5em 0;
background:#cccccc;
diff -r 46791b5a653b -r d7c66019de13 templates/tracks/browser.mako
--- a/templates/tracks/browser.mako Tue Nov 03 17:16:40 2009 -0500
+++ b/templates/tracks/browser.mako Tue Nov 03 19:16:01 2009 -0500
@@ -122,6 +122,7 @@
<a href="#" onclick="javascript:view.zoom_in();view.redraw();">+</a>
<a href="#" onclick="javascript:view.zoom_out();view.redraw();">-</a>
</form>
+ <div id="debug" style="float: right"></div>
</div>
</div>
</div>
1
0
details: http://www.bx.psu.edu/hg/galaxy/rev/d872c1e16afb
changeset: 2950:d872c1e16afb
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Nov 03 12:52:01 2009 -0500
description:
imported patch alchemy05_fixes_02
diffstat:
lib/galaxy/jobs/__init__.py | 6 +++---
lib/galaxy/model/mapping.py | 4 ++--
lib/galaxy/model/mapping_tests.py | 15 ++++++++-------
lib/galaxy/model/migrate/versions/0025_user_info.py | 4 ++--
lib/galaxy/tools/actions/__init__.py | 7 ++++---
lib/galaxy/tools/actions/metadata.py | 5 +++--
lib/galaxy/tools/actions/upload_common.py | 4 +++-
lib/galaxy/web/controllers/async.py | 6 +++---
lib/galaxy/web/controllers/dataset.py | 4 ++--
lib/galaxy/web/controllers/library.py | 6 +++---
lib/galaxy/web/controllers/library_admin.py | 12 ++++++++----
lib/galaxy/web/controllers/requests.py | 2 +-
lib/galaxy/web/controllers/root.py | 10 +++++-----
lib/galaxy/web/controllers/tool_runner.py | 2 --
lib/galaxy/web/framework/__init__.py | 23 ++++++++++++++---------
test/functional/test_forms_and_requests.py | 1 +
test/functional/test_security_and_libraries.py | 2 +-
test/functional/test_user_info.py | 3 ++-
tools/data_source/microbial_import_code.py | 6 +++---
tools/filters/lav_to_bed_code.py | 1 -
tools/maf/maf_to_bed_code.py | 4 +---
21 files changed, 69 insertions(+), 58 deletions(-)
diffs (567 lines):
diff -r 133252175425 -r d872c1e16afb lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/jobs/__init__.py Tue Nov 03 12:52:01 2009 -0500
@@ -199,7 +199,7 @@
try:
# Clear the session for each job so we get fresh states for
# job and all datasets
- self.sa_session.clear()
+ self.sa_session.expunge_all()
# Get the real job entity corresponding to the wrapper (if we
# are tracking in the database this is probably cached in
# the session from the origianl query above)
@@ -346,7 +346,7 @@
Prepare the job to run by creating the working directory and the
config files.
"""
- self.sa_session.clear() #this prevents the metadata reverting that has been seen in conjunction with the PBS job runner
+ self.sa_session.expunge_all() #this prevents the metadata reverting that has been seen in conjunction with the PBS job runner
if not os.path.exists( self.working_directory ):
os.mkdir( self.working_directory )
# Restore parameters from the database
@@ -477,7 +477,7 @@
the contents of the output files.
"""
# default post job setup
- self.sa_session.clear()
+ self.sa_session.expunge_all()
job = self.sa_session.query( model.Job ).get( self.job_id )
# if the job was deleted, don't finish it
if job.state == job.states.DELETED:
diff -r 133252175425 -r d872c1e16afb lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/model/mapping.py Tue Nov 03 12:52:01 2009 -0500
@@ -817,7 +817,7 @@
user=relation( User, backref="roles" ),
non_private_roles=relation( User,
backref="non_private_roles",
- primaryjoin=( ( User.table.c.id == UserRoleAssociation.table.c.user_id ) & ( UserRoleAssociation.table.c.role_id == Role.table.c.id ) & not_( Role.table.c.name == User.table.c.email & Role.table.c.type == 'private' ) ) ),
+ primaryjoin=( ( User.table.c.id == UserRoleAssociation.table.c.user_id ) & ( UserRoleAssociation.table.c.role_id == Role.table.c.id ) & not_( Role.table.c.name == User.table.c.email ) ) ),
role=relation( Role )
)
)
@@ -1134,7 +1134,7 @@
# Pack everything into a bunch
result = Bunch( **globals() )
result.engine = engine
- result.flush = lambda *args, **kwargs: Session.flush( *args, **kwargs )
+ # model.flush() has been removed.
result.session = Session
# For backward compatibility with "model.context.current"
result.context = Session
diff -r 133252175425 -r d872c1e16afb lib/galaxy/model/mapping_tests.py
--- a/lib/galaxy/model/mapping_tests.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/model/mapping_tests.py Tue Nov 03 12:52:01 2009 -0500
@@ -19,16 +19,17 @@
d1 = model.HistoryDatasetAssociation( extension="interval", metadata=dict(chromCol=1,startCol=2,endCol=3 ), history=h2, create_dataset=True )
#h2.queries.append( q1 )
#h2.queries.append( model.Query( "h2->q2" ) )
- model.context.current.flush()
- model.context.current.clear()
+ model.session.add_all( ( u, h1, h2, d1 ) )
+ model.session.flush()
+ model.session.expunge_all()
# Check
- users = model.context.current.query( model.User ).all()
+ users = model.session.query( model.User ).all()
assert len( users ) == 1
assert users[0].email == "james(a)foo.bar.baz"
assert users[0].password == "password"
assert len( users[0].histories ) == 1
assert users[0].histories[0].name == "History 1"
- hists = model.context.current.query( model.History ).all()
+ hists = model.session.query( model.History ).all()
assert hists[0].name == "History 1"
assert hists[1].name == ( "H" * 255 )
assert hists[0].user == users[0]
@@ -38,9 +39,9 @@
assert hists[1].datasets[0].file_name == os.path.join( "/tmp", *directory_hash_id( id ) ) + ( "/dataset_%d.dat" % id )
# Do an update and check
hists[1].name = "History 2b"
- model.context.current.flush()
- model.context.current.clear()
- hists = model.context.current.query( model.History ).all()
+ model.session.flush()
+ model.session.expunge_all()
+ hists = model.session.query( model.History ).all()
assert hists[0].name == "History 1"
assert hists[1].name == "History 2b"
# gvk TODO need to ad test for GalaxySessions, but not yet sure what they should look like.
diff -r 133252175425 -r d872c1e16afb lib/galaxy/model/migrate/versions/0025_user_info.py
--- a/lib/galaxy/model/migrate/versions/0025_user_info.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0025_user_info.py Tue Nov 03 12:52:01 2009 -0500
@@ -21,7 +21,7 @@
log.addHandler( handler )
metadata = MetaData( migrate_engine )
-db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
def display_migration_details():
print "========================================"
@@ -59,4 +59,4 @@
except Exception, e:
log.debug( "Adding foreign key constraint 'user_form_values_id_fk' to table 'galaxy_user' failed: %s" % ( str( e ) ) )
def downgrade():
- pass
\ No newline at end of file
+ pass
diff -r 133252175425 -r d872c1e16afb lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/tools/actions/__init__.py Tue Nov 03 12:52:01 2009 -0500
@@ -235,7 +235,7 @@
# Store output
out_data[ name ] = data
# Store all changes to database
- trans.app.model.flush()
+ trans.sa_session.flush()
# Add all the top-level (non-child) datasets to the history
for name in out_data.keys():
if name not in child_dataset_names and name not in incoming: #don't add children; or already existing datasets, i.e. async created
@@ -248,7 +248,7 @@
child_dataset = out_data[ child_name ]
parent_dataset.children.append( child_dataset )
# Store data after custom code runs
- trans.app.model.flush()
+ trans.sa_session.flush()
# Create the job object
job = trans.app.model.Job()
job.session_id = trans.get_galaxy_session().id
@@ -274,7 +274,8 @@
job.add_input_dataset( name, None )
for name, dataset in out_data.iteritems():
job.add_output_dataset( name, dataset )
- trans.app.model.flush()
+ trans.sa_session.add( job )
+ trans.sa_session.flush()
# Some tools are not really executable, but jobs are still created for them ( for record keeping ).
# Examples include tools that redirect to other applications ( epigraph ). These special tools must
# include something that can be retrieved from the params ( e.g., REDIRECT_URL ) to keep the job
diff -r 133252175425 -r d872c1e16afb lib/galaxy/tools/actions/metadata.py
--- a/lib/galaxy/tools/actions/metadata.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/tools/actions/metadata.py Tue Nov 03 12:52:01 2009 -0500
@@ -26,7 +26,8 @@
job.tool_version = tool.version
except:
job.tool_version = "1.0.0"
- job.flush() #ensure job.id is available
+ trans.sa_session.add( job )
+ trans.sa_session.flush() #ensure job.id is available
#add parameters to job_parameter table
# Store original dataset state, so we can restore it. A separate table might be better (no chance of 'losing' the original state)?
@@ -49,7 +50,7 @@
#Need a special state here to show that metadata is being set and also allow the job to run
# i.e. if state was set to 'running' the set metadata job would never run, as it would wait for input (the dataset to set metadata on) to be in a ready state
dataset.state = dataset.states.SETTING_METADATA
- trans.app.model.flush()
+ trans.sa_session.flush()
# Queue the job for execution
trans.app.job_queue.put( job.id, tool )
diff -r 133252175425 -r d872c1e16afb lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/tools/actions/upload_common.py Tue Nov 03 12:52:01 2009 -0500
@@ -121,6 +121,7 @@
trans.history.add_dataset( hda, genome_build = uploaded_dataset.dbkey )
permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions )
+ trans.sa_session.flush()
return hda
def new_library_upload( trans, uploaded_dataset, library_bunch, state=None ):
@@ -291,7 +292,8 @@
for i, dataset in enumerate( data_list ):
job.add_output_dataset( 'output%i' % i, dataset )
job.state = job.states.NEW
- trans.app.model.flush()
+ trans.sa_session.add( job )
+ trans.sa_session.flush()
# Queue the job for execution
trans.app.job_queue.put( job.id, tool )
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/async.py
--- a/lib/galaxy/web/controllers/async.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/async.py Tue Nov 03 12:52:01 2009 -0500
@@ -77,7 +77,7 @@
data.state = data.blurb = jobs.JOB_ERROR
data.info = "Error -> %s" % STATUS
- trans.model.flush()
+ trans.sa_session.flush()
return "Data %s with status %s received. OK" % (data_id, STATUS)
@@ -112,7 +112,7 @@
data.flush()
open( data.file_name, 'wb' ).close() #create the file
trans.history.add_dataset( data, genome_build=GALAXY_BUILD )
- trans.model.flush()
+ trans.sa_session.flush()
trans.log_event( "Added dataset %d to history %d" %(data.id, trans.history.id ), tool_id=tool_id )
try:
@@ -132,6 +132,6 @@
data.info = str(e)
data.state = data.blurb = data.states.ERROR
- trans.model.flush()
+ trans.sa_session.flush()
return trans.fill_template('tool_executed.tmpl', out_data={}, tool=tool, config=self.app.config )
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/dataset.py Tue Nov 03 12:52:01 2009 -0500
@@ -335,7 +335,7 @@
assert topmost_parent in history.datasets, "Data does not belong to current history"
# Mark undeleted
data.mark_undeleted()
- self.app.model.flush()
+ trans.sa_session.flush()
trans.log_event( "Dataset id %s has been undeleted" % str(id) )
return True
return False
@@ -407,7 +407,7 @@
hist.add_dataset( data.copy( copy_children = True ) )
if history in target_histories:
refresh_frames = ['history']
- trans.app.model.flush()
+ trans.sa_session.flush()
done_msg = "%i datasets copied to %i histories." % ( len( source_dataset_ids ) - invalid_datasets, len( target_histories ) )
trans.sa_session.refresh( history )
elif create_new_history:
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/library.py Tue Nov 03 12:52:01 2009 -0500
@@ -421,7 +421,7 @@
if trans.app.security_agent.can_modify_library_item( user, roles, ldda ):
if ldda.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = "Data type changed for library dataset '%s'" % ldda.name
messagetype = 'done'
else:
@@ -463,7 +463,7 @@
setattr( ldda.metadata, name, spec.unwrap( params.get ( name, None ) ) )
ldda.metadata.dbkey = dbkey
ldda.datatype.after_edit( ldda )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
messagetype = 'done'
else:
@@ -486,7 +486,7 @@
setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
ldda.datatype.set_meta( ldda )
ldda.datatype.after_edit( ldda )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
messagetype = 'done'
else:
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/library_admin.py Tue Nov 03 12:52:01 2009 -0500
@@ -432,7 +432,7 @@
# The user clicked the Save button on the 'Change data type' form
if ldda.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = "Data type changed for library dataset '%s'" % ldda.name
return trans.fill_template( "/admin/library/ldda_edit_info.mako",
ldda=ldda,
@@ -469,7 +469,7 @@
setattr( ldda.metadata, name, spec.unwrap( params.get ( name, None ) ) )
ldda.metadata.dbkey = dbkey
ldda.datatype.after_edit( ldda )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
messagetype = 'done'
return trans.fill_template( "/admin/library/ldda_edit_info.mako",
@@ -488,7 +488,7 @@
setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
ldda.datatype.set_meta( ldda )
ldda.datatype.after_edit( ldda )
- trans.app.model.flush()
+ trans.sa_session.flush()
msg = 'Attributes updated for library dataset %s' % ldda.name
return trans.fill_template( "/admin/library/ldda_edit_info.mako",
ldda=ldda,
@@ -674,6 +674,10 @@
replace_id = params.get( 'replace_id', None )
if replace_id not in [ None, 'None' ]:
replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( int( replace_id ) )
+            # The name must be saved separately - by the time the new ldda is
+            # created, replace_dataset.name will point to the new ldda, not the
+            # one it's replacing.
+ replace_dataset_name = replace_dataset.name
if not last_used_build:
last_used_build = replace_dataset.library_dataset_dataset_association.dbkey
# Don't allow multiple datasets to be uploaded when replacing a dataset with a new version
@@ -701,7 +705,7 @@
if created_outputs:
total_added = len( created_outputs.values() )
if replace_dataset:
- msg = "Added %d dataset versions to the library dataset '%s' in the folder '%s'." % ( total_added, replace_dataset.name, folder.name )
+ msg = "Added %d dataset versions to the library dataset '%s' in the folder '%s'." % ( total_added, replace_dataset_name, folder.name )
else:
if not folder.parent:
# Libraries have the same name as their root_folder
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/requests.py Tue Nov 03 12:52:01 2009 -0500
@@ -294,7 +294,7 @@
# save all the new/unsaved samples entered by the user
if edit_mode == 'False':
for index in range(len(current_samples)-len(request.samples)):
- sample_index = index + len(request.samples)
+ sample_index = len(request.samples)
sample_name = util.restore_text( params.get( 'sample_%i_name' % sample_index, '' ) )
sample_values = []
for field_index in range(len(request.type.sample_form.fields)):
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/root.py Tue Nov 03 12:52:01 2009 -0500
@@ -277,7 +277,7 @@
if not __ok_to_edit_metadata( data.id ):
return trans.show_error_message( "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them." )
trans.app.datatypes_registry.change_datatype( data, params.datatype )
- trans.app.model.flush()
+ trans.sa_session.flush()
else:
return trans.show_error_message( "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype ) )
elif params.save:
@@ -303,7 +303,7 @@
data.datatype.after_edit( data )
else:
msg = ' (Metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata.)'
- trans.app.model.flush()
+ trans.sa_session.flush()
return trans.show_ok_message( "Attributes updated%s" % msg, refresh_frames=['history'] )
elif params.detect:
# The user clicked the Auto-detect button on the 'Edit Attributes' form
@@ -322,7 +322,7 @@
msg = 'Attributes updated'
data.set_meta()
data.datatype.after_edit( data )
- trans.app.model.flush()
+ trans.sa_session.flush()
return trans.show_ok_message( msg, refresh_frames=['history'] )
elif params.convert_data:
target_type = kwd.get("target_type", None)
@@ -383,7 +383,7 @@
if job.check_if_output_datasets_deleted():
job.mark_deleted()
self.app.job_manager.job_stop_queue.put( job.id )
- self.app.model.flush()
+ trans.sa_session.flush()
@web.expose
def delete( self, trans, id = None, show_deleted_on_refresh = False, **kwd):
@@ -432,7 +432,7 @@
for dataset in history.datasets:
dataset.deleted = True
dataset.clear_associated_files()
- self.app.model.flush()
+ trans.sa_session.flush()
trans.log_event( "History id %s cleared" % (str(history.id)) )
trans.response.send_redirect( url_for("/index" ) )
@web.expose
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/controllers/tool_runner.py Tue Nov 03 12:52:01 2009 -0500
@@ -198,8 +198,6 @@
# pasted data
datasets.append( create_dataset( 'Pasted Entry' ) )
break
- if datasets:
- trans.model.flush()
return [ d.id for d in datasets ]
@web.expose
diff -r 133252175425 -r d872c1e16afb lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Tue Nov 03 12:52:00 2009 -0500
+++ b/lib/galaxy/web/framework/__init__.py Tue Nov 03 12:52:01 2009 -0500
@@ -139,7 +139,7 @@
self.__galaxy_session = NOT_SET
base.DefaultWebTransaction.__init__( self, environ )
self.setup_i18n()
- self.sa_session.clear()
+ self.sa_session.expunge_all()
self.debug = asbool( self.app.config.get( 'debug', False ) )
# Flag indicating whether we are in workflow building mode (means
# that the current history should not be used for parameter values
@@ -302,12 +302,12 @@
self.galaxy_session = galaxy_session
# Do we need to flush the session?
if galaxy_session_requires_flush:
- objects_to_flush = [ galaxy_session ]
+ sa_session.add( galaxy_session )
# FIXME: If prev_session is a proper relation this would not
# be needed.
if prev_galaxy_session:
- objects_to_flush.append( prev_galaxy_session )
- sa_session.flush( objects_to_flush )
+ sa_session.add( prev_galaxy_session )
+ sa_session.flush()
# If the old session was invalid, get a new history with our new session
if invalidate_existing_session:
self.new_history()
@@ -427,7 +427,8 @@
if not last_accessed:
# Only set default history permissions if current history is not from a previous session
self.app.security_agent.history_set_default_permissions( history, dataset=True, bypass_manage_permission=True )
- self.sa_session.flush( [ prev_galaxy_session, self.galaxy_session, history ] )
+ self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session, history ) )
+ self.sa_session.flush()
# This method is not called from the Galaxy reports, so the cookie will always be galaxysession
self.__update_session_cookie( name='galaxysession' )
def handle_user_logout( self ):
@@ -439,7 +440,8 @@
prev_galaxy_session = self.galaxy_session
prev_galaxy_session.is_valid = False
self.galaxy_session = self.__create_new_session( prev_galaxy_session )
- self.sa_session.flush( [ prev_galaxy_session, self.galaxy_session ] )
+ self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
+ self.sa_session.flush()
# This method is not called from the Galaxy reports, so the cookie will always be galaxysession
self.__update_session_cookie( name='galaxysession' )
@@ -466,7 +468,8 @@
def set_history( self, history ):
if history and not history.deleted:
self.galaxy_session.current_history = history
- self.sa_session.flush( [ self.galaxy_session ] )
+ self.sa_session.add( self.galaxy_session )
+ self.sa_session.flush()
history = property( get_history, set_history )
def new_history( self, name=None ):
"""
@@ -489,7 +492,8 @@
# Set the user's default history permissions
self.app.security_agent.history_set_default_permissions( history )
# Save
- self.sa_session.flush( [ self.galaxy_session, history ] )
+ self.sa_session.add_all( ( self.galaxy_session, history ) )
+ self.sa_session.flush()
return history
def get_user( self ):
@@ -498,7 +502,8 @@
def set_user( self, user ):
"""Set the current user."""
self.galaxy_session.user = user
- self.sa_session.flush( [ self.galaxy_session ] )
+ self.sa_session.add( self.galaxy_session )
+ self.sa_session.flush()
user = property( get_user, set_user )
def get_user_and_roles( self ):
diff -r 133252175425 -r d872c1e16afb test/functional/test_forms_and_requests.py
--- a/test/functional/test_forms_and_requests.py Tue Nov 03 12:52:00 2009 -0500
+++ b/test/functional/test_forms_and_requests.py Tue Nov 03 12:52:01 2009 -0500
@@ -29,6 +29,7 @@
.filter( galaxy.model.FormDefinitionCurrent.table.c.deleted==False ) \
.order_by( galaxy.model.FormDefinitionCurrent.table.c.create_time.desc() )
for fdc in fdc_list:
+ sa_session.refresh( fdc.latest_form )
if form_name == fdc.latest_form.name:
return fdc.latest_form
return None
diff -r 133252175425 -r d872c1e16afb test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Tue Nov 03 12:52:00 2009 -0500
+++ b/test/functional/test_security_and_libraries.py Tue Nov 03 12:52:01 2009 -0500
@@ -156,7 +156,7 @@
raise AssertionError( '%s not in history id %d default_permissions after they were changed' % ( value.action, latest_history.id ) )
# Add a dataset to the history
self.upload_file( '1.bed' )
- latest_dataset = galaxy.model.Dataset.query().order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
+ latest_dataset = sa_session.query( galaxy.model.Dataset ).order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
# Make sure DatasetPermissionss are correct
if len( latest_dataset.actions ) != len( latest_history.default_permissions ):
raise AssertionError( '%d DatasetPermissionss were created for dataset id %d when it was created ( should have been %d )' % \
diff -r 133252175425 -r d872c1e16afb test/functional/test_user_info.py
--- a/test/functional/test_user_info.py Tue Nov 03 12:52:00 2009 -0500
+++ b/test/functional/test_user_info.py Tue Nov 03 12:52:01 2009 -0500
@@ -14,6 +14,7 @@
.filter( galaxy.model.FormDefinitionCurrent.table.c.deleted==False ) \
.order_by( galaxy.model.FormDefinitionCurrent.table.c.create_time.desc() )
for fdc in fdc_list:
+ sa_session.refresh( fdc.latest_form )
if form_name == fdc.latest_form.name:
return fdc.latest_form
return None
@@ -146,4 +147,4 @@
self.visit_page('forms/manage?show_filter=Deleted')
self.check_page_for_string(form_one_latest.name)
self.logout()
-
\ No newline at end of file
+
diff -r 133252175425 -r d872c1e16afb tools/data_source/microbial_import_code.py
--- a/tools/data_source/microbial_import_code.py Tue Nov 03 12:52:00 2009 -0500
+++ b/tools/data_source/microbial_import_code.py Tue Nov 03 12:52:01 2009 -0500
@@ -123,7 +123,7 @@
data = app.datatypes_registry.change_datatype( data, file_type )
data.init_meta()
data.set_peek()
- app.model.flush()
+ data.flush()
elif fields[0] == "#NewFile":
description = fields[1]
chr = fields[2]
@@ -137,7 +137,7 @@
newdata.flush()
app.security_agent.copy_dataset_permissions( base_dataset.dataset, newdata.dataset )
history.add_dataset( newdata )
- app.model.flush()
+ history.flush()
try:
copyfile(filepath,newdata.file_name)
newdata.info = newdata.name
@@ -148,4 +148,4 @@
newdata.dbkey = dbkey
newdata.init_meta()
newdata.set_peek()
- app.model.flush()
+ newdata.flush()
diff -r 133252175425 -r d872c1e16afb tools/filters/lav_to_bed_code.py
--- a/tools/filters/lav_to_bed_code.py Tue Nov 03 12:52:00 2009 -0500
+++ b/tools/filters/lav_to_bed_code.py Tue Nov 03 12:52:01 2009 -0500
@@ -16,4 +16,3 @@
data.flush()
except:
continue
- app.model.flush()
\ No newline at end of file
diff -r 133252175425 -r d872c1e16afb tools/maf/maf_to_bed_code.py
--- a/tools/maf/maf_to_bed_code.py Tue Nov 03 12:52:00 2009 -0500
+++ b/tools/maf/maf_to_bed_code.py Tue Nov 03 12:52:01 2009 -0500
@@ -21,7 +21,6 @@
output_data.dbkey = dbkey
output_data.name = basic_name + " (" + dbkey + ")"
output_data.flush()
- app.model.flush()
output_data_list.append(output_data)
elif line.startswith("#FILE"):
fields = line.split("\t")
@@ -36,7 +35,6 @@
app.security_agent.copy_dataset_permissions( output_data.dataset, newdata.dataset )
newdata.flush()
history.flush()
- app.model.flush()
try:
move(filepath,newdata.file_name)
newdata.info = newdata.name
@@ -47,7 +45,7 @@
newdata.dbkey = dbkey
newdata.init_meta()
newdata.set_peek()
- app.model.flush()
+ newdata.flush()
output_data_list.append(newdata)
else:
new_stdout = new_stdout + line
1
0