galaxy-dev
November 2009
- 26 participants
- 233 discussions
16 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/aa388ba7d692
changeset: 3019:aa388ba7d692
user: Kelly Vincent <kpvincent(a)bx.psu.edu>
date: Thu Nov 12 13:02:04 2009 -0500
description:
Corrected sloppy temp file/directory cleanup in index creation for Bowtie and BWA
diffstat:
tools/sr_mapping/bowtie_wrapper.py | 24 +++++++++++-------------
tools/sr_mapping/bwa_wrapper.py | 19 +++++++++++--------
2 files changed, 22 insertions(+), 21 deletions(-)
diffs (142 lines):
diff -r 24f0d1e7f39f -r aa388ba7d692 tools/sr_mapping/bowtie_wrapper.py
--- a/tools/sr_mapping/bowtie_wrapper.py Thu Nov 12 11:07:05 2009 -0500
+++ b/tools/sr_mapping/bowtie_wrapper.py Thu Nov 12 13:02:04 2009 -0500
@@ -4,7 +4,7 @@
Runs Bowtie on single-end or paired-end data.
"""
-import optparse, os, sys, tempfile
+import optparse, os, shutil, sys, tempfile
def stop_err( msg ):
sys.stderr.write( "%s\n" % msg )
@@ -62,7 +62,8 @@
parser.add_option('', '--indexSettings', dest='index_settings', help='Whether or not indexing options are to be set')
parser.add_option('', '--suppressHeader', dest='suppressHeader', help='Suppress header')
(options, args) = parser.parse_args()
-
+ # make temp directory for placement of indices and copy reference file there if necessary
+ tmp_index_dir = tempfile.mkdtemp()
# index if necessary
if options.genomeSource == 'history':
# set up commands
@@ -85,22 +86,19 @@
('','--cutoff %s'%options.icutoff)[int(options.icutoff)>0])
except ValueError:
indexing_cmds = ''
-
- # make temp directory for placement of indices and copy reference file there
- tmp_dir = tempfile.gettempdir()
try:
- os.system('cp %s %s' % (options.ref, tmp_dir))
+ shutil.copy(options.ref, tmp_index_dir)
except Exception, erf:
stop_err('Error creating temp directory for indexing purposes\n' + str(erf))
- options.ref = os.path.join(tmp_dir,os.path.split(options.ref)[1])
- cmd1 = 'cd %s; bowtie-build %s -f %s %s 2> /dev/null' % (tmp_dir, indexing_cmds, options.ref, options.ref)
+ options.ref = os.path.join(tmp_index_dir,os.path.split(options.ref)[1])
+ cmd1 = 'bowtie-build %s -f %s %s 2> /dev/null' % (indexing_cmds, options.ref, options.ref)
try:
+ os.chdir(tmp_index_dir)
os.system(cmd1)
except Exception, erf:
stop_err('Error indexing reference sequence\n' + str(erf))
-
# set up aligning and generate aligning command options
- # automatically set threads to 8 in both cases
+ # automatically set threads in both cases
if options.params == 'pre_set':
aligning_cmds = '-p %s -S' % options.threads
else:
@@ -134,19 +132,16 @@
options.threads)
except ValueError, erf:
stop_err('Something is wrong with the alignment parameters and the alignment could not be run\n' + str(erf))
-
# prepare actual aligning commands
if options.paired == 'paired':
cmd2 = 'bowtie %s %s -1 %s -2 %s > %s 2> /dev/null' % (aligning_cmds, options.ref, options.input1, options.input2, options.output)
else:
cmd2 = 'bowtie %s %s %s > %s 2> /dev/null' % (aligning_cmds, options.ref, options.input1, options.output)
-
# align
try:
os.system(cmd2)
except Exception, erf:
stop_err("Error aligning sequence\n" + str(erf))
-
# remove header if necessary
if options.suppressHeader == 'true':
tmp_out = tempfile.NamedTemporaryFile()
@@ -171,5 +166,8 @@
line = output.readline()
fout.close()
tmp_out.close()
+ # clean up temp dir
+ if os.path.exists(tmp_index_dir):
+ shutil.rmtree(tmp_index_dir)
if __name__=="__main__": __main__()
diff -r 24f0d1e7f39f -r aa388ba7d692 tools/sr_mapping/bwa_wrapper.py
--- a/tools/sr_mapping/bwa_wrapper.py Thu Nov 12 11:07:05 2009 -0500
+++ b/tools/sr_mapping/bwa_wrapper.py Thu Nov 12 13:02:04 2009 -0500
@@ -5,7 +5,7 @@
Produces a SAM file containing the mappings.
"""
-import optparse, os, sys, tempfile
+import optparse, os, shutil, sys, tempfile
def stop_err( msg ):
sys.stderr.write( "%s\n" % msg )
@@ -43,13 +43,12 @@
parser.add_option('', '--dbkey', dest='dbkey', help='')
parser.add_option('', '--suppressHeader', dest='suppressHeader', help='Suppress header')
(options, args) = parser.parse_args()
-
+ # make temp directory for placement of indices and copy reference file there
+ tmp_index_dir = tempfile.mkdtemp()
# index if necessary
if options.fileSource == 'history':
- # make temp directory for placement of indices and copy reference file there
- tmp_dir = tempfile.gettempdir()
try:
- os.system('cp %s %s' % (options.ref, tmp_dir))
+ shutil.copy(options.ref, tmp_index_dir)
except Exception, erf:
stop_err('Error creating temp directory for indexing purposes\n' + str(erf))
try:
@@ -64,9 +63,10 @@
indexing_cmds = '-c -a %s' % indexingAlg
else:
indexing_cmds = '-a %s' % indexingAlg
- options.ref = os.path.join(tmp_dir,os.path.split(options.ref)[1])
- cmd1 = 'cd %s; bwa index %s %s 2> /dev/null' % (tmp_dir, indexing_cmds, options.ref)
+ options.ref = os.path.join(tmp_index_dir,os.path.split(options.ref)[1])
+ cmd1 = 'bwa index %s %s 2> /dev/null' % (indexing_cmds, options.ref)
try:
+ os.chdir(tmp_index_dir)
os.system(cmd1)
except Exception, erf:
stop_err('Error indexing reference sequence\n' + str(erf))
@@ -89,10 +89,10 @@
gen_alignment_cmds = '-n %s' % options.outputTopN
elif options.genAlignType == 'paired':
gen_alignment_cmds = '-a %s -o %s' % (options.maxInsertSize, options.maxOccurPairing)
+# print 'options.genAlignType: %s and commands: %s' % (options.genAlignType, gen_alignment_cmds)
# set up output files
tmp_align_out = tempfile.NamedTemporaryFile()
tmp_align_out2 = tempfile.NamedTemporaryFile()
-
# prepare actual aligning and generate aligning commands
cmd2 = 'bwa aln %s %s %s > %s 2> /dev/null' % (aligning_cmds, options.ref, options.fastq, tmp_align_out.name)
cmd2b = ''
@@ -144,5 +144,8 @@
line = output.readline()
fout.close()
tmp_out.close()
+ # clean up temp dir
+ if os.path.exists(tmp_index_dir):
+ shutil.rmtree(tmp_index_dir)
if __name__=="__main__": __main__()
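Both wrappers now follow the same pattern: build indices in a private directory from tempfile.mkdtemp() and remove it when done, even on failure. A minimal self-contained sketch of that pattern (the reference path and indexing command are illustrative, not Galaxy's):

    import os, shutil, subprocess, tempfile

    reference_path = '/path/to/reference.fa'  # hypothetical input

    # Private scratch directory instead of the shared tempfile.gettempdir(),
    # removed in the finally block even if indexing fails.
    tmp_index_dir = tempfile.mkdtemp()
    try:
        shutil.copy(reference_path, tmp_index_dir)
        ref = os.path.join(tmp_index_dir, os.path.basename(reference_path))
        subprocess.check_call(['bowtie-build', '-f', ref, ref])
    finally:
        if os.path.exists(tmp_index_dir):
            shutil.rmtree(tmp_index_dir)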
16 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/2e8b8b0bc366
changeset: 3020:2e8b8b0bc366
user: Nate Coraor <nate(a)bx.psu.edu>
date: Thu Nov 12 13:12:12 2009 -0500
description:
Upgrade Paste/PasteDeploy/PasteScript. Paste can go no further without changing the way we stream templates and tarballs.
diffstat:
eggs.ini | 12 +++---
lib/galaxy/web/buildapp.py | 8 +++-
scripts/check_python.py | 1 +
scripts/functional_tests.py | 2 +-
scripts/scramble/scripts/PasteScript.py | 53 ++++++++++++++++++++++++++
5 files changed, 68 insertions(+), 8 deletions(-)
diffs (134 lines):
diff -r aa388ba7d692 -r 2e8b8b0bc366 eggs.ini
--- a/eggs.ini Thu Nov 12 13:02:04 2009 -0500
+++ b/eggs.ini Thu Nov 12 13:12:12 2009 -0500
@@ -37,9 +37,9 @@
Mako = 0.2.5
nose = 0.11.1
NoseHTML = 0.3
-Paste = 1.5.1
-PasteDeploy = 1.3.1
-PasteScript = 1.3.6
+Paste = 1.6
+PasteDeploy = 1.3.3
+PasteScript = 1.7.3
Routes = 1.11
simplejson = 1.5
SQLAlchemy = 0.5.6
@@ -86,9 +86,9 @@
Mako = http://www.makotemplates.org/downloads/Mako-0.2.5.tar.gz
nose = http://pypi.python.org/packages/source/n/nose/nose-0.11.1.tar.gz
NoseHTML = http://bitbucket.org/james_taylor/nosehtml/get/c46a54d569ae.bz2
-Paste = http://cheeseshop.python.org/packages/source/P/Paste/Paste-1.5.1.tar.gz
-PasteDeploy = http://cheeseshop.python.org/packages/source/P/PasteDeploy/PasteDeploy-1.3.…
-PasteScript = http://cheeseshop.python.org/packages/source/P/PasteScript/PasteScript-1.3.…
+Paste = http://cheeseshop.python.org/packages/source/P/Paste/Paste-1.6.tar.gz
+PasteDeploy = http://cheeseshop.python.org/packages/source/P/PasteDeploy/PasteDeploy-1.3.…
+PasteScript = http://cheeseshop.python.org/packages/source/P/PasteScript/PasteScript-1.7.…
PSI = http://pypi.python.org/packages/source/P/PSI/PSI-0.3b1.1.tar.gz
Routes = http://pypi.python.org/packages/source/R/Routes/Routes-1.11.tar.gz
simplejson = http://cheeseshop.python.org/packages/source/s/simplejson/simplejson-1.5.ta…
diff -r aa388ba7d692 -r 2e8b8b0bc366 lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py Thu Nov 12 13:02:04 2009 -0500
+++ b/lib/galaxy/web/buildapp.py Thu Nov 12 13:12:12 2009 -0500
@@ -4,6 +4,7 @@
import logging, atexit
import os, os.path
+import sys, warnings
from inspect import isclass
@@ -151,7 +152,12 @@
log.debug( "Enabling 'eval exceptions' middleware" )
else:
# Not in interactive debug mode, just use the regular error middleware
- from paste.exceptions import errormiddleware
+ if sys.version_info[:2] >= ( 2, 6 ):
+ warnings.filterwarnings( 'ignore', '.*', DeprecationWarning, '.*serial_number_generator', 11, True )
+ from paste.exceptions import errormiddleware
+ warnings.filters.pop()
+ else:
+ from paste.exceptions import errormiddleware
app = errormiddleware.ErrorMiddleware( app, conf )
log.debug( "Enabling 'error' middleware" )
# Transaction logging (apache access.log style)
diff -r aa388ba7d692 -r 2e8b8b0bc366 scripts/check_python.py
--- a/scripts/check_python.py Thu Nov 12 13:02:04 2009 -0500
+++ b/scripts/check_python.py Thu Nov 12 13:12:12 2009 -0500
@@ -1,6 +1,7 @@
import os, sys
def check_python():
+ return
try:
assert sys.version_info[:2] >= ( 2, 4 ) and sys.version_info[:2] <= ( 2, 5 )
except AssertionError:
diff -r aa388ba7d692 -r 2e8b8b0bc366 scripts/functional_tests.py
--- a/scripts/functional_tests.py Thu Nov 12 13:02:04 2009 -0500
+++ b/scripts/functional_tests.py Thu Nov 12 13:12:12 2009 -0500
@@ -118,7 +118,7 @@
if start_server:
- webapp = buildapp.app_factory( dict(), use_translogger = False, app=app )
+ webapp = buildapp.app_factory( dict(), use_translogger = False, static_enabled = False, app=app )
if galaxy_test_port is not None:
server = httpserver.serve( webapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
diff -r aa388ba7d692 -r 2e8b8b0bc366 scripts/scramble/scripts/PasteScript.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/scramble/scripts/PasteScript.py Thu Nov 12 13:12:12 2009 -0500
@@ -0,0 +1,53 @@
+import os, sys, shutil
+
+# change back to the build dir
+if os.path.dirname( sys.argv[0] ) != "":
+ os.chdir( os.path.dirname( sys.argv[0] ) )
+
+# find setuptools
+scramble_lib = os.path.join( "..", "..", "..", "lib" )
+sys.path.append( scramble_lib )
+from ez_setup import use_setuptools
+use_setuptools( download_delay=8, to_dir=scramble_lib )
+from setuptools import *
+
+# get the tag
+if os.access( ".galaxy_tag", os.F_OK ):
+ tagfile = open( ".galaxy_tag", "r" )
+ tag = tagfile.readline().strip()
+else:
+ tag = None
+
+# in case you're running this by hand from a dirty module source dir
+for dir in [ "build", "dist" ]:
+ if os.access( dir, os.F_OK ):
+ print "scramble.py: removing dir:", dir
+ shutil.rmtree( dir )
+
+# patch
+file = 'setup.py'
+if not os.access( "%s.orig" %file, os.F_OK ):
+ print "scramble_it(): Patching", file
+ shutil.copyfile( file, "%s.orig" %file )
+ i = open( "%s.orig" %file, "r" )
+ o = open( file, "w" )
+ for line in i.readlines():
+ if line in ( " 'Paste>=1.3',\n", " 'PasteDeploy',\n" ):
+ continue
+ print >>o, line,
+ i.close()
+ o.close()
+
+# reset args for distutils
+me = sys.argv[0]
+sys.argv = [ me ]
+sys.argv.append( "egg_info" )
+if tag is not None:
+ #sys.argv.append( "egg_info" )
+ sys.argv.append( "--tag-build=%s" %tag )
+# svn revision (if any) is handled directly in tag-build
+sys.argv.append( "--no-svn-revision" )
+sys.argv.append( "bdist_egg" )
+
+# do it
+execfile( "setup.py", globals(), locals() )
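The buildapp.py hunk above installs a warnings filter around a single import and pops it afterwards. On Python 2.6 and later the same scoping can be done with warnings.catch_warnings(), which restores the filter list automatically; a sketch, assuming Paste is importable:

    import warnings

    # The context manager saves and restores the filter list on exit,
    # so nothing needs to be popped by hand.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', DeprecationWarning)
        from paste.exceptions import errormiddleware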
16 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/8bc85721cbce
changeset: 3021:8bc85721cbce
user: rc
date: Thu Nov 12 13:20:19 2009 -0500
description:
Applied a patch provided by Brad Chapman to the liftOver tool to support BED files that contain track or browser lines. Resolves bitbucket issue 201.
diffstat:
tools/extract/liftOver_wrapper.py | 30 ++++++++++++++++++++++++++++--
tools/extract/liftOver_wrapper.xml | 5 ++++-
2 files changed, 32 insertions(+), 3 deletions(-)
diffs (72 lines):
diff -r 2e8b8b0bc366 -r 8bc85721cbce tools/extract/liftOver_wrapper.py
--- a/tools/extract/liftOver_wrapper.py Thu Nov 12 13:12:12 2009 -0500
+++ b/tools/extract/liftOver_wrapper.py Thu Nov 12 13:20:19 2009 -0500
@@ -5,12 +5,34 @@
"""
import sys, os, string
+import tempfile
+import re
assert sys.version_info[:2] >= ( 2, 4 )
def stop_err(msg):
sys.stderr.write(msg)
sys.exit()
+
+def safe_bed_file(infile):
+ """Make a BED file with track and browser lines ready for liftOver.
+
+ liftOver will fail with track or browser lines. We can make it happy
+ by converting these to comments. See:
+
+ https://lists.soe.ucsc.edu/pipermail/genome/2007-May/013561.html
+ """
+ fix_pat = re.compile("^(track|browser)")
+ (fd, fname) = tempfile.mkstemp()
+ in_handle = open(infile)
+ out_handle = open(fname, "w")
+ for line in in_handle:
+ if fix_pat.match(line):
+ line = "#" + line
+ out_handle.write(line)
+ in_handle.close()
+ out_handle.close()
+ return fname
if len( sys.argv ) != 7:
stop_err( "USAGE: prog input out_file1 out_file2 input_dbkey output_dbkey minMatch" )
@@ -29,11 +51,15 @@
if in_dbkey == "?":
stop_err( "Input dataset genome build unspecified, click the pencil icon in the history item to specify it." )
-cmd_line = "liftOver -minMatch=" + str(minMatch) + " " + infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null 2>&1"
if not os.path.isfile( mapfilepath ):
stop_err( "%s mapping is not currently available." % ( mapfilepath.split('/')[-1].split('.')[0] ) )
+
+safe_infile = safe_bed_file(infile)
+cmd_line = "liftOver -minMatch=" + str(minMatch) + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + " > /dev/null 2>&1"
try:
os.system( cmd_line )
except Exception, exc:
- stop_err( "Exception caught attempting conversion: %s" % str( exc ) )
\ No newline at end of file
+ stop_err( "Exception caught attempting conversion: %s" % str( exc ) )
+finally:
+ os.remove(safe_infile)
diff -r 2e8b8b0bc366 -r 8bc85721cbce tools/extract/liftOver_wrapper.xml
--- a/tools/extract/liftOver_wrapper.xml Thu Nov 12 13:12:12 2009 -0500
+++ b/tools/extract/liftOver_wrapper.xml Thu Nov 12 13:20:19 2009 -0500
@@ -42,7 +42,10 @@
.. class:: warningmark
-This tool will only work on interval datasets with chromosome in column 1, start co-ordinate in column 2 and end co-ordinate in column 3. If this is not the case with any line of the input dataset, the tool will return empty output datasets.
+This tool will only work on interval datasets with chromosome in column 1,
+start co-ordinate in column 2 and end co-ordinate in column 3. BED comments
+and track and browser lines will be ignored, but if other non-interval lines
+are present the tool will return empty output datasets.
-----
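A minimal sketch of how the new safe_bed_file() helper from the patch above is used (input and chain file names are hypothetical); the finally block matches the wrapper's cleanup of the temporary BED copy:

    import os

    infile = 'input.bed'                   # hypothetical
    mapfilepath = 'hg18ToHg19.over.chain'  # hypothetical

    safe_infile = safe_bed_file(infile)    # helper defined in the patch above
    try:
        os.system('liftOver -minMatch=0.95 %s %s mapped.bed unmapped.bed > /dev/null 2>&1'
                  % (safe_infile, mapfilepath))
    finally:
        os.remove(safe_infile)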
Hi,
I am attempting to install Galaxy on our computing grid as a first step toward integrating our tools with it. I have everything up and running as far as serving the Galaxy suite goes, except for job submission to our compute cluster.
We have a commodity cluster using the SGE job-scheduling system, along with an extra node that acts solely as the qmaster and job-submission node. Galaxy will run on a standalone web server that may or may not sit outside a firewall separating it from the cluster and head node, although ports can be opened in the firewall if required.
What would be the advised strategy for enabling the web server to submit jobs to our cluster? Am I right in thinking that DRMAA allows remote hosts to submit jobs? Do I need to install the SGE binaries on the web server and use DRMAA_python to connect to the head node and submit the jobs?
Any advice or direction on how to tackle this would be greatly appreciated.
Cheers,
Matt Goyder
--
matthew.goyder(a)nationwidechildrens.org
Battelle Center for Mathematical Medicine
The Research Institute at Nationwide Children's Hospital
Office: (614)355-2395
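For context, a minimal sketch of what DRMAA submission from Python can look like, assuming the SGE client libraries and a DRMAA Python binding are installed on the submitting host. This uses the drmaa package's API, which differs in detail from the older DRMAA_python binding used by Galaxy's sge.py runner:

    import drmaa

    s = drmaa.Session()
    s.initialize()                   # requires SGE_ROOT etc. in the environment
    jt = s.createJobTemplate()
    jt.remoteCommand = '/bin/sleep'  # hypothetical command
    jt.args = ['10']
    jobid = s.runJob(jt)
    print 'submitted job %s' % jobid
    s.deleteJobTemplate(jt)
    s.exit()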
12 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/0edb42925161
changeset: 3014:0edb42925161
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Nov 11 15:13:12 2009 -0500
description:
The job working directory was not being used for _any_ cluster jobs. No idea how this was missed for so long. Data source jobs should now work on the cluster.
diffstat:
lib/galaxy/jobs/runners/pbs.py | 4 ++--
lib/galaxy/jobs/runners/sge.py | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diffs (33 lines):
diff -r 120a34362c82 -r 0edb42925161 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py Wed Nov 11 14:57:34 2009 -0500
+++ b/lib/galaxy/jobs/runners/pbs.py Wed Nov 11 15:13:12 2009 -0500
@@ -205,7 +205,7 @@
job_attrs[3].value = stageout
job_attrs[4].name = pbs.ATTR_N
job_attrs[4].value = "%s_%s" % ( job_wrapper.job_id, job_wrapper.tool.id )
- exec_dir = os.path.abspath( os.getcwd() )
+ exec_dir = os.path.abspath( job_wrapper.working_directory )
# If not, we're using NFS
else:
job_attrs = pbs.new_attropl(3)
@@ -215,7 +215,7 @@
job_attrs[1].value = efile
job_attrs[2].name = pbs.ATTR_N
job_attrs[2].value = "%s_%s" % ( job_wrapper.job_id, job_wrapper.tool.id )
- exec_dir = os.getcwd()
+ exec_dir = os.path.abspath( job_wrapper.working_directory )
# write the job script
if self.app.config.pbs_stage_path != '':
diff -r 120a34362c82 -r 0edb42925161 lib/galaxy/jobs/runners/sge.py
--- a/lib/galaxy/jobs/runners/sge.py Wed Nov 11 14:57:34 2009 -0500
+++ b/lib/galaxy/jobs/runners/sge.py Wed Nov 11 15:13:12 2009 -0500
@@ -143,7 +143,7 @@
if sge_queue_name is not None:
jt.setNativeSpecification( "-q %s" % sge_queue_name )
- script = sge_template % (job_wrapper.galaxy_lib_dir, os.getcwd(), command_line)
+ script = sge_template % (job_wrapper.galaxy_lib_dir, os.path.abspath( job_wrapper.working_directory ), command_line)
fh = file( jt.remoteCommand, "w" )
fh.write( script )
fh.close()
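The underlying problem is that os.getcwd() is process-global: in a long-running server it points wherever the Galaxy process happens to be, not at the per-job directory. A toy illustration, with JobWrapper as a hypothetical stand-in for Galaxy's job wrapper:

    import os

    class JobWrapper(object):
        # hypothetical stand-in for Galaxy's job wrapper
        def __init__(self, working_directory):
            self.working_directory = working_directory

    job_wrapper = JobWrapper('database/job_working_directory/42')
    # Before the fix: the server's own cwd, shared by every job.
    exec_dir = os.getcwd()
    # After the fix: the job's own directory, resolved to an absolute path.
    exec_dir = os.path.abspath(job_wrapper.working_directory)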
12 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/b33b8f5e03b8
changeset: 3015:b33b8f5e03b8
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Wed Nov 11 15:59:41 2009 -0500
description:
Fix most of the db flushes to be compatible with SQLAlchemy 0.5. Add the _monkeypatch_query_method() back into assignmapper due to a single object get() call in the MetadataCollection class, since Metadata has no current hook into mapping.context ( the sqlalchemy session ). There are 4 flushes in metadata.py and 20 flushes in model/__init__.py that still use the _monkeypatch_session_method in assignmapper due to the same issue, but all other flushes are fixed.
diffstat:
lib/galaxy/datatypes/metadata.py | 6 +-
lib/galaxy/jobs/__init__.py | 25 +-
lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py | 39 +-
lib/galaxy/model/orm/ext/assignmapper.py | 15 +-
lib/galaxy/security/__init__.py | 1255 +++++----
lib/galaxy/tools/__init__.py | 40 +-
lib/galaxy/tools/actions/__init__.py | 15 +-
lib/galaxy/tools/actions/upload.py | 2 +-
lib/galaxy/tools/actions/upload_common.py | 41 +-
lib/galaxy/tools/parameters/basic.py | 4 +-
lib/galaxy/web/controllers/admin.py | 72 +-
lib/galaxy/web/controllers/async.py | 3 +-
lib/galaxy/web/controllers/dataset.py | 3 +-
lib/galaxy/web/controllers/forms.py | 6 +-
lib/galaxy/web/controllers/history.py | 30 +-
lib/galaxy/web/controllers/library.py | 19 +-
lib/galaxy/web/controllers/library_admin.py | 53 +-
lib/galaxy/web/controllers/library_common.py | 23 +-
lib/galaxy/web/controllers/requests.py | 34 +-
lib/galaxy/web/controllers/requests_admin.py | 71 +-
lib/galaxy/web/controllers/root.py | 18 +-
lib/galaxy/web/controllers/tracks.py | 6 +-
lib/galaxy/web/controllers/user.py | 42 +-
lib/galaxy/web/controllers/workflow.py | 8 +-
lib/galaxy/web/framework/__init__.py | 12 +-
scripts/cleanup_datasets/cleanup_datasets.py | 29 +-
scripts/cleanup_datasets/update_metadata.py | 3 +-
scripts/update_database/update_database_with_security_libraries.py | 229 -
scripts/update_database/update_database_with_security_libraries.sh | 9 -
test/functional/test_DNAse_flanked_genes.py | 6 +-
test/functional/test_history_functions.py | 5 +-
test/functional/test_metadata_editing.py | 2 +-
test/functional/test_security_and_libraries.py | 10 +-
test/functional/test_user_info.py | 6 +-
tools/data_source/hbvar_filter.py | 3 +-
tools/data_source/microbial_import_code.py | 11 +-
tools/emboss_5/emboss_format_corrector.py | 9 +-
tools/filters/axt_to_lav_code.py | 3 +-
tools/filters/lav_to_bed_code.py | 3 +-
tools/maf/maf_to_bed_code.py | 15 +-
tools/visualization/LAJ_code.py | 3 +-
41 files changed, 1065 insertions(+), 1123 deletions(-)
diffs (truncated from 3757 to 3000 lines):
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/datatypes/metadata.py Wed Nov 11 15:59:41 2009 -0500
@@ -391,9 +391,9 @@
return value
if DATABASE_CONNECTION_AVAILABLE:
try:
- # FIXME: GVK ( 10/23/09 ) Can we get a valid db session without this import?
- from galaxy.model.mapping import context as sa_session
- return sa_session.query( galaxy.model.MetadataFile ).get( value )
+ # FIXME: GVK ( 11/11/09 ) had to add the monkey patch back into assignmapper for the get
+ # method for this since Metadata has no hook into mapping.context ( the sqlalchemy session ).
+ return galaxy.model.MetadataFile.get( value )
except:
#value was not a valid id
return None
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/jobs/__init__.py Wed Nov 11 15:59:41 2009 -0500
@@ -390,7 +390,8 @@
# We need command_line persisted to the db in order for Galaxy to re-queue the job
# if the server was stopped and restarted before the job finished
job.command_line = self.command_line
- job.flush()
+ self.sa_session.add( job )
+ self.sa_session.flush()
# Return list of all extra files
extra_filenames = config_filenames
if param_filename is not None:
@@ -435,11 +436,13 @@
dataset.set_size()
if dataset.ext == 'auto':
dataset.extension = 'data'
- dataset.flush()
+ self.sa_session.add( dataset )
+ self.sa_session.flush()
job.state = model.Job.states.ERROR
job.command_line = self.command_line
job.info = message
- job.flush()
+ self.sa_session.add( job )
+ self.sa_session.flush()
# If the job was deleted, just clean up
self.cleanup()
@@ -452,11 +455,13 @@
dataset.state = state
if info:
dataset.info = info
- dataset.flush()
+ self.sa_session.add( dataset )
+ self.sa_session.flush()
if info:
job.info = info
job.state = state
- job.flush()
+ self.sa_session.add( job )
+ self.sa_session.flush()
def get_state( self ):
job = self.sa_session.query( model.Job ).get( self.job_id )
@@ -468,7 +473,8 @@
self.sa_session.refresh( job )
job.job_runner_name = runner_url
job.job_runner_external_id = external_id
- job.flush()
+ self.sa_session.add( job )
+ self.sa_session.flush()
def finish( self, stdout, stderr ):
"""
@@ -554,7 +560,8 @@
dataset.blurb = "empty"
if dataset.ext == 'auto':
dataset.extension = 'txt'
- dataset.flush()
+ self.sa_session.add( dataset )
+ self.sa_session.flush()
if context['stderr']:
dataset_assoc.dataset.dataset.state = model.Dataset.states.ERROR
else:
@@ -566,7 +573,6 @@
# panel stops checking for updates. So allow the
# self.sa_session.flush() at the bottom of this method set
# the state instead.
- #dataset_assoc.dataset.dataset.flush()
# Save stdout and stderr
if len( stdout ) > 32768:
@@ -839,7 +845,8 @@
job.info = error_msg
else:
job.state = job.states.DELETED
- job.flush()
+ self.sa_session.add( job )
+ self.sa_session.flush()
# if job is in JobQueue or FooJobRunner's put method,
# job_runner_name will be unset and the job will be dequeued due to
# state change above
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py
--- a/lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py Wed Nov 11 15:59:41 2009 -0500
@@ -155,7 +155,8 @@
# Relationships
if not dataset and create_dataset:
dataset = Dataset( state=Dataset.states.NEW )
- dataset.flush()
+ context.add( dataset )
+ context.flush()
self.dataset = dataset
self.parent_id = parent_id
self.validation_errors = validation_errors
@@ -166,7 +167,8 @@
return self.dataset.state
def set_dataset_state ( self, state ):
self.dataset.state = state
- self.dataset.flush() #flush here, because hda.flush() won't flush the Dataset object
+ context.add( self.dataset )
+ context.flush() #flush here, because hda.flush() won't flush the Dataset object
state = property( get_dataset_state, set_dataset_state )
def get_file_name( self ):
return self.dataset.get_file_name()
@@ -327,7 +329,8 @@
parent_id=parent_id,
copied_from_history_dataset_association=self,
history = target_history )
- hda.flush()
+ context.add( hda )
+ context.flush()
hda.set_size()
# Need to set after flushed, as MetadataFiles require dataset.id
hda.metadata = self.metadata
@@ -337,7 +340,7 @@
if not self.datatype.copy_safe_peek:
# In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
hda.set_peek()
- hda.flush()
+ context.flush()
return hda
def to_library_dataset_dataset_association( self, target_folder, replace_dataset=None, parent_id=None ):
if replace_dataset:
@@ -347,7 +350,8 @@
# If replace_dataset is None, the Library level permissions will be taken from the folder and applied to the new
# LibraryDataset, and the current user's DefaultUserPermissions will be applied to the associated Dataset.
library_dataset = LibraryDataset( folder=target_folder, name=self.name, info=self.info )
- library_dataset.flush()
+ context.add( library_dataset )
+ context.flush()
ldda = LibraryDatasetDatasetAssociation( name=self.name,
info=self.info,
blurb=self.blurb,
@@ -361,21 +365,24 @@
parent_id=parent_id,
copied_from_history_dataset_association=self,
user=self.history.user )
- ldda.flush()
+ context.add( ldda )
+ context.flush()
# Permissions must be the same on the LibraryDatasetDatasetAssociation and the associated LibraryDataset
# Must set metadata after ldda flushed, as MetadataFiles require ldda.id
ldda.metadata = self.metadata
if not replace_dataset:
target_folder.add_library_dataset( library_dataset, genome_build=ldda.dbkey )
- target_folder.flush()
+ context.add( target_folder )
+ context.flush()
library_dataset.library_dataset_dataset_association_id = ldda.id
- library_dataset.flush()
+ context.add( library_dataset )
+ context.flush()
for child in self.children:
child_copy = child.to_library_dataset_dataset_association( target_folder=target_folder, replace_dataset=replace_dataset, parent_id=ldda.id )
if not self.datatype.copy_safe_peek:
# In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
ldda.set_peek()
- ldda.flush()
+ context.flush()
return ldda
def clear_associated_files( self, metadata_safe = False, purge = False ):
# metadata_safe = True means to only clear when assoc.metadata_safe == False
@@ -412,13 +419,14 @@
copied_from_library_dataset_dataset_association=self,
history=target_history,
hid=hid )
- hda.flush()
+ context.flush()
hda.metadata = self.metadata #need to set after flushed, as MetadataFiles require dataset.id
for child in self.children:
child_copy = child.to_history_dataset_association( target_history=target_history, parent_id=hda.id )
if not self.datatype.copy_safe_peek:
hda.set_peek() #in some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
- hda.flush()
+ context.add( hda )
+ context.flush()
return hda
def copy( self, copy_children = False, parent_id = None, target_folder = None ):
ldda = LibraryDatasetDatasetAssociation( name=self.name,
@@ -433,7 +441,8 @@
parent_id=parent_id,
copied_from_library_dataset_dataset_association=self,
folder=target_folder )
- ldda.flush()
+ context.add( ldda )
+ context.flush()
# Need to set after flushed, as MetadataFiles require dataset.id
ldda.metadata = self.metadata
if copy_children:
@@ -442,7 +451,7 @@
if not self.datatype.copy_safe_peek:
# In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
ldda.set_peek()
- ldda.flush()
+ context.flush()
return ldda
def clear_associated_files( self, metadata_safe = False, purge = False ):
return
@@ -466,8 +475,8 @@
def set_library_dataset_dataset_association( self, ldda ):
self.library_dataset_dataset_association = ldda
ldda.library_dataset = self
- ldda.flush()
- self.flush()
+ context.add_all( ( self, ldda ) )
+ context.flush()
def get_info( self ):
if self.library_dataset_dataset_association:
return self.library_dataset_dataset_association.info
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/model/orm/ext/assignmapper.py
--- a/lib/galaxy/model/orm/ext/assignmapper.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/model/orm/ext/assignmapper.py Wed Nov 11 15:59:41 2009 -0500
@@ -18,8 +18,20 @@
from sqlalchemy.orm import Query
from sqlalchemy.orm import mapper as sqla_mapper
+def _monkeypatch_query_method( name, session, class_ ):
+ # TODO: eliminate this method by fixing the single query in ~/datatypes/metadata.py ( line 396 )
+ def do(self, *args, **kwargs):
+ return getattr( class_.query, name)(*args, **kwargs)
+ try:
+ do.__name__ = name
+ except:
+ pass
+ if not hasattr(class_, name):
+ setattr(class_, name, classmethod(do))
def _monkeypatch_session_method( name, session, class_ ):
- # TODO: eliminate this method by fixing the session flushes
+ # TODO: eliminate this method by fixing the session flushes in ~/model/__init__.py ( 20 of them )
+ # and ~/datatypes/metadata.py ( 4 of them ). The affected objects have no known hook into mapping.context
+ # ( i.e., sqlalchemy session ).
def do( self, *args, **kwargs ):
if self not in session.deleted:
session.add( self )
@@ -42,6 +54,7 @@
setattr( self, key, value )
cls.__init__ = __init__
cls.query = scoped_session.query_property()
+ _monkeypatch_query_method( 'get', scoped_session, cls )
_monkeypatch_session_method( 'flush', scoped_session, cls )
return sqla_mapper( cls, *arg, **kw )
return mapper( class_, *args, **kwargs )
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/security/__init__.py Wed Nov 11 15:59:41 2009 -0500
@@ -1,625 +1,630 @@
-"""
-Galaxy Security
-
-"""
-import logging, socket
-from datetime import datetime, timedelta
-from galaxy.util.bunch import Bunch
-from galaxy.model.orm import *
-
-log = logging.getLogger(__name__)
-
-class Action( object ):
- def __init__( self, action, description, model ):
- self.action = action
- self.description = description
- self.model = model
-
-class RBACAgent:
- """Class that handles galaxy security"""
- permitted_actions = Bunch(
- DATASET_MANAGE_PERMISSIONS = Action( "manage permissions", "Role members can manage the roles associated with this dataset", "grant" ),
- DATASET_ACCESS = Action( "access", "Role members can import this dataset into their history for analysis", "restrict" ),
- LIBRARY_ADD = Action( "add library item", "Role members can add library items to this library item", "grant" ),
- LIBRARY_MODIFY = Action( "modify library item", "Role members can modify this library item", "grant" ),
- LIBRARY_MANAGE = Action( "manage library permissions", "Role members can manage roles associated with this library item", "grant" )
- )
- def get_action( self, name, default=None ):
- """Get a permitted action by its dict key or action name"""
- for k, v in self.permitted_actions.items():
- if k == name or v.action == name:
- return v
- return default
- def get_actions( self ):
- """Get all permitted actions as a list of Action objects"""
- return self.permitted_actions.__dict__.values()
- def get_item_actions( self, action, item ):
- raise 'No valid method of retrieving action (%s) for item %s.' % ( action, item )
- def guess_derived_permissions_for_datasets( self, datasets = [] ):
- raise "Unimplemented Method"
- def can_access_dataset( self, roles, dataset ):
- raise "Unimplemented Method"
- def can_manage_dataset( self, roles, dataset ):
- raise "Unimplemented Method"
- def can_add_library_item( self, user, roles, item ):
- raise "Unimplemented Method"
- def can_modify_library_item( self, user, roles, item ):
- raise "Unimplemented Method"
- def can_manage_library_item( self, user, roles, item ):
- raise "Unimplemented Method"
- def associate_components( self, **kwd ):
- raise 'No valid method of associating provided components: %s' % kwd
- def create_private_user_role( self, user ):
- raise "Unimplemented Method"
- def get_private_user_role( self, user ):
- raise "Unimplemented Method"
- def user_set_default_permissions( self, user, permissions={}, history=False, dataset=False ):
- raise "Unimplemented Method"
- def history_set_default_permissions( self, history, permissions=None, dataset=False, bypass_manage_permission=False ):
- raise "Unimplemented Method"
- def set_all_dataset_permissions( self, dataset, permissions ):
- raise "Unimplemented Method"
- def set_dataset_permission( self, dataset, permission ):
- raise "Unimplemented Method"
- def set_all_library_permissions( self, dataset, permissions ):
- raise "Unimplemented Method"
- def dataset_is_public( self, dataset ):
- raise "Unimplemented Method"
- def make_dataset_public( self, dataset ):
- raise "Unimplemented Method"
- def get_component_associations( self, **kwd ):
- raise "Unimplemented Method"
- def components_are_associated( self, **kwd ):
- return bool( self.get_component_associations( **kwd ) )
- def convert_permitted_action_strings( self, permitted_action_strings ):
- """
- When getting permitted actions from an untrusted source like a
- form, ensure that they match our actual permitted actions.
- """
- return filter( lambda x: x is not None, [ self.permitted_actions.get( action_string ) for action_string in permitted_action_strings ] )
-
-class GalaxyRBACAgent( RBACAgent ):
- def __init__( self, model, permitted_actions=None ):
- self.model = model
- if permitted_actions:
- self.permitted_actions = permitted_actions
- # List of "library_item" objects and their associated permissions and info template objects
- self.library_item_assocs = (
- ( self.model.Library, self.model.LibraryPermissions ),
- ( self.model.LibraryFolder, self.model.LibraryFolderPermissions ),
- ( self.model.LibraryDataset, self.model.LibraryDatasetPermissions ),
- ( self.model.LibraryDatasetDatasetAssociation, self.model.LibraryDatasetDatasetAssociationPermissions ) )
- @property
- def sa_session( self ):
- """
- Returns a SQLAlchemy session -- currently just gets the current
- session from the threadlocal session context, but this is provided
- to allow migration toward a more SQLAlchemy 0.4 style of use.
- """
- return self.model.context.current
- def allow_dataset_action( self, roles, action, dataset ):
- """
- Returns true when user has permission to perform an action on an
- instance of Dataset.
- """
- dataset_actions = self.get_item_actions( action, dataset )
- if not dataset_actions:
- return action.model == 'restrict'
- ret_val = False
- for dataset_action in dataset_actions:
- if dataset_action.role in roles:
- ret_val = True
- break
- return ret_val
- def can_access_dataset( self, roles, dataset ):
- return self.allow_dataset_action( roles, self.permitted_actions.DATASET_ACCESS, dataset )
- def can_manage_dataset( self, roles, dataset ):
- return self.allow_dataset_action( roles, self.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset )
- def allow_library_item_action( self, user, roles, action, item ):
- """
- Method for checking a permission for the current user to perform a
- specific library action on a library item, which must be one of:
- Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
- """
- if user is None:
- # All permissions are granted, so non-users cannot have permissions
- return False
- # Check to see if user has access to any of the roles associated with action
- item_actions = self.get_item_actions( action, item )
- if not item_actions:
- # All permissions are granted, so item must have action
- return False
- ret_val = False
- for item_action in item_actions:
- if item_action.role in roles:
- ret_val = True
- break
- return ret_val
- def can_add_library_item( self, user, roles, item ):
- return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_ADD, item )
- def can_modify_library_item( self, user, roles, item ):
- return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_MODIFY, item )
- def can_manage_library_item( self, user, roles, item ):
- return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_MANAGE, item )
- def get_item_actions( self, action, item ):
- # item must be one of: Dataset, Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
- return [ permission for permission in item.actions if permission.action == action.action ]
- def guess_derived_permissions_for_datasets( self, datasets=[] ):
- """Returns a dict of { action : [ role, role, ... ] } for the output dataset based upon provided datasets"""
- perms = {}
- for dataset in datasets:
- if not isinstance( dataset, self.model.Dataset ):
- dataset = dataset.dataset
- these_perms = {}
- # initialize blank perms
- for action in self.get_actions():
- these_perms[ action ] = []
- # collect this dataset's perms
- these_perms = self.get_dataset_permissions( dataset )
- # join or intersect this dataset's permissions with others
- for action, roles in these_perms.items():
- if action not in perms.keys():
- perms[ action ] = roles
- else:
- if action.model == 'grant':
- # intersect existing roles with new roles
- perms[ action ] = filter( lambda x: x in perms[ action ], roles )
- elif action.model == 'restrict':
- # join existing roles with new roles
- perms[ action ].extend( filter( lambda x: x not in perms[ action ], roles ) )
- return perms
- def associate_components( self, **kwd ):
- if 'user' in kwd:
- if 'group' in kwd:
- return self.associate_user_group( kwd['user'], kwd['group'] )
- elif 'role' in kwd:
- return self.associate_user_role( kwd['user'], kwd['role'] )
- elif 'role' in kwd:
- if 'group' in kwd:
- return self.associate_group_role( kwd['group'], kwd['role'] )
- if 'action' in kwd:
- if 'dataset' in kwd and 'role' in kwd:
- return self.associate_action_dataset_role( kwd['action'], kwd['dataset'], kwd['role'] )
- raise 'No valid method of associating provided components: %s' % kwd
- def associate_user_group( self, user, group ):
- assoc = self.model.UserGroupAssociation( user, group )
- assoc.flush()
- return assoc
- def associate_user_role( self, user, role ):
- assoc = self.model.UserRoleAssociation( user, role )
- assoc.flush()
- return assoc
- def associate_group_role( self, group, role ):
- assoc = self.model.GroupRoleAssociation( group, role )
- assoc.flush()
- return assoc
- def associate_action_dataset_role( self, action, dataset, role ):
- assoc = self.model.DatasetPermissions( action, dataset, role )
- assoc.flush()
- return assoc
- def create_private_user_role( self, user ):
- # Create private role
- role = self.model.Role( name=user.email, description='Private Role for ' + user.email, type=self.model.Role.types.PRIVATE )
- role.flush()
- # Add user to role
- self.associate_components( role=role, user=user )
- return role
- def get_private_user_role( self, user, auto_create=False ):
- role = self.sa_session.query( self.model.Role ) \
- .filter( and_( self.model.Role.table.c.name == user.email,
- self.model.Role.table.c.type == self.model.Role.types.PRIVATE ) ) \
- .first()
- if not role:
- if auto_create:
- return self.create_private_user_role( user )
- else:
- return None
- return role
- def user_set_default_permissions( self, user, permissions={}, history=False, dataset=False, bypass_manage_permission=False, default_access_private = False ):
- # bypass_manage_permission is used to change permissions of datasets in a userless history when logging in
- if user is None:
- return None
- if not permissions:
- #default permissions
- permissions = { self.permitted_actions.DATASET_MANAGE_PERMISSIONS : [ self.get_private_user_role( user, auto_create=True ) ] }
- #new_user_dataset_access_role_default_private is set as True in config file
- if default_access_private:
- permissions[ self.permitted_actions.DATASET_ACCESS ] = permissions.values()[ 0 ]
- # Delete all of the current default permissions for the user
- for dup in user.default_permissions:
- self.sa_session.delete( dup )
- dup.flush()
- # Add the new default permissions for the user
- for action, roles in permissions.items():
- if isinstance( action, Action ):
- action = action.action
- for dup in [ self.model.DefaultUserPermissions( user, action, role ) for role in roles ]:
- dup.flush()
- if history:
- for history in user.active_histories:
- self.history_set_default_permissions( history, permissions=permissions, dataset=dataset, bypass_manage_permission=bypass_manage_permission )
- def user_get_default_permissions( self, user ):
- permissions = {}
- for dup in user.default_permissions:
- action = self.get_action( dup.action )
- if action in permissions:
- permissions[ action ].append( dup.role )
- else:
- permissions[ action ] = [ dup.role ]
- return permissions
- def history_set_default_permissions( self, history, permissions={}, dataset=False, bypass_manage_permission=False ):
- # bypass_manage_permission is used to change permissions of datasets in a user-less history when logging in
- user = history.user
- if not user:
- # default permissions on a user-less history are None
- return None
- if not permissions:
- permissions = self.user_get_default_permissions( user )
- # Delete all of the current default permission for the history
- for dhp in history.default_permissions:
- self.sa_session.delete( dhp )
- dhp.flush()
- # Add the new default permissions for the history
- for action, roles in permissions.items():
- if isinstance( action, Action ):
- action = action.action
- for dhp in [ self.model.DefaultHistoryPermissions( history, action, role ) for role in roles ]:
- dhp.flush()
- if dataset:
- # Only deal with datasets that are not purged
- for hda in history.activatable_datasets:
- dataset = hda.dataset
- if dataset.library_associations:
- # Don't change permissions on a dataset associated with a library
- continue
- if [ assoc for assoc in dataset.history_associations if assoc.history not in user.histories ]:
- # Don't change permissions on a dataset associated with a history not owned by the user
- continue
- if bypass_manage_permission or self.can_manage_dataset( user.all_roles(), dataset ):
- self.set_all_dataset_permissions( dataset, permissions )
- def history_get_default_permissions( self, history ):
- permissions = {}
- for dhp in history.default_permissions:
- action = self.get_action( dhp.action )
- if action in permissions:
- permissions[ action ].append( dhp.role )
- else:
- permissions[ action ] = [ dhp.role ]
- return permissions
- def set_all_dataset_permissions( self, dataset, permissions={} ):
- """
- Set new permissions on a dataset, eliminating all current permissions
- permissions looks like: { Action : [ Role, Role ] }
- """
- # Delete all of the current permissions on the dataset
- # TODO: If setting ACCESS permission, at least 1 user must have every role associated with this dataset,
- # or the dataset is inaccessible. See admin/library_dataset_dataset_association()
- for dp in dataset.actions:
- self.sa_session.delete( dp )
- dp.flush()
- # Add the new permissions on the dataset
- for action, roles in permissions.items():
- if isinstance( action, Action ):
- action = action.action
- for dp in [ self.model.DatasetPermissions( action, dataset, role ) for role in roles ]:
- dp.flush()
- def set_dataset_permission( self, dataset, permission={} ):
- """
- Set a specific permission on a dataset, leaving all other current permissions on the dataset alone
- permissions looks like: { Action : [ Role, Role ] }
- """
- # TODO: If setting ACCESS permission, at least 1 user must have every role associated with this dataset,
- # or the dataset is inaccessible. See admin/library_dataset_dataset_association()
- for action, roles in permission.items():
- if isinstance( action, Action ):
- action = action.action
- # Delete the current specific permission on the dataset if one exists
- for dp in dataset.actions:
- if dp.action == action:
- self.sa_session.delete( dp )
- dp.flush()
- # Add the new specific permission on the dataset
- for dp in [ self.model.DatasetPermissions( action, dataset, role ) for role in roles ]:
- dp.flush()
- def dataset_is_public( self, dataset ):
- # A dataset is considered public if there are no "access" actions associated with it. Any
- # other actions ( 'manage permissions', 'edit metadata' ) are irrelevant.
- return self.permitted_actions.DATASET_ACCESS.action not in [ a.action for a in dataset.actions ]
- def make_dataset_public( self, dataset ):
- # A dataset is considered public if there are no "access" actions associated with it. Any
- # other actions ( 'manage permissions', 'edit metadata' ) are irrelevant.
- for dp in dataset.actions:
- if dp.action == self.permitted_actions.DATASET_ACCESS.action:
- self.sa_session.delete( dp )
- dp.flush()
- def get_dataset_permissions( self, dataset ):
- """
- Return a dictionary containing the actions and associated roles on dataset.
- The dictionary looks like: { Action : [ Role, Role ] }
- dataset must be an instance of Dataset()
- """
- permissions = {}
- for dp in dataset.actions:
- action = self.get_action( dp.action )
- if action in permissions:
- permissions[ action ].append( dp.role )
- else:
- permissions[ action ] = [ dp.role ]
- return permissions
- def copy_dataset_permissions( self, src, dst ):
- if not isinstance( src, self.model.Dataset ):
- src = src.dataset
- if not isinstance( dst, self.model.Dataset ):
- dst = dst.dataset
- self.set_all_dataset_permissions( dst, self.get_dataset_permissions( src ) )
- def privately_share_dataset( self, dataset, users = [] ):
- intersect = None
- for user in users:
- roles = [ ura.role for ura in user.roles if ura.role.type == self.model.Role.types.SHARING ]
- if intersect is None:
- intersect = roles
- else:
- new_intersect = []
- for role in roles:
- if role in intersect:
- new_intersect.append( role )
- intersect = new_intersect
- sharing_role = None
- if intersect:
- for role in intersect:
- if not filter( lambda x: x not in users, [ ura.user for ura in role.users ] ):
- # only use a role if it contains ONLY the users we're sharing with
- sharing_role = role
- break
- if sharing_role is None:
- sharing_role = self.model.Role( name = "Sharing role for: " + ", ".join( [ u.email for u in users ] ),
- type = self.model.Role.types.SHARING )
- sharing_role.flush()
- for user in users:
- self.associate_components( user=user, role=sharing_role )
- self.set_dataset_permission( dataset, { self.permitted_actions.DATASET_ACCESS : [ sharing_role ] } )
- def set_all_library_permissions( self, library_item, permissions={} ):
- # Set new permissions on library_item, eliminating all current permissions
- for role_assoc in library_item.actions:
- self.sa_session.delete( role_assoc )
- role_assoc.flush()
- # Add the new permissions on library_item
- for item_class, permission_class in self.library_item_assocs:
- if isinstance( library_item, item_class ):
- for action, roles in permissions.items():
- if isinstance( action, Action ):
- action = action.action
- for role_assoc in [ permission_class( action, library_item, role ) for role in roles ]:
- role_assoc.flush()
- def get_library_dataset_permissions( self, library_dataset ):
- # Permissions will always be the same for LibraryDatasets and associated
- # LibraryDatasetDatasetAssociations
- if isinstance( library_dataset, self.model.LibraryDatasetDatasetAssociation ):
- library_dataset = library_dataset.library_dataset
- permissions = {}
- for library_dataset_permission in library_dataset.actions:
- action = self.get_action( library_dataset_permission.action )
- if action in permissions:
- permissions[ action ].append( library_dataset_permission.role )
- else:
- permissions[ action ] = [ library_dataset_permission.role ]
- return permissions
- def copy_library_permissions( self, source_library_item, target_library_item, user=None ):
- # Copy all permissions from source
- permissions = {}
- for role_assoc in source_library_item.actions:
- if role_assoc.action in permissions:
- permissions[role_assoc.action].append( role_assoc.role )
- else:
- permissions[role_assoc.action] = [ role_assoc.role ]
- self.set_all_library_permissions( target_library_item, permissions )
- if user:
- item_class = None
- for item_class, permission_class in self.library_item_assocs:
- if isinstance( target_library_item, item_class ):
- break
- if item_class:
- # Make sure user's private role is included
- private_role = self.model.security_agent.get_private_user_role( user )
- for name, action in self.permitted_actions.items():
- if not permission_class.filter_by( role_id = private_role.id, action = action.action ).first():
- lp = permission_class( action.action, target_library_item, private_role )
- lp.flush()
- else:
- raise 'Invalid class (%s) specified for target_library_item (%s)' % \
- ( target_library_item.__class__, target_library_item.__class__.__name__ )
- def show_library_item( self, user, roles, library_item, actions_to_check, hidden_folder_ids='' ):
- """
- This method must be sent an instance of Library() or LibraryFolder(). Recursive execution produces a
- comma-separated string of folder ids whose folders do NOT meet the criteria for showing. Along with
- the string, True is returned if the current user has permission to perform any 1 of actions_to_check
- on library_item. Otherwise, cycle through all sub-folders in library_item until one is found that meets
- this criteria, if it exists. This method does not necessarily scan the entire library as it returns
- when it finds the first library_item that allows user to perform any one action in actions_to_check.
- """
- for action in actions_to_check:
- if self.allow_library_item_action( user, roles, action, library_item ):
- return True, hidden_folder_ids
- if isinstance( library_item, self.model.Library ):
- return self.show_library_item( user, roles, library_item.root_folder, actions_to_check, hidden_folder_ids='' )
- if isinstance( library_item, self.model.LibraryFolder ):
- for folder in library_item.active_folders:
- can_show, hidden_folder_ids = self.show_library_item( user, roles, folder, actions_to_check, hidden_folder_ids=hidden_folder_ids )
- if can_show:
- return True, hidden_folder_ids
- if hidden_folder_ids:
- hidden_folder_ids = '%s,%d' % ( hidden_folder_ids, folder.id )
- else:
- hidden_folder_ids = '%d' % folder.id
- return False, hidden_folder_ids
- def get_showable_folders( self, user, roles, library_item, actions_to_check, hidden_folder_ids=[], showable_folders=[] ):
- """
- This method must be sent an instance of Library(), all the folders of which are scanned to determine if
- user is allowed to perform any action in actions_to_check. The param hidden_folder_ids, if passed, should
- contain a list of folder IDs which was generated when the library was previously scanned
- using the same actions_to_check. A list of showable folders is generated. This method scans the entire library.
- """
- if isinstance( library_item, self.model.Library ):
- return self.get_showable_folders( user, roles, library_item.root_folder, actions_to_check, showable_folders=[] )
- if isinstance( library_item, self.model.LibraryFolder ):
- if library_item.id not in hidden_folder_ids:
- for action in actions_to_check:
- if self.allow_library_item_action( user, roles, action, library_item ):
- showable_folders.append( library_item )
- break
- for folder in library_item.active_folders:
- self.get_showable_folders( user, roles, folder, actions_to_check, showable_folders=showable_folders )
- return showable_folders
- def set_entity_user_associations( self, users=[], roles=[], groups=[], delete_existing_assocs=True ):
- for user in users:
- if delete_existing_assocs:
- for a in user.non_private_roles + user.groups:
- self.sa_session.delete( a )
- a.flush()
- self.sa_session.refresh( user )
- for role in roles:
- # Make sure we are not creating an additional association with a PRIVATE role
- if role not in user.roles:
- self.associate_components( user=user, role=role )
- for group in groups:
- self.associate_components( user=user, group=group )
- def set_entity_group_associations( self, groups=[], users=[], roles=[], delete_existing_assocs=True ):
- for group in groups:
- if delete_existing_assocs:
- for a in group.roles + group.users:
- self.sa_session.delete( a )
- a.flush()
- for role in roles:
- self.associate_components( group=group, role=role )
- for user in users:
- self.associate_components( group=group, user=user )
- def set_entity_role_associations( self, roles=[], users=[], groups=[], delete_existing_assocs=True ):
- for role in roles:
- if delete_existing_assocs:
- for a in role.users + role.groups:
- self.sa_session.delete( a )
- a.flush()
- for user in users:
- self.associate_components( user=user, role=role )
- for group in groups:
- self.associate_components( group=group, role=role )
- def get_component_associations( self, **kwd ):
- assert len( kwd ) == 2, 'You must specify exactly 2 Galaxy security components to check for associations.'
- if 'dataset' in kwd:
- if 'action' in kwd:
- return self.sa_session.query( self.model.DatasetPermissions ).filter_by( action = kwd['action'].action, dataset_id = kwd['dataset'].id ).first()
- elif 'user' in kwd:
- if 'group' in kwd:
- return self.sa_session.query( self.model.UserGroupAssociation ).filter_by( group_id = kwd['group'].id, user_id = kwd['user'].id ).first()
- elif 'role' in kwd:
- return self.sa_session.query( self.model.UserRoleAssociation ).filter_by( role_id = kwd['role'].id, user_id = kwd['user'].id ).first()
- elif 'group' in kwd:
- if 'role' in kwd:
- return self.sa_session.query( self.model.GroupRoleAssociation ).filter_by( role_id = kwd['role'].id, group_id = kwd['group'].id ).first()
- raise 'No valid method of associating provided components: %s' % kwd
- def check_folder_contents( self, user, roles, folder, hidden_folder_ids='' ):
- """
- This method must always be sent an instance of LibraryFolder(). Recursive execution produces a
- comma-separated string of folder ids whose folders do NOT meet the criteria for showing. Along
- with the string, True is returned if the current user has permission to access folder. Otherwise,
- cycle through all sub-folders in folder until one is found that meets this criteria, if it exists.
- This method does not necessarily scan the entire library as it returns when it finds the first
- folder that is accessible to user.
- """
- action = self.permitted_actions.DATASET_ACCESS
- lddas = self.sa_session.query( self.model.LibraryDatasetDatasetAssociation ) \
- .join( "library_dataset" ) \
- .filter( self.model.LibraryDataset.folder == folder ) \
- .join( "dataset" ) \
- .options( eagerload_all( "dataset.actions" ) ) \
- .all()
- for ldda in lddas:
- ldda_access_permissions = self.get_item_actions( action, ldda.dataset )
- if not ldda_access_permissions:
- # Dataset is public
- return True, hidden_folder_ids
- for ldda_access_permission in ldda_access_permissions:
- if ldda_access_permission.role in roles:
- # The current user has access permission on the dataset
- return True, hidden_folder_ids
- for sub_folder in folder.active_folders:
- can_access, hidden_folder_ids = self.check_folder_contents( user, roles, sub_folder, hidden_folder_ids=hidden_folder_ids )
- if can_access:
- return True, hidden_folder_ids
- if hidden_folder_ids:
- hidden_folder_ids = '%s,%d' % ( hidden_folder_ids, sub_folder.id )
- else:
- hidden_folder_ids = '%d' % sub_folder.id
- return False, hidden_folder_ids
-
-class HostAgent( RBACAgent ):
- """
- A simple security agent which allows access to datasets based on host.
-    This exists so that external sites such as UCSC can gain access to
-    datasets whose permissions would normally prevent such access.
- """
- # TODO: Make sites user configurable
- sites = Bunch(
- ucsc_main = ( 'hgw1.cse.ucsc.edu', 'hgw2.cse.ucsc.edu', 'hgw3.cse.ucsc.edu', 'hgw4.cse.ucsc.edu',
- 'hgw5.cse.ucsc.edu', 'hgw6.cse.ucsc.edu', 'hgw7.cse.ucsc.edu', 'hgw8.cse.ucsc.edu' ),
- ucsc_test = ( 'hgwdev.cse.ucsc.edu', ),
- ucsc_archaea = ( 'lowepub.cse.ucsc.edu', )
- )
- def __init__( self, model, permitted_actions=None ):
- self.model = model
- if permitted_actions:
- self.permitted_actions = permitted_actions
- @property
- def sa_session( self ):
- return self.model.context.current
- def allow_action( self, addr, action, **kwd ):
- if 'dataset' in kwd and action == self.permitted_actions.DATASET_ACCESS:
- hda = kwd['dataset']
- if action == self.permitted_actions.DATASET_ACCESS and action.action not in [ dp.action for dp in hda.dataset.actions ]:
- log.debug( 'Allowing access to public dataset with hda: %i.' % hda.id )
- return True # dataset has no roles associated with the access permission, thus is already public
- hdadaa = self.sa_session.query( self.model.HistoryDatasetAssociationDisplayAtAuthorization ) \
- .filter_by( history_dataset_association_id = hda.id ).first()
- if not hdadaa:
- log.debug( 'Denying access to private dataset with hda: %i. No hdadaa record for this dataset.' % hda.id )
- return False # no auth
- # We could just look up the reverse of addr, but then we'd also
- # have to verify it with the forward address and special case any
- # IPs (instead of hosts) in the server list.
- #
- # This would be improved by caching, but that's what the OS's name
- # service cache daemon is for (you ARE running nscd, right?).
- for server in HostAgent.sites.get( hdadaa.site, [] ):
- # We're going to search in order, but if the remote site is load
- # balancing their connections (as UCSC does), this is okay.
- try:
- if socket.gethostbyname( server ) == addr:
- break # remote host is in the server list
- except ( socket.error, socket.gaierror ):
- pass # can't resolve, try next
- else:
- log.debug( 'Denying access to private dataset with hda: %i. Remote addr is not a valid server for site: %s.' % ( hda.id, hdadaa.site ) )
- return False # remote addr is not in the server list
- if ( datetime.utcnow() - hdadaa.update_time ) > timedelta( seconds=60 ):
- log.debug( 'Denying access to private dataset with hda: %i. Authorization was granted, but has expired.' % hda.id )
- return False # not authz'd in the last 60 seconds
- log.debug( 'Allowing access to private dataset with hda: %i. Remote server is: %s.' % ( hda.id, server ) )
- return True
- else:
- raise 'The dataset access permission is the only valid permission in the host security agent.'
- def set_dataset_permissions( self, hda, user, site ):
- hdadaa = self.sa_session.query( self.model.HistoryDatasetAssociationDisplayAtAuthorization ) \
- .filter_by( history_dataset_association_id = hda.id ).first()
- if hdadaa:
- hdadaa.update_time = datetime.utcnow()
- else:
- hdadaa = self.model.HistoryDatasetAssociationDisplayAtAuthorization( hda=hda, user=user, site=site )
- hdadaa.flush()
-
-def get_permitted_actions( filter=None ):
- '''Utility method to return a subset of RBACAgent's permitted actions'''
- if filter is None:
- return RBACAgent.permitted_actions
- tmp_bunch = Bunch()
- [ tmp_bunch.__dict__.__setitem__(k, v) for k, v in RBACAgent.permitted_actions.items() if k.startswith( filter ) ]
- return tmp_bunch
+"""
+Galaxy Security
+
+"""
+import logging, socket
+from datetime import datetime, timedelta
+from galaxy.util.bunch import Bunch
+from galaxy.model.orm import *
+
+log = logging.getLogger(__name__)
+
+class Action( object ):
+ def __init__( self, action, description, model ):
+ self.action = action
+ self.description = description
+ self.model = model
+
+class RBACAgent:
+ """Class that handles galaxy security"""
+ permitted_actions = Bunch(
+ DATASET_MANAGE_PERMISSIONS = Action( "manage permissions", "Role members can manage the roles associated with this dataset", "grant" ),
+ DATASET_ACCESS = Action( "access", "Role members can import this dataset into their history for analysis", "restrict" ),
+ LIBRARY_ADD = Action( "add library item", "Role members can add library items to this library item", "grant" ),
+ LIBRARY_MODIFY = Action( "modify library item", "Role members can modify this library item", "grant" ),
+ LIBRARY_MANAGE = Action( "manage library permissions", "Role members can manage roles associated with this library item", "grant" )
+ )
+ def get_action( self, name, default=None ):
+ """Get a permitted action by its dict key or action name"""
+ for k, v in self.permitted_actions.items():
+ if k == name or v.action == name:
+ return v
+ return default
+ def get_actions( self ):
+ """Get all permitted actions as a list of Action objects"""
+ return self.permitted_actions.__dict__.values()
+    def get_item_actions( self, action, item ):
+        raise Exception( 'No valid method of retrieving action (%s) for item %s.' % ( action, item ) )
+    def guess_derived_permissions_for_datasets( self, datasets = [] ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def can_access_dataset( self, roles, dataset ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def can_manage_dataset( self, roles, dataset ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def can_add_library_item( self, user, roles, item ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def can_modify_library_item( self, user, roles, item ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def can_manage_library_item( self, user, roles, item ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def associate_components( self, **kwd ):
+        raise Exception( 'No valid method of associating provided components: %s' % kwd )
+    def create_private_user_role( self, user ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def get_private_user_role( self, user ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def user_set_default_permissions( self, user, permissions={}, history=False, dataset=False ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def history_set_default_permissions( self, history, permissions=None, dataset=False, bypass_manage_permission=False ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def set_all_dataset_permissions( self, dataset, permissions ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def set_dataset_permission( self, dataset, permission ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def set_all_library_permissions( self, dataset, permissions ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def dataset_is_public( self, dataset ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def make_dataset_public( self, dataset ):
+        raise NotImplementedError( "Unimplemented Method" )
+    def get_component_associations( self, **kwd ):
+        raise NotImplementedError( "Unimplemented Method" )
+ def components_are_associated( self, **kwd ):
+ return bool( self.get_component_associations( **kwd ) )
+ def convert_permitted_action_strings( self, permitted_action_strings ):
+ """
+ When getting permitted actions from an untrusted source like a
+ form, ensure that they match our actual permitted actions.
+ """
+ return filter( lambda x: x is not None, [ self.permitted_actions.get( action_string ) for action_string in permitted_action_strings ] )
+
+class GalaxyRBACAgent( RBACAgent ):
+ def __init__( self, model, permitted_actions=None ):
+ self.model = model
+ if permitted_actions:
+ self.permitted_actions = permitted_actions
+ # List of "library_item" objects and their associated permissions and info template objects
+ self.library_item_assocs = (
+ ( self.model.Library, self.model.LibraryPermissions ),
+ ( self.model.LibraryFolder, self.model.LibraryFolderPermissions ),
+ ( self.model.LibraryDataset, self.model.LibraryDatasetPermissions ),
+ ( self.model.LibraryDatasetDatasetAssociation, self.model.LibraryDatasetDatasetAssociationPermissions ) )
+ @property
+ def sa_session( self ):
+ """Returns a SQLAlchemy session"""
+ return self.model.context
+ def allow_dataset_action( self, roles, action, dataset ):
+ """
+ Returns true when user has permission to perform an action on an
+ instance of Dataset.
+ """
+ dataset_actions = self.get_item_actions( action, dataset )
+ if not dataset_actions:
+ return action.model == 'restrict'
+ ret_val = False
+ for dataset_action in dataset_actions:
+ if dataset_action.role in roles:
+ ret_val = True
+ break
+ return ret_val
+ def can_access_dataset( self, roles, dataset ):
+ return self.allow_dataset_action( roles, self.permitted_actions.DATASET_ACCESS, dataset )
+ def can_manage_dataset( self, roles, dataset ):
+ return self.allow_dataset_action( roles, self.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset )
+ def allow_library_item_action( self, user, roles, action, item ):
+ """
+ Method for checking a permission for the current user to perform a
+ specific library action on a library item, which must be one of:
+ Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
+ """
+        if user is None:
+            # All library permissions must be explicitly granted, so an anonymous
+            # user ( who can hold no roles ) can never be allowed the action
+            return False
+        # Check to see if user has access to any of the roles associated with action
+        item_actions = self.get_item_actions( action, item )
+        if not item_actions:
+            # All library permissions use the 'grant' model, so an item with no
+            # roles associated with this action cannot be acted on
+            return False
+ ret_val = False
+ for item_action in item_actions:
+ if item_action.role in roles:
+ ret_val = True
+ break
+ return ret_val
+ def can_add_library_item( self, user, roles, item ):
+ return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_ADD, item )
+ def can_modify_library_item( self, user, roles, item ):
+ return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_MODIFY, item )
+ def can_manage_library_item( self, user, roles, item ):
+ return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_MANAGE, item )
+ def get_item_actions( self, action, item ):
+ # item must be one of: Dataset, Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
+ return [ permission for permission in item.actions if permission.action == action.action ]
+ def guess_derived_permissions_for_datasets( self, datasets=[] ):
+ """Returns a dict of { action : [ role, role, ... ] } for the output dataset based upon provided datasets"""
+ perms = {}
+ for dataset in datasets:
+ if not isinstance( dataset, self.model.Dataset ):
+ dataset = dataset.dataset
+            # collect this dataset's perms
+            these_perms = self.get_dataset_permissions( dataset )
+ # join or intersect this dataset's permissions with others
+ for action, roles in these_perms.items():
+ if action not in perms.keys():
+ perms[ action ] = roles
+ else:
+ if action.model == 'grant':
+ # intersect existing roles with new roles
+ perms[ action ] = filter( lambda x: x in perms[ action ], roles )
+ elif action.model == 'restrict':
+ # join existing roles with new roles
+ perms[ action ].extend( filter( lambda x: x not in perms[ action ], roles ) )
+ return perms
+ def associate_components( self, **kwd ):
+ if 'user' in kwd:
+ if 'group' in kwd:
+ return self.associate_user_group( kwd['user'], kwd['group'] )
+ elif 'role' in kwd:
+ return self.associate_user_role( kwd['user'], kwd['role'] )
+ elif 'role' in kwd:
+ if 'group' in kwd:
+ return self.associate_group_role( kwd['group'], kwd['role'] )
+ if 'action' in kwd:
+ if 'dataset' in kwd and 'role' in kwd:
+ return self.associate_action_dataset_role( kwd['action'], kwd['dataset'], kwd['role'] )
+        raise Exception( 'No valid method of associating provided components: %s' % kwd )
+ def associate_user_group( self, user, group ):
+ assoc = self.model.UserGroupAssociation( user, group )
+ self.sa_session.add( assoc )
+ self.sa_session.flush()
+ return assoc
+ def associate_user_role( self, user, role ):
+ assoc = self.model.UserRoleAssociation( user, role )
+ self.sa_session.add( assoc )
+ self.sa_session.flush()
+ return assoc
+ def associate_group_role( self, group, role ):
+ assoc = self.model.GroupRoleAssociation( group, role )
+ self.sa_session.add( assoc )
+ self.sa_session.flush()
+ return assoc
+ def associate_action_dataset_role( self, action, dataset, role ):
+ assoc = self.model.DatasetPermissions( action, dataset, role )
+ self.sa_session.add( assoc )
+ self.sa_session.flush()
+ return assoc
+ def create_private_user_role( self, user ):
+ # Create private role
+ role = self.model.Role( name=user.email, description='Private Role for ' + user.email, type=self.model.Role.types.PRIVATE )
+ self.sa_session.add( role )
+ self.sa_session.flush()
+ # Add user to role
+ self.associate_components( role=role, user=user )
+ return role
+ def get_private_user_role( self, user, auto_create=False ):
+ role = self.sa_session.query( self.model.Role ) \
+ .filter( and_( self.model.Role.table.c.name == user.email,
+ self.model.Role.table.c.type == self.model.Role.types.PRIVATE ) ) \
+ .first()
+ if not role:
+ if auto_create:
+ return self.create_private_user_role( user )
+ else:
+ return None
+ return role
+ def user_set_default_permissions( self, user, permissions={}, history=False, dataset=False, bypass_manage_permission=False, default_access_private = False ):
+ # bypass_manage_permission is used to change permissions of datasets in a userless history when logging in
+ if user is None:
+ return None
+ if not permissions:
+            # default permissions
+            permissions = { self.permitted_actions.DATASET_MANAGE_PERMISSIONS : [ self.get_private_user_role( user, auto_create=True ) ] }
+            # new_user_dataset_access_role_default_private is set to True in the config file
+ if default_access_private:
+ permissions[ self.permitted_actions.DATASET_ACCESS ] = permissions.values()[ 0 ]
+ # Delete all of the current default permissions for the user
+ for dup in user.default_permissions:
+ self.sa_session.delete( dup )
+ # Add the new default permissions for the user
+ for action, roles in permissions.items():
+ if isinstance( action, Action ):
+ action = action.action
+ for dup in [ self.model.DefaultUserPermissions( user, action, role ) for role in roles ]:
+ self.sa_session.add( dup )
+ self.sa_session.flush()
+ if history:
+ for history in user.active_histories:
+ self.history_set_default_permissions( history, permissions=permissions, dataset=dataset, bypass_manage_permission=bypass_manage_permission )
+ def user_get_default_permissions( self, user ):
+ permissions = {}
+ for dup in user.default_permissions:
+ action = self.get_action( dup.action )
+ if action in permissions:
+ permissions[ action ].append( dup.role )
+ else:
+ permissions[ action ] = [ dup.role ]
+ return permissions
+ def history_set_default_permissions( self, history, permissions={}, dataset=False, bypass_manage_permission=False ):
+ # bypass_manage_permission is used to change permissions of datasets in a user-less history when logging in
+ user = history.user
+ if not user:
+ # default permissions on a user-less history are None
+ return None
+ if not permissions:
+ permissions = self.user_get_default_permissions( user )
+        # Delete all of the current default permissions for the history
+ for dhp in history.default_permissions:
+ self.sa_session.delete( dhp )
+ # Add the new default permissions for the history
+ for action, roles in permissions.items():
+ if isinstance( action, Action ):
+ action = action.action
+ for dhp in [ self.model.DefaultHistoryPermissions( history, action, role ) for role in roles ]:
+ self.sa_session.add( dhp )
+ self.sa_session.flush()
+ if dataset:
+ # Only deal with datasets that are not purged
+ for hda in history.activatable_datasets:
+ dataset = hda.dataset
+ if dataset.library_associations:
+ # Don't change permissions on a dataset associated with a library
+ continue
+ if [ assoc for assoc in dataset.history_associations if assoc.history not in user.histories ]:
+ # Don't change permissions on a dataset associated with a history not owned by the user
+ continue
+ if bypass_manage_permission or self.can_manage_dataset( user.all_roles(), dataset ):
+ self.set_all_dataset_permissions( dataset, permissions )
+ def history_get_default_permissions( self, history ):
+ permissions = {}
+ for dhp in history.default_permissions:
+ action = self.get_action( dhp.action )
+ if action in permissions:
+ permissions[ action ].append( dhp.role )
+ else:
+ permissions[ action ] = [ dhp.role ]
+ return permissions
+ def set_all_dataset_permissions( self, dataset, permissions={} ):
+ """
+ Set new permissions on a dataset, eliminating all current permissions
+ permissions looks like: { Action : [ Role, Role ] }
+ """
+ # Delete all of the current permissions on the dataset
+ # TODO: If setting ACCESS permission, at least 1 user must have every role associated with this dataset,
+ # or the dataset is inaccessible. See admin/library_dataset_dataset_association()
+ for dp in dataset.actions:
+ self.sa_session.delete( dp )
+ # Add the new permissions on the dataset
+ for action, roles in permissions.items():
+ if isinstance( action, Action ):
+ action = action.action
+ for dp in [ self.model.DatasetPermissions( action, dataset, role ) for role in roles ]:
+ self.sa_session.add( dp )
+ self.sa_session.flush()
+ def set_dataset_permission( self, dataset, permission={} ):
+ """
+ Set a specific permission on a dataset, leaving all other current permissions on the dataset alone
+ permissions looks like: { Action : [ Role, Role ] }
+ """
+ # TODO: If setting ACCESS permission, at least 1 user must have every role associated with this dataset,
+ # or the dataset is inaccessible. See admin/library_dataset_dataset_association()
+ for action, roles in permission.items():
+ if isinstance( action, Action ):
+ action = action.action
+ # Delete the current specific permission on the dataset if one exists
+ for dp in dataset.actions:
+ if dp.action == action:
+ self.sa_session.delete( dp )
+ # Add the new specific permission on the dataset
+ for dp in [ self.model.DatasetPermissions( action, dataset, role ) for role in roles ]:
+ self.sa_session.add( dp )
+ self.sa_session.flush()
+ def dataset_is_public( self, dataset ):
+ # A dataset is considered public if there are no "access" actions associated with it. Any
+ # other actions ( 'manage permissions', 'edit metadata' ) are irrelevant.
+ return self.permitted_actions.DATASET_ACCESS.action not in [ a.action for a in dataset.actions ]
+ def make_dataset_public( self, dataset ):
+ # A dataset is considered public if there are no "access" actions associated with it. Any
+ # other actions ( 'manage permissions', 'edit metadata' ) are irrelevant.
+ for dp in dataset.actions:
+ if dp.action == self.permitted_actions.DATASET_ACCESS.action:
+ self.sa_session.delete( dp )
+ self.sa_session.flush()
+ def get_dataset_permissions( self, dataset ):
+ """
+ Return a dictionary containing the actions and associated roles on dataset.
+ The dictionary looks like: { Action : [ Role, Role ] }
+ dataset must be an instance of Dataset()
+ """
+ permissions = {}
+ for dp in dataset.actions:
+ action = self.get_action( dp.action )
+ if action in permissions:
+ permissions[ action ].append( dp.role )
+ else:
+ permissions[ action ] = [ dp.role ]
+ return permissions
+ def copy_dataset_permissions( self, src, dst ):
+ if not isinstance( src, self.model.Dataset ):
+ src = src.dataset
+ if not isinstance( dst, self.model.Dataset ):
+ dst = dst.dataset
+ self.set_all_dataset_permissions( dst, self.get_dataset_permissions( src ) )
+ def privately_share_dataset( self, dataset, users = [] ):
+ intersect = None
+ for user in users:
+ roles = [ ura.role for ura in user.roles if ura.role.type == self.model.Role.types.SHARING ]
+ if intersect is None:
+ intersect = roles
+ else:
+ new_intersect = []
+ for role in roles:
+ if role in intersect:
+ new_intersect.append( role )
+ intersect = new_intersect
+ sharing_role = None
+ if intersect:
+ for role in intersect:
+ if not filter( lambda x: x not in users, [ ura.user for ura in role.users ] ):
+ # only use a role if it contains ONLY the users we're sharing with
+ sharing_role = role
+ break
+ if sharing_role is None:
+ sharing_role = self.model.Role( name = "Sharing role for: " + ", ".join( [ u.email for u in users ] ),
+ type = self.model.Role.types.SHARING )
+ self.sa_session.add( sharing_role )
+ self.sa_session.flush()
+ for user in users:
+ self.associate_components( user=user, role=sharing_role )
+ self.set_dataset_permission( dataset, { self.permitted_actions.DATASET_ACCESS : [ sharing_role ] } )
+ def set_all_library_permissions( self, library_item, permissions={} ):
+ # Set new permissions on library_item, eliminating all current permissions
+ for role_assoc in library_item.actions:
+ self.sa_session.delete( role_assoc )
+ # Add the new permissions on library_item
+ for item_class, permission_class in self.library_item_assocs:
+ if isinstance( library_item, item_class ):
+ for action, roles in permissions.items():
+ if isinstance( action, Action ):
+ action = action.action
+ for role_assoc in [ permission_class( action, library_item, role ) for role in roles ]:
+ self.sa_session.add( role_assoc )
+ self.sa_session.flush()
+ def get_library_dataset_permissions( self, library_dataset ):
+ # Permissions will always be the same for LibraryDatasets and associated
+ # LibraryDatasetDatasetAssociations
+ if isinstance( library_dataset, self.model.LibraryDatasetDatasetAssociation ):
+ library_dataset = library_dataset.library_dataset
+ permissions = {}
+ for library_dataset_permission in library_dataset.actions:
+ action = self.get_action( library_dataset_permission.action )
+ if action in permissions:
+ permissions[ action ].append( library_dataset_permission.role )
+ else:
+ permissions[ action ] = [ library_dataset_permission.role ]
+ return permissions
+ def copy_library_permissions( self, source_library_item, target_library_item, user=None ):
+ # Copy all permissions from source
+ permissions = {}
+ for role_assoc in source_library_item.actions:
+ if role_assoc.action in permissions:
+ permissions[role_assoc.action].append( role_assoc.role )
+ else:
+ permissions[role_assoc.action] = [ role_assoc.role ]
+ self.set_all_library_permissions( target_library_item, permissions )
+        if user:
+            # Find the permission class that matches the target library item's class;
+            # the loop variable survives the loop, so track whether any class matched
+            matched = False
+            for item_class, permission_class in self.library_item_assocs:
+                if isinstance( target_library_item, item_class ):
+                    matched = True
+                    break
+            if matched:
+                # Make sure user's private role is included
+                private_role = self.model.security_agent.get_private_user_role( user )
+                for name, action in self.permitted_actions.items():
+                    if not self.sa_session.query( permission_class ) \
+                                          .filter_by( role_id = private_role.id, action = action.action ).first():
+                        lp = permission_class( action.action, target_library_item, private_role )
+                        self.sa_session.add( lp )
+                        self.sa_session.flush()
+            else:
+                raise Exception( 'Invalid class (%s) specified for target_library_item (%s)' % \
+                    ( target_library_item.__class__.__name__, target_library_item ) )
+ def show_library_item( self, user, roles, library_item, actions_to_check, hidden_folder_ids='' ):
+ """
+ This method must be sent an instance of Library() or LibraryFolder(). Recursive execution produces a
+ comma-separated string of folder ids whose folders do NOT meet the criteria for showing. Along with
+ the string, True is returned if the current user has permission to perform any 1 of actions_to_check
+ on library_item. Otherwise, cycle through all sub-folders in library_item until one is found that meets
+ this criteria, if it exists. This method does not necessarily scan the entire library as it returns
+ when it finds the first library_item that allows user to perform any one action in actions_to_check.
+ """
+ for action in actions_to_check:
+ if self.allow_library_item_action( user, roles, action, library_item ):
+ return True, hidden_folder_ids
+ if isinstance( library_item, self.model.Library ):
+ return self.show_library_item( user, roles, library_item.root_folder, actions_to_check, hidden_folder_ids='' )
+ if isinstance( library_item, self.model.LibraryFolder ):
+ for folder in library_item.active_folders:
+ can_show, hidden_folder_ids = self.show_library_item( user, roles, folder, actions_to_check, hidden_folder_ids=hidden_folder_ids )
+ if can_show:
+ return True, hidden_folder_ids
+ if hidden_folder_ids:
+ hidden_folder_ids = '%s,%d' % ( hidden_folder_ids, folder.id )
+ else:
+ hidden_folder_ids = '%d' % folder.id
+ return False, hidden_folder_ids
+ def get_showable_folders( self, user, roles, library_item, actions_to_check, hidden_folder_ids=[], showable_folders=[] ):
+ """
+ This method must be sent an instance of Library(), all the folders of which are scanned to determine if
+ user is allowed to perform any action in actions_to_check. The param hidden_folder_ids, if passed, should
+ contain a list of folder IDs which was generated when the library was previously scanned
+ using the same actions_to_check. A list of showable folders is generated. This method scans the entire library.
+ """
+ if isinstance( library_item, self.model.Library ):
+ return self.get_showable_folders( user, roles, library_item.root_folder, actions_to_check, showable_folders=[] )
+ if isinstance( library_item, self.model.LibraryFolder ):
+ if library_item.id not in hidden_folder_ids:
+ for action in actions_to_check:
+ if self.allow_library_item_action( user, roles, action, library_item ):
+ showable_folders.append( library_item )
+ break
+ for folder in library_item.active_folders:
+ self.get_showable_folders( user, roles, folder, actions_to_check, showable_folders=showable_folders )
+ return showable_folders
+ def set_entity_user_associations( self, users=[], roles=[], groups=[], delete_existing_assocs=True ):
+ for user in users:
+ if delete_existing_assocs:
+ for a in user.non_private_roles + user.groups:
+ self.sa_session.delete( a )
+ self.sa_session.flush()
+ self.sa_session.refresh( user )
+ for role in roles:
+ # Make sure we are not creating an additional association with a PRIVATE role
+ if role not in user.roles:
+ self.associate_components( user=user, role=role )
+ for group in groups:
+ self.associate_components( user=user, group=group )
+ def set_entity_group_associations( self, groups=[], users=[], roles=[], delete_existing_assocs=True ):
+ for group in groups:
+ if delete_existing_assocs:
+ for a in group.roles + group.users:
+ self.sa_session.delete( a )
+ self.sa_session.flush()
+ for role in roles:
+ self.associate_components( group=group, role=role )
+ for user in users:
+ self.associate_components( group=group, user=user )
+ def set_entity_role_associations( self, roles=[], users=[], groups=[], delete_existing_assocs=True ):
+ for role in roles:
+ if delete_existing_assocs:
+ for a in role.users + role.groups:
+ self.sa_session.delete( a )
+ self.sa_session.flush()
+ for user in users:
+ self.associate_components( user=user, role=role )
+ for group in groups:
+ self.associate_components( group=group, role=role )
+ def get_component_associations( self, **kwd ):
+ assert len( kwd ) == 2, 'You must specify exactly 2 Galaxy security components to check for associations.'
+ if 'dataset' in kwd:
+ if 'action' in kwd:
+ return self.sa_session.query( self.model.DatasetPermissions ).filter_by( action = kwd['action'].action, dataset_id = kwd['dataset'].id ).first()
+ elif 'user' in kwd:
+ if 'group' in kwd:
+ return self.sa_session.query( self.model.UserGroupAssociation ).filter_by( group_id = kwd['group'].id, user_id = kwd['user'].id ).first()
+ elif 'role' in kwd:
+ return self.sa_session.query( self.model.UserRoleAssociation ).filter_by( role_id = kwd['role'].id, user_id = kwd['user'].id ).first()
+ elif 'group' in kwd:
+ if 'role' in kwd:
+ return self.sa_session.query( self.model.GroupRoleAssociation ).filter_by( role_id = kwd['role'].id, group_id = kwd['group'].id ).first()
+        raise Exception( 'No valid method of checking provided components for associations: %s' % kwd )
+ def check_folder_contents( self, user, roles, folder, hidden_folder_ids='' ):
+ """
+ This method must always be sent an instance of LibraryFolder(). Recursive execution produces a
+ comma-separated string of folder ids whose folders do NOT meet the criteria for showing. Along
+ with the string, True is returned if the current user has permission to access folder. Otherwise,
+ cycle through all sub-folders in folder until one is found that meets this criteria, if it exists.
+ This method does not necessarily scan the entire library as it returns when it finds the first
+ folder that is accessible to user.
+ """
+ action = self.permitted_actions.DATASET_ACCESS
+ lddas = self.sa_session.query( self.model.LibraryDatasetDatasetAssociation ) \
+ .join( "library_dataset" ) \
+ .filter( self.model.LibraryDataset.folder == folder ) \
+ .join( "dataset" ) \
+ .options( eagerload_all( "dataset.actions" ) ) \
+ .all()
+ for ldda in lddas:
+ ldda_access_permissions = self.get_item_actions( action, ldda.dataset )
+ if not ldda_access_permissions:
+ # Dataset is public
+ return True, hidden_folder_ids
+ for ldda_access_permission in ldda_access_permissions:
+ if ldda_access_permission.role in roles:
+ # The current user has access permission on the dataset
+ return True, hidden_folder_ids
+ for sub_folder in folder.active_folders:
+ can_access, hidden_folder_ids = self.check_folder_contents( user, roles, sub_folder, hidden_folder_ids=hidden_folder_ids )
+ if can_access:
+ return True, hidden_folder_ids
+ if hidden_folder_ids:
+ hidden_folder_ids = '%s,%d' % ( hidden_folder_ids, sub_folder.id )
+ else:
+ hidden_folder_ids = '%d' % sub_folder.id
+ return False, hidden_folder_ids
+
+class HostAgent( RBACAgent ):
+ """
+    A simple security agent which allows access to datasets based on host.
+    This exists so that external sites such as UCSC can gain access to
+    datasets whose permissions would normally prevent such access.
+ """
+ # TODO: Make sites user configurable
+ sites = Bunch(
+ ucsc_main = ( 'hgw1.cse.ucsc.edu', 'hgw2.cse.ucsc.edu', 'hgw3.cse.ucsc.edu', 'hgw4.cse.ucsc.edu',
+ 'hgw5.cse.ucsc.edu', 'hgw6.cse.ucsc.edu', 'hgw7.cse.ucsc.edu', 'hgw8.cse.ucsc.edu' ),
+ ucsc_test = ( 'hgwdev.cse.ucsc.edu', ),
+ ucsc_archaea = ( 'lowepub.cse.ucsc.edu', )
+ )
+ def __init__( self, model, permitted_actions=None ):
+ self.model = model
+ if permitted_actions:
+ self.permitted_actions = permitted_actions
+ @property
+ def sa_session( self ):
+ """Returns a SQLAlchemy session"""
+ return self.model.context
+ def allow_action( self, addr, action, **kwd ):
+ if 'dataset' in kwd and action == self.permitted_actions.DATASET_ACCESS:
+ hda = kwd['dataset']
+            if action.action not in [ dp.action for dp in hda.dataset.actions ]:
+ log.debug( 'Allowing access to public dataset with hda: %i.' % hda.id )
+ return True # dataset has no roles associated with the access permission, thus is already public
+ hdadaa = self.sa_session.query( self.model.HistoryDatasetAssociationDisplayAtAuthorization ) \
+ .filter_by( history_dataset_association_id = hda.id ).first()
+ if not hdadaa:
+ log.debug( 'Denying access to private dataset with hda: %i. No hdadaa record for this dataset.' % hda.id )
+ return False # no auth
+ # We could just look up the reverse of addr, but then we'd also
+ # have to verify it with the forward address and special case any
+ # IPs (instead of hosts) in the server list.
+ #
+ # This would be improved by caching, but that's what the OS's name
+ # service cache daemon is for (you ARE running nscd, right?).
+ for server in HostAgent.sites.get( hdadaa.site, [] ):
+ # We're going to search in order, but if the remote site is load
+ # balancing their connections (as UCSC does), this is okay.
+ try:
+ if socket.gethostbyname( server ) == addr:
+ break # remote host is in the server list
+ except ( socket.error, socket.gaierror ):
+ pass # can't resolve, try next
+ else:
+ log.debug( 'Denying access to private dataset with hda: %i. Remote addr is not a valid server for site: %s.' % ( hda.id, hdadaa.site ) )
+ return False # remote addr is not in the server list
+ if ( datetime.utcnow() - hdadaa.update_time ) > timedelta( seconds=60 ):
+ log.debug( 'Denying access to private dataset with hda: %i. Authorization was granted, but has expired.' % hda.id )
+ return False # not authz'd in the last 60 seconds
+ log.debug( 'Allowing access to private dataset with hda: %i. Remote server is: %s.' % ( hda.id, server ) )
+ return True
+ else:
+            raise Exception( 'The dataset access permission is the only valid permission in the host security agent.' )
+ def set_dataset_permissions( self, hda, user, site ):
+ hdadaa = self.sa_session.query( self.model.HistoryDatasetAssociationDisplayAtAuthorization ) \
+ .filter_by( history_dataset_association_id = hda.id ).first()
+ if hdadaa:
+ hdadaa.update_time = datetime.utcnow()
+ else:
+ hdadaa = self.model.HistoryDatasetAssociationDisplayAtAuthorization( hda=hda, user=user, site=site )
+ self.sa_session.add( hdadaa )
+ self.sa_session.flush()
+
+def get_permitted_actions( filter=None ):
+ '''Utility method to return a subset of RBACAgent's permitted actions'''
+ if filter is None:
+ return RBACAgent.permitted_actions
+    tmp_bunch = Bunch()
+    # Copy over only the actions whose names start with the given filter prefix
+    for k, v in RBACAgent.permitted_actions.items():
+        if k.startswith( filter ):
+            tmp_bunch.__dict__[ k ] = v
+    return tmp_bunch
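
The 'grant' and 'restrict' models declared on the Action objects above drive
how guess_derived_permissions_for_datasets() merges permissions across input
datasets: roles on a 'grant' action are intersected ( a role must hold the
grant on every input to keep it on the output ), while roles on a 'restrict'
action are unioned ( a restriction on any input carries over to the output ).
A minimal standalone sketch of that merge, using tuples and strings as
stand-ins for Galaxy's Action and Role objects:

# Sketch only: ( action_name, model ) tuples stand in for Action objects,
# plain strings stand in for Role objects.
def merge_permissions( per_dataset_perms ):
    perms = {}
    for these_perms in per_dataset_perms:
        for key, roles in these_perms.items():
            model = key[ 1 ]
            if key not in perms:
                perms[ key ] = list( roles )
            elif model == 'grant':
                # intersect existing roles with the new roles
                perms[ key ] = [ r for r in perms[ key ] if r in roles ]
            else:
                # 'restrict': accumulate every restricting role
                perms[ key ].extend( [ r for r in roles if r not in perms[ key ] ] )
    return perms

inputs = [ { ( 'manage permissions', 'grant' ): [ 'alice', 'bob' ],
             ( 'access', 'restrict' ): [ 'lab' ] },
           { ( 'manage permissions', 'grant' ): [ 'alice' ],
             ( 'access', 'restrict' ): [ 'collaborators' ] } ]
print merge_permissions( inputs )
# -> 'manage permissions' keeps only 'alice'; 'access' collects both roles
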
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/tools/__init__.py Wed Nov 11 15:59:41 2009 -0500
@@ -272,7 +272,10 @@
self.app = app
# Parse XML element containing configuration
self.parse( root )
-
+ @property
+ def sa_session( self ):
+ """Returns a SQLAlchemy session"""
+ return self.app.model.context
def parse( self, root ):
"""
Read tool configuration from the element `root` and fill in `self`.
@@ -863,7 +866,7 @@
if 'async_datasets' in inputs and inputs['async_datasets'] not in [ 'None', '', None ]:
for id in inputs['async_datasets'].split(','):
try:
- data = trans.sa_session.query( trans.model.HistoryDatasetAssociation ).get( int( id ) )
+ data = self.sa_session.query( trans.model.HistoryDatasetAssociation ).get( int( id ) )
except:
log.exception( 'Unable to load precreated dataset (%s) sent in upload form' % id )
continue
@@ -874,7 +877,8 @@
else:
data.state = data.states.ERROR
data.info = 'Upload of this dataset was interrupted. Please try uploading again or'
- data.flush()
+ self.sa_session.add( data )
+ self.sa_session.flush()
# It's unlikely the user will ever see this.
             return 'message.mako', dict( message_type='error', message='Your upload was interrupted. If this was unintentional, please retry it.', refresh_frames=[], cont=None )
@@ -1454,7 +1458,8 @@
if data.missing_meta():
data = app.datatypes_registry.change_datatype( data, 'tabular' )
data.set_peek()
- data.flush()
+ self.sa_session.add( data )
+ self.sa_session.flush()
def collect_associated_files( self, output, job_working_directory ):
for name, hda in output.items():
@@ -1493,7 +1498,8 @@
self.app.security_agent.copy_dataset_permissions( outdata.dataset, child_dataset.dataset )
# Move data from temp location to dataset location
shutil.move( filename, child_dataset.file_name )
- child_dataset.flush()
+ self.sa_session.add( child_dataset )
+ self.sa_session.flush()
child_dataset.set_size()
child_dataset.name = "Secondary Dataset (%s)" % ( designation )
child_dataset.init_meta()
@@ -1507,16 +1513,19 @@
if job:
assoc = self.app.model.JobToOutputDatasetAssociation( '__new_child_file_%s|%s__' % ( name, designation ), child_dataset )
assoc.job = job
- assoc.flush()
+ self.sa_session.add( assoc )
+ self.sa_session.flush()
child_dataset.state = outdata.state
- child_dataset.flush()
+ self.sa_session.add( child_dataset )
+ self.sa_session.flush()
# Add child to return dict
children[name][designation] = child_dataset
for dataset in outdata.dataset.history_associations: #need to update all associated output hdas, i.e. history was shared with job running
if outdata == dataset: continue
# Create new child dataset
child_data = child_dataset.copy( parent_id = dataset.id )
- child_data.flush()
+            self.sa_session.add( child_data )
+ self.sa_session.flush()
return children
def collect_primary_datasets( self, output):
@@ -1540,7 +1549,8 @@
# Create new primary dataset
primary_data = self.app.model.HistoryDatasetAssociation( extension=ext, designation=designation, visible=visible, dbkey=dbkey, create_dataset=True )
self.app.security_agent.copy_dataset_permissions( outdata.dataset, primary_data.dataset )
- primary_data.flush()
+ self.sa_session.add( primary_data )
+ self.sa_session.flush()
# Move data from temp location to dataset location
shutil.move( filename, primary_data.file_name )
primary_data.set_size()
@@ -1558,9 +1568,11 @@
if job:
assoc = self.app.model.JobToOutputDatasetAssociation( '__new_primary_file_%s|%s__' % ( name, designation ), primary_data )
assoc.job = job
- assoc.flush()
+ self.sa_session.add( assoc )
+ self.sa_session.flush()
primary_data.state = outdata.state
- primary_data.flush()
+ self.sa_session.add( primary_data )
+ self.sa_session.flush()
outdata.history.add_dataset( primary_data )
# Add dataset to return dict
primary_datasets[name][designation] = primary_data
@@ -1568,7 +1580,8 @@
if outdata == dataset: continue
new_data = primary_data.copy()
dataset.history.add( new_data )
- new_data.flush()
+ self.sa_session.add( new_data )
+ self.sa_session.flush()
return primary_datasets
class SetMetadataTool( Tool ):
@@ -1581,7 +1594,8 @@
# For now, we'll leave the default metadata and set the state back to its original.
dataset.datatype.after_edit( dataset )
dataset.state = param_dict.get( '__ORIGINAL_DATASET_STATE__' )
- dataset.flush()
+ self.sa_session.add( dataset )
+ self.sa_session.flush()
# ---- Utility classes to be factored out -----------------------------------
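
The recurring edit in this changeset replaces the old object-bound flush
( model_object.flush() ) with an explicit sa_session.add( obj ) followed by
sa_session.flush(), so that persistence always goes through a single
SQLAlchemy session rather than through methods attached to the mapped
objects ( presumably part of moving to a newer SQLAlchemy session API ).
A minimal sketch of the new pattern against a plain declarative model --
the User class and sqlite database here are hypothetical, not Galaxy's
actual mapping setup:

from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class User( Base ):
    __tablename__ = 'users'
    id = Column( Integer, primary_key=True )
    email = Column( String )

engine = create_engine( 'sqlite:///:memory:' )
Base.metadata.create_all( engine )
sa_session = sessionmaker( bind=engine )()

user = User( email='someone@example.org' )
# Old style was roughly:  user.flush()
# New style: stage the object on the session, then flush the session;
# several staged objects can share a single flush.
sa_session.add( user )
sa_session.flush()
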
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/tools/actions/__init__.py Wed Nov 11 15:59:41 2009 -0500
@@ -44,9 +44,11 @@
new_data = data.datatype.convert_dataset( trans, data, target_ext, return_output = True, visible = False ).values()[0]
new_data.hid = data.hid
new_data.name = data.name
- new_data.flush()
+ trans.sa_session.add( new_data )
+ trans.sa_session.flush()
assoc.dataset = new_data
- assoc.flush()
+ trans.sa_session.add( assoc )
+ trans.sa_session.flush()
data = new_data
user, roles = trans.get_user_and_roles()
if data and not trans.app.security_agent.can_access_dataset( roles, data.dataset ):
@@ -198,7 +200,8 @@
ext = when_elem.get( 'format', ext )
data = trans.app.model.HistoryDatasetAssociation( extension=ext, create_dataset=True )
             # Flush the dataset immediately so it gets a database-assigned unique id
- data.flush()
+ trans.sa_session.add( data )
+ trans.sa_session.flush()
trans.app.security_agent.set_all_dataset_permissions( data.dataset, output_permissions )
# Create an empty file immediately
open( data.file_name, "w" ).close()
@@ -241,7 +244,8 @@
             if name not in child_dataset_names and name not in incoming: # don't add children, or datasets that already exist ( i.e. created asynchronously )
data = out_data[ name ]
trans.history.add_dataset( data, set_hid = set_output_hid )
- data.flush()
+ trans.sa_session.add( data )
+ trans.sa_session.flush()
# Add all the children to their parents
for parent_name, child_name in parent_to_child_pairs:
parent_dataset = out_data[ parent_name ]
@@ -293,7 +297,8 @@
# Job should not be queued, so set state to ok
job.state = JOB_OK
job.info = "Redirected to: %s" % redirect_url
- job.flush()
+ trans.sa_session.add( job )
+ trans.sa_session.flush()
trans.response.send_redirect( url_for( controller='tool_runner', action='redirect', redirect_url=redirect_url ) )
else:
# Queue the job for execution
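
One ordering detail in the hunk above: the new HistoryDatasetAssociation is
added and flushed before the permissions are set and before the empty file
is created, because the flush is what assigns the database id, and the
dataset's path on disk is derived from that id. A small sketch of the same
ordering, with hypothetical names:

import os

def create_empty_dataset( sa_session, dataset_class, files_dir ):
    # dataset_class is any mapped class with an autoincrement primary key
    data = dataset_class()
    sa_session.add( data )
    sa_session.flush()    # INSERT happens here; data.id is now assigned
    # Only after the flush can an id-based path like this be built
    path = os.path.join( files_dir, 'dataset_%d.dat' % data.id )
    open( path, 'w' ).close()    # create the empty file immediately
    return data, path
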
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/tools/actions/upload.py Wed Nov 11 15:59:41 2009 -0500
@@ -21,6 +21,6 @@
if not uploaded_datasets:
return 'No data was entered in the upload form, please go back and choose data to upload.'
- json_file_path = upload_common.create_paramfile( uploaded_datasets )
+ json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
data_list = [ ud.data for ud in uploaded_datasets ]
return upload_common.create_job( trans, incoming, tool, json_file_path, data_list )
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/tools/actions/upload_common.py Wed Nov 11 15:59:41 2009 -0500
@@ -117,7 +117,8 @@
hda.state = state
else:
hda.state = hda.states.QUEUED
- hda.flush()
+ trans.sa_session.add( hda )
+ trans.sa_session.flush()
trans.history.add_dataset( hda, genome_build = uploaded_dataset.dbkey )
permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions )
@@ -142,14 +143,16 @@
new_folder = trans.app.model.LibraryFolder( name=name, description='Automatically created by upload tool' )
new_folder.genome_build = util.dbnames.default_value
folder.add_folder( new_folder )
- new_folder.flush()
+ trans.sa_session.add( new_folder )
+ trans.sa_session.flush()
trans.app.security_agent.copy_library_permissions( folder, new_folder )
folder = new_folder
if library_bunch.replace_dataset:
ld = library_bunch.replace_dataset
else:
ld = trans.app.model.LibraryDataset( folder=folder, name=uploaded_dataset.name )
- ld.flush()
+ trans.sa_session.add( ld )
+ trans.sa_session.flush()
trans.app.security_agent.copy_library_permissions( folder, ld )
ldda = trans.app.model.LibraryDatasetDatasetAssociation( name = uploaded_dataset.name,
extension = uploaded_dataset.file_type,
@@ -162,7 +165,8 @@
else:
ldda.state = ldda.states.QUEUED
ldda.message = library_bunch.message
- ldda.flush()
+ trans.sa_session.add( ldda )
+ trans.sa_session.flush()
# Permissions must be the same on the LibraryDatasetDatasetAssociation and the associated LibraryDataset
trans.app.security_agent.copy_library_permissions( ld, ldda )
if library_bunch.replace_dataset:
@@ -172,9 +176,11 @@
# Copy the current user's DefaultUserPermissions to the new LibraryDatasetDatasetAssociation.dataset
trans.app.security_agent.set_all_dataset_permissions( ldda.dataset, trans.app.security_agent.user_get_default_permissions( trans.user ) )
folder.add_library_dataset( ld, genome_build=uploaded_dataset.dbkey )
- folder.flush()
+ trans.sa_session.add( folder )
+ trans.sa_session.flush()
ld.library_dataset_dataset_association_id = ldda.id
- ld.flush()
+ trans.sa_session.add( ld )
+ trans.sa_session.flush()
# Handle template included in the upload form, if any
if library_bunch.template and library_bunch.template_field_contents:
# Since information templates are inherited, the template fields can be displayed on the upload form.
@@ -182,15 +188,18 @@
# for the new library_dataset_dataset_association object.
# Create a new FormValues object, using the template we previously retrieved
form_values = trans.app.model.FormValues( library_bunch.template, library_bunch.template_field_contents )
- form_values.flush()
+ trans.sa_session.add( form_values )
+ trans.sa_session.flush()
# Create a new info_association between the current ldda and form_values
info_association = trans.app.model.LibraryDatasetDatasetInfoAssociation( ldda, library_bunch.template, form_values )
- info_association.flush()
+ trans.sa_session.add( info_association )
+ trans.sa_session.flush()
# If roles were selected upon upload, restrict access to the Dataset to those roles
if library_bunch.roles:
for role in library_bunch.roles:
dp = trans.app.model.DatasetPermissions( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action, ldda.dataset, role )
- dp.flush()
+ trans.sa_session.add( dp )
+ trans.sa_session.flush()
return ldda
def new_upload( trans, uploaded_dataset, library_bunch=None, state=None ):
@@ -210,16 +219,18 @@
else:
data.extension = uploaded_dataset.file_type
data.dbkey = uploaded_dataset.dbkey
- data.flush()
+ trans.sa_session.add( data )
+ trans.sa_session.flush()
if library_bunch:
library_bunch.folder.genome_build = uploaded_dataset.dbkey
- library_bunch.folder.flush()
+ trans.sa_session.add( library_bunch.folder )
+ trans.sa_session.flush()
else:
trans.history.genome_build = uploaded_dataset.dbkey
uploaded_dataset.data = data
return uploaded_datasets
-def create_paramfile( uploaded_datasets ):
+def create_paramfile( trans, uploaded_datasets ):
"""
Create the upload tool's JSON "param" file.
"""
@@ -233,7 +244,8 @@
data.init_meta()
for meta_name, meta_value in uploaded_dataset.metadata.iteritems():
setattr( data.metadata, meta_name, meta_value )
- data.flush()
+ trans.sa_session.add( data )
+ trans.sa_session.flush()
json = dict( file_type = uploaded_dataset.file_type,
dataset_id = data.dataset.id,
dbkey = uploaded_dataset.dbkey,
@@ -278,7 +290,8 @@
job.tool_id = tool.id
job.tool_version = tool.version
job.state = job.states.UPLOAD
- job.flush()
+ trans.sa_session.add( job )
+ trans.sa_session.flush()
log.info( 'tool %s created job id %d' % ( tool.id, job.id ) )
trans.log_event( 'created job id %d' % job.id, tool_id=tool.id )
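
create_paramfile() now takes trans only so it can reach the session for the
metadata flush above; the file it writes is a JSON "param" file describing
each uploaded dataset ( the hunk shows file_type, dataset_id and dbkey among
the fields ). A rough standalone sketch of writing such a file, one JSON
record per line -- the record layout beyond those three fields is
illustrative, and the stdlib json module stands in for whatever JSON library
the tool actually uses:

import json, os, tempfile

def write_paramfile( uploaded_datasets ):
    fd, path = tempfile.mkstemp()
    out = os.fdopen( fd, 'w' )
    for ud in uploaded_datasets:
        record = dict( file_type = ud[ 'file_type' ],
                       dataset_id = ud[ 'dataset_id' ],
                       dbkey = ud[ 'dbkey' ] )
        out.write( json.dumps( record ) + '\n' )    # one JSON object per line
    out.close()
    return path
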
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/tools/parameters/basic.py Wed Nov 11 15:59:41 2009 -0500
@@ -726,8 +726,10 @@
>>> # Mock up a history (not connected to database)
>>> from galaxy.model import History, HistoryDatasetAssociation
>>> from galaxy.util.bunch import Bunch
+ >>> from galaxy.model.mapping import context as sa_session
>>> hist = History()
- >>> hist.flush()
+ >>> sa_session.add( hist )
+ >>> sa_session.flush()
>>> hist.add_dataset( HistoryDatasetAssociation( id=1, extension='interval', create_dataset=True ) )
>>> dtp = DataToolParameter( None, XML( '<param name="blah" type="data" format="interval"/>' ) )
>>> print dtp.name
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/admin.py Wed Nov 11 15:59:41 2009 -0500
@@ -350,23 +350,24 @@
else:
# Create the role
role = trans.app.model.Role( name=name, description=description, type=trans.app.model.Role.types.ADMIN )
- role.flush()
+ trans.sa_session.add( role )
# Create the UserRoleAssociations
for user in [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in in_users ]:
ura = trans.app.model.UserRoleAssociation( user, role )
- ura.flush()
+ trans.sa_session.add( ura )
# Create the GroupRoleAssociations
for group in [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in in_groups ]:
gra = trans.app.model.GroupRoleAssociation( group, role )
- gra.flush()
+ trans.sa_session.add( gra )
if create_group_for_role == 'yes':
# Create the group
group = trans.app.model.Group( name=name )
- group.flush()
+ trans.sa_session.add( group )
msg = "Group '%s' has been created, and role '%s' has been created with %d associated users and %d associated groups" % \
( group.name, role.name, len( in_users ), len( in_groups ) )
else:
msg = "Role '%s' has been created with %d associated users and %d associated groups" % ( role.name, len( in_users ), len( in_groups ) )
+ trans.sa_session.flush()
trans.response.send_redirect( web.url_for( controller='admin', action='roles', message=util.sanitize_text( msg ), status='done' ) )
trans.response.send_redirect( web.url_for( controller='admin', action='create_role', msg=util.sanitize_text( msg ), messagetype='error' ) )
out_users = []
@@ -402,13 +403,12 @@
for dup in user.default_permissions:
if role == dup.role:
trans.sa_session.delete( dup )
- dup.flush()
# Delete DefaultHistoryPermissions for previously associated users that have been removed from the role
for history in user.histories:
for dhp in history.default_permissions:
if role == dhp.role:
trans.sa_session.delete( dhp )
- dhp.flush()
+ trans.sa_session.flush()
in_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( params.in_groups ) ]
trans.app.security_agent.set_entity_role_associations( roles=[ role ], users=in_users, groups=in_groups )
trans.sa_session.refresh( role )
@@ -428,7 +428,8 @@
else:
role.name = new_name
role.description = new_description
- role.flush()
+ trans.sa_session.add( role )
+ trans.sa_session.flush()
msg = "Role '%s' has been renamed to '%s'" % ( old_name, new_name )
return trans.response.send_redirect( web.url_for( action='roles', message=util.sanitize_text( msg ), status='done' ) )
return trans.fill_template( '/admin/dataset_security/role/role_rename.mako', role=role, msg=msg, messagetype=messagetype )
@@ -491,7 +492,8 @@
params = util.Params( kwd )
role = get_role( trans, params.id )
role.deleted = True
- role.flush()
+ trans.sa_session.add( role )
+ trans.sa_session.flush()
message = "Role '%s' has been marked as deleted." % role.name
trans.response.send_redirect( web.url_for( action='roles', message=util.sanitize_text( message ), status='done' ) )
@web.expose
@@ -500,7 +502,8 @@
params = util.Params( kwd )
role = get_role( trans, params.id )
role.deleted = False
- role.flush()
+ trans.sa_session.add( role )
+ trans.sa_session.flush()
message = "Role '%s' has been marked as not deleted." % role.name
trans.response.send_redirect( web.url_for( action='roles', message=util.sanitize_text( message ), status='done' ) )
@web.expose
@@ -525,23 +528,19 @@
for dup in user.default_permissions:
if role == dup.role:
trans.sa_session.delete( dup )
- dup.flush()
# Delete DefaultHistoryPermissions for associated users
for history in user.histories:
for dhp in history.default_permissions:
if role == dhp.role:
trans.sa_session.delete( dhp )
- dhp.flush()
trans.sa_session.delete( ura )
- ura.flush()
# Delete GroupRoleAssociations
for gra in role.groups:
trans.sa_session.delete( gra )
- gra.flush()
         # Delete DatasetPermissions
for dp in role.dataset_actions:
trans.sa_session.delete( dp )
- dp.flush()
+ trans.sa_session.flush()
message = "The following have been purged from the database for role '%s': " % role.name
message += "DefaultUserPermissions, DefaultHistoryPermissions, UserRoleAssociations, GroupRoleAssociations, DatasetPermissionss."
trans.response.send_redirect( web.url_for( action='roles', message=util.sanitize_text( message ), status='done' ) )
@@ -592,7 +591,8 @@
return trans.fill_template( '/admin/dataset_security/group/group_rename.mako', group=group, msg=msg, messagetype='error' )
else:
group.name = new_name
- group.flush()
+ trans.sa_session.add( group )
+ trans.sa_session.flush()
msg = "Group '%s' has been renamed to '%s'" % ( old_name, new_name )
return trans.response.send_redirect( web.url_for( action='groups', msg=util.sanitize_text( msg ), messagetype='done' ) )
return trans.fill_template( '/admin/dataset_security/group/group_rename.mako', group=group, msg=msg, messagetype=messagetype )
@@ -640,15 +640,18 @@
else:
# Create the group
group = trans.app.model.Group( name=name )
- group.flush()
+ trans.sa_session.add( group )
+ trans.sa_session.flush()
# Create the UserRoleAssociations
for user in [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in in_users ]:
uga = trans.app.model.UserGroupAssociation( user, group )
- uga.flush()
+ trans.sa_session.add( uga )
+ trans.sa_session.flush()
# Create the GroupRoleAssociations
for role in [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in in_roles ]:
gra = trans.app.model.GroupRoleAssociation( group, role )
- gra.flush()
+ trans.sa_session.add( gra )
+ trans.sa_session.flush()
msg = "Group '%s' has been created with %d associated users and %d associated roles" % ( name, len( in_users ), len( in_roles ) )
trans.response.send_redirect( web.url_for( controller='admin', action='groups', message=util.sanitize_text( msg ), status='done' ) )
trans.response.send_redirect( web.url_for( controller='admin', action='create_group', msg=util.sanitize_text( msg ), messagetype='error' ) )
@@ -675,7 +678,8 @@
params = util.Params( kwd )
group = get_group( trans, params.id )
group.deleted = True
- group.flush()
+ trans.sa_session.add( group )
+ trans.sa_session.flush()
msg = "Group '%s' has been marked as deleted." % group.name
trans.response.send_redirect( web.url_for( action='groups', message=util.sanitize_text( msg ), status='done' ) )
@web.expose
@@ -684,7 +688,8 @@
params = util.Params( kwd )
group = get_group( trans, params.id )
group.deleted = False
- group.flush()
+ trans.sa_session.add( group )
+ trans.sa_session.flush()
msg = "Group '%s' has been marked as not deleted." % group.name
trans.response.send_redirect( web.url_for( action='groups', message=util.sanitize_text( msg ), status='done' ) )
@web.expose
@@ -701,12 +706,10 @@
# Delete UserGroupAssociations
for uga in group.users:
trans.sa_session.delete( uga )
- uga.flush()
# Delete GroupRoleAssociations
for gra in group.roles:
trans.sa_session.delete( gra )
- gra.flush()
- # Delete the Group
+ trans.sa_session.flush()
message = "The following have been purged from the database for group '%s': UserGroupAssociations, GroupRoleAssociations." % group.name
trans.response.send_redirect( web.url_for( action='groups', message=util.sanitize_text( message ), status='done' ) )
@@ -756,7 +759,8 @@
user.set_password_cleartext( password )
if trans.app.config.use_remote_user:
user.external = True
- user.flush()
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
trans.app.security_agent.create_private_user_role( user )
trans.app.security_agent.user_set_default_permissions( user, history=False, dataset=False )
message = 'Created new user account (%s)' % user.email
@@ -804,7 +808,8 @@
break
else:
user.set_password_cleartext( password )
- user.flush()
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
if not message and not status:
message = "Passwords reset for %d users" % len( ids )
status = 'done'
@@ -831,7 +836,8 @@
for user_id in ids:
user = get_user( trans, user_id )
user.deleted = True
- user.flush()
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
message += " %s " % user.email
trans.response.send_redirect( web.url_for( action='users', message=util.sanitize_text( message ), status='done' ) )
@web.expose
@@ -848,7 +854,8 @@
user = get_user( trans, user_id )
if user.deleted:
user.deleted = False
- user.flush()
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
count += 1
undeleted_users += " %s" % user.email
message = "Undeleted %d users: %s" % ( count, undeleted_users )
@@ -895,23 +902,22 @@
# Delete Dataset
if not d.deleted:
d.deleted = True
- d.flush()
+ trans.sa_session.add( d )
hda.deleted = True
- hda.flush()
+ trans.sa_session.add( hda )
h.deleted = True
- h.flush()
+ trans.sa_session.add( h )
# Delete UserGroupAssociations
for uga in user.groups:
trans.sa_session.delete( uga )
- uga.flush()
# Delete UserRoleAssociations EXCEPT FOR THE PRIVATE ROLE
for ura in user.roles:
if ura.role_id != private_role.id:
trans.sa_session.delete( ura )
- ura.flush()
# Purge the user
user.purged = True
- user.flush()
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
message += "%s " % user.email
trans.response.send_redirect( web.url_for( controller='admin',
action='users',
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/async.py
--- a/lib/galaxy/web/controllers/async.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/async.py Wed Nov 11 15:59:41 2009 -0500
@@ -109,9 +109,10 @@
data.dbkey = GALAXY_BUILD
data.info = GALAXY_INFO
data.state = data.states.NEW
- data.flush()
+ trans.sa_session.add( data )
open( data.file_name, 'wb' ).close() #create the file
trans.history.add_dataset( data, genome_build=GALAXY_BUILD )
+ trans.sa_session.add( trans.history )
trans.sa_session.flush()
trans.log_event( "Added dataset %d to history %d" %(data.id, trans.history.id ), tool_id=tool_id )
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/dataset.py Wed Nov 11 15:59:41 2009 -0500
@@ -356,7 +356,8 @@
if new_history_name:
new_history.name = new_history_name
new_history.user = user
- new_history.flush()
+ trans.sa_session.add( new_history )
+ trans.sa_session.flush()
target_history_ids.append( new_history.id )
if user:
target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if ( hist is not None and hist.user == user )]
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/forms.py
--- a/lib/galaxy/web/controllers/forms.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/forms.py Wed Nov 11 15:59:41 2009 -0500
@@ -113,7 +113,8 @@
messagetype = params.get( 'messagetype', 'done' )
fd = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( util.restore_text( params.form_id ) ) )
fd.form_definition_current.deleted = True
- fd.form_definition_current.flush()
+ trans.sa_session.add( fd.form_definition_current )
+ trans.sa_session.flush()
return self._show_forms_list(trans,
msg='The form definition named %s is deleted.' % fd.name,
messagetype='done')
@@ -125,7 +126,8 @@
messagetype = params.get( 'messagetype', 'done' )
fd = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( util.restore_text( params.form_id ) ) )
fd.form_definition_current.deleted = False
- fd.form_definition_current.flush()
+ trans.sa_session.add( fd.form_definition_current )
+ trans.sa_session.flush()
return self._show_forms_list(trans,
msg='The form definition named %s is undeleted.' % fd.name,
messagetype='done')
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/history.py Wed Nov 11 15:59:41 2009 -0500
@@ -306,7 +306,8 @@
except:
association = None
new_history.add_galaxy_session( galaxy_session, association=association )
- new_history.flush()
+ trans.sa_session.add( new_history )
+ trans.sa_session.flush()
trans.set_history( new_history )
# No message
return None, None
@@ -335,7 +336,7 @@
# Current user is the user with which the histories were shared
association = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ).filter_by( user=trans.user, history=history ).one()
trans.sa_session.delete( association )
- association.flush()
+ trans.sa_session.flush()
message = "Unshared %d shared histories" % len( ids )
status = 'done'
# Render the list view
@@ -348,7 +349,8 @@
return trans.show_error_message( "History (%s) has been shared with others, unshare it before deleting it. " % history.name )
if not history.deleted:
history.deleted = True
- history.flush()
+ trans.sa_session.add( history )
+ trans.sa_session.flush()
trans.log_event( "History id %d marked as deleted" % history.id )
# Regardless of whether it was previously deleted, we make a new history active
trans.new_history()
@@ -365,6 +367,7 @@
assert history.user == trans.user
# Rename
history.name = new_name
+ trans.sa_session.add( history )
trans.sa_session.flush()
@web.expose
@@ -406,7 +409,8 @@
except:
association = None
new_history.add_galaxy_session( galaxy_session, association=association )
- new_history.flush()
+ trans.sa_session.add( new_history )
+ trans.sa_session.flush()
if not user_history.datasets:
trans.set_history( new_history )
return trans.show_ok_message( """
@@ -424,7 +428,8 @@
except:
association = None
new_history.add_galaxy_session( galaxy_session, association=association )
- new_history.flush()
+ trans.sa_session.add( new_history )
+ trans.sa_session.flush()
trans.set_history( new_history )
return trans.show_ok_message( """
History "%s" has been imported. Click <a href="%s">here</a>
@@ -731,9 +736,8 @@
share = trans.app.model.HistoryUserShareAssociation()
share.history = history
share.user = send_to_user
- session = trans.sa_session
- session.add( share )
- session.flush()
+ trans.sa_session.add( share )
+ trans.sa_session.flush()
if history not in shared_histories:
shared_histories.append( history )
if send_to_err:
@@ -752,12 +756,13 @@
if ids:
histories = [ get_history( trans, history_id ) for history_id in ids ]
for history in histories:
+ trans.sa_session.add( history )
if params.get( 'enable_import_via_link', False ):
history.importable = True
- history.flush()
+ trans.sa_session.flush()
elif params.get( 'disable_import_via_link', False ):
history.importable = False
- history.flush()
+ trans.sa_session.flush()
elif params.get( 'unshare_user', False ):
user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( kwd[ 'unshare_user' ] ) )
if not user:
@@ -767,7 +772,7 @@
if husas:
for husa in husas:
trans.sa_session.delete( husa )
- husa.flush()
+ trans.sa_session.flush()
histories = []
# Get all histories that have been shared with others
husas = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
@@ -818,7 +823,8 @@
elif name[i] not in [None,'',' ']:
name[i] = escape(name[i])
histories[i].name = name[i]
- histories[i].flush()
+ trans.sa_session.add( histories[i] )
+ trans.sa_session.flush()
change_msg = change_msg + "<p>History: "+cur_names[i]+" renamed to: "+name[i]+"</p>"
trans.log_event( "History renamed: id: %s, renamed to: '%s'" % (str(histories[i].id), name[i] ) )
else:
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/library.py Wed Nov 11 15:59:41 2009 -0500
@@ -169,11 +169,11 @@
else:
library.name = new_name
library.description = new_description
- library.flush()
# Rename the root_folder
library.root_folder.name = new_name
library.root_folder.description = new_description
- library.root_folder.flush()
+ trans.sa_session.add_all( ( library, library.root_folder ) )
+ trans.sa_session.flush()
msg = "Library '%s' has been renamed to '%s'" % ( old_name, new_name )
return trans.response.send_redirect( web.url_for( controller='library',
action='library',
@@ -240,7 +240,8 @@
# ? unspecified (?)
new_folder.genome_build = util.dbnames.default_value
folder.add_folder( new_folder )
- new_folder.flush()
+ trans.sa_session.add( new_folder )
+ trans.sa_session.flush()
# New folders default to having the same permissions as their parent folder
trans.app.security_agent.copy_library_permissions( folder, new_folder )
msg = "New folder named '%s' has been added to the library" % new_folder.name
@@ -273,7 +274,8 @@
else:
folder.name = new_name
folder.description = new_description
- folder.flush()
+ trans.sa_session.add( folder )
+ trans.sa_session.flush()
msg = "Folder '%s' has been renamed to '%s'" % ( old_name, new_name )
return trans.response.send_redirect( web.url_for( controller='library',
action='folder',
@@ -359,7 +361,8 @@
else:
library_dataset.name = new_name
library_dataset.info = new_info
- library_dataset.flush()
+ trans.sa_session.add( library_dataset )
+ trans.sa_session.flush()
msg = "Dataset '%s' has been renamed to '%s'" % ( old_name, new_name )
messagetype = 'done'
else:
@@ -502,7 +505,8 @@
elif params.get( 'delete', False ):
if trans.app.security_agent.can_modify_library_item( user, roles, folder ):
ldda.deleted = True
- ldda.flush()
+ trans.sa_session.add( ldda )
+ trans.sa_session.flush()
msg = 'Dataset %s has been removed from this data library' % ldda.name
messagetype = 'done'
else:
@@ -894,7 +898,8 @@
for ldda_id in ldda_ids:
ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id )
hda = ldda.to_history_dataset_association( target_history=history, add_to_history = True )
- history.flush()
+ trans.sa_session.add( history )
+ trans.sa_session.flush()
msg = "%i dataset(s) have been imported into your history" % len( ldda_ids )
return trans.response.send_redirect( web.url_for( controller='library',
action='browse_library',
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/library_admin.py Wed Nov 11 15:59:41 2009 -0500
@@ -89,9 +89,9 @@
library = trans.app.model.Library( name = util.restore_text( params.name ),
description = util.restore_text( params.description ) )
root_folder = trans.app.model.LibraryFolder( name = util.restore_text( params.name ), description = "" )
- root_folder.flush()
library.root_folder = root_folder
- library.flush()
+ trans.sa_session.add_all( ( library, root_folder ) )
+ trans.sa_session.flush()
msg = "The new library named '%s' has been created" % library.name
return trans.response.send_redirect( web.url_for( controller='library_admin',
action='browse_library',
@@ -116,11 +116,11 @@
else:
library.name = new_name
library.description = new_description
- library.flush()
# Rename the root_folder
library.root_folder.name = new_name
library.root_folder.description = new_description
- library.root_folder.flush()
+ trans.sa_session.add_all( ( library, library.root_folder ) )
+ trans.sa_session.flush()
msg = "Library '%s' has been renamed to '%s'" % ( old_name, new_name )
return trans.response.send_redirect( web.url_for( controller='library_admin',
action='library',
@@ -148,15 +148,17 @@
# to deleted. This allows for the library to be undeleted ( before it is purged ),
# restoring all of its contents.
ldda.deleted = True
- ldda.flush()
+ trans.sa_session.add( ldda )
library_dataset.deleted = True
- library_dataset.flush()
+ trans.sa_session.add( library_dataset )
library_folder.deleted = True
- library_folder.flush()
+ trans.sa_session.add( library_folder )
+ trans.sa_session.flush()
trans.sa_session.refresh( library )
delete_folder( library.root_folder )
library.deleted = True
- library.flush()
+ trans.sa_session.add( library )
+ trans.sa_session.flush()
msg = "Library '%s' and all of its contents have been marked deleted" % library.name
return trans.response.send_redirect( web.url_for( action='browse_libraries', msg=util.sanitize_text( msg ), messagetype='done' ) )
elif action == 'permissions':
@@ -218,14 +220,14 @@
# us, as well as removing the file from disk.
#if not dataset.deleted and len( dataset.active_library_associations ) <= 1: # This is our current ldda
dataset.deleted = True
- dataset.flush()
ldda.deleted = True
- ldda.flush()
+ trans.sa_session.add_all( ( dataset, ldda ) )
library_dataset.deleted = True
- library_dataset.flush()
+ trans.sa_session.add( library_dataset )
library_folder.deleted = True
library_folder.purged = True
- library_folder.flush()
+ trans.sa_session.add( library_folder )
+ trans.sa_session.flush()
if not library.deleted:
msg = "Library '%s' has not been marked deleted, so it cannot be purged" % ( library.name )
return trans.response.send_redirect( web.url_for( controller='library_admin',
@@ -235,7 +237,8 @@
else:
purge_folder( library.root_folder )
library.purged = True
- library.flush()
+ trans.sa_session.add( library )
+ trans.sa_session.flush()
msg = "Library '%s' and all of its contents have been purged, datasets will be removed from disk via the cleanup_datasets script" % library.name
return trans.response.send_redirect( web.url_for( controller='library_admin',
action='deleted_libraries',
@@ -273,7 +276,8 @@
# ? unspecified (?)
new_folder.genome_build = util.dbnames.default_value
folder.add_folder( new_folder )
- new_folder.flush()
+ trans.sa_session.add( new_folder )
+ trans.sa_session.flush()
# New folders default to having the same permissions as their parent folder
trans.app.security_agent.copy_library_permissions( folder, new_folder )
msg = "New folder named '%s' has been added to the library" % new_folder.name
@@ -305,7 +309,8 @@
else:
folder.name = new_name
folder.description = new_description
- folder.flush()
+ trans.sa_session.add( folder )
+ trans.sa_session.flush()
msg = "Folder '%s' has been renamed to '%s'" % ( old_name, new_name )
return trans.response.send_redirect( web.url_for( controller='library_admin',
action='folder',
@@ -322,7 +327,8 @@
messagetype=messagetype )
elif action == 'delete':
folder.deleted = True
- folder.flush()
+ trans.sa_session.add( folder )
+ trans.sa_session.flush()
msg = "Folder '%s' and all of its contents have been marked deleted" % folder.name
return trans.response.send_redirect( web.url_for( action='browse_library',
obj_id=library_id,
@@ -379,7 +385,8 @@
else:
library_dataset.name = new_name
library_dataset.info = new_info
- library_dataset.flush()
+ trans.sa_session.add( library_dataset )
+ trans.sa_session.flush()
msg = "Dataset '%s' has been renamed to '%s'" % ( old_name, new_name )
messagetype = 'done'
return trans.fill_template( '/admin/library/library_dataset_info.mako',
@@ -499,7 +506,8 @@
messagetype=messagetype )
elif params.get( 'delete', False ):
ldda.deleted = True
- ldda.flush()
+ trans.sa_session.add( ldda )
+ trans.sa_session.flush()
msg = 'Dataset %s has been removed from this data library' % ldda.name
return trans.fill_template( "/admin/library/ldda_edit_info.mako",
ldda=ldda,
@@ -893,7 +901,8 @@
for ldda_id in ldda_ids:
ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id )
ldda.deleted = True
- ldda.flush()
+ trans.sa_session.add( ldda )
+ trans.sa_session.flush()
msg = "The selected datasets have been removed from this data library"
trans.response.send_redirect( web.url_for( controller='library_admin',
action='browse_library',
@@ -930,7 +939,8 @@
library_item_desc = library_item_type.capitalize()
library_item = trans.sa_session.query( library_item_types[ library_item_type ] ).get( int( library_item_id ) )
library_item.deleted = True
- library_item.flush()
+ trans.sa_session.add( library_item )
+ trans.sa_session.flush()
msg = util.sanitize_text( "%s '%s' has been marked deleted" % ( library_item_desc, library_item.name ) )
messagetype = 'done'
if library_item_type == 'library':
@@ -961,7 +971,8 @@
messagetype = 'error'
else:
library_item.deleted = False
- library_item.flush()
+ trans.sa_session.add( library_item )
+ trans.sa_session.flush()
msg = util.sanitize_text( "%s '%s' has been marked undeleted" % ( library_item_desc, library_item.name ) )
messagetype = 'done'
if library_item_type == 'library':
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Wed Nov 11 15:59:41 2009 -0500
@@ -97,7 +97,7 @@
upload_option=upload_option,
msg=util.sanitize_text( msg ),
messagetype='error' ) )
- json_file_path = upload_common.create_paramfile( uploaded_datasets )
+ json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
data_list = [ ud.data for ud in uploaded_datasets ]
return upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder )
def make_library_uploaded_dataset( self, trans, params, name, path, type, library_bunch, in_folder=None ):
@@ -116,7 +116,8 @@
if params.get( 'link_data_only', False ):
uploaded_dataset.link_data_only = True
uploaded_dataset.data.file_name = os.path.abspath( path )
- uploaded_dataset.data.flush()
+ trans.sa_session.add( uploaded_dataset.data )
+ trans.sa_session.flush()
return uploaded_dataset
def get_server_dir_uploaded_datasets( self, trans, params, full_dir, import_dir_desc, library_bunch, err_redirect, msg ):
files = []
@@ -248,14 +249,16 @@
form = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( kwd[ 'form_id' ] ) )
#fields = list( copy.deepcopy( form.fields ) )
form_values = trans.app.model.FormValues( form, [] )
- form_values.flush()
+ trans.sa_session.add( form_values )
+ trans.sa_session.flush()
if folder_id:
assoc = trans.app.model.LibraryFolderInfoAssociation( library_item, form, form_values )
elif ldda_id:
assoc = trans.app.model.LibraryDatasetDatasetInfoAssociation( library_item, form, form_values )
else:
assoc = trans.app.model.LibraryInfoAssociation( library_item, form, form_values )
- assoc.flush()
+ trans.sa_session.add( assoc )
+ trans.sa_session.flush()
msg = 'An information template based on the form "%s" has been added to this %s.' % ( form.name, library_item_desc )
trans.response.send_redirect( web.url_for( controller=cntrller,
action=response_action,
@@ -315,21 +318,25 @@
# Update existing content only if it has changed
if form_values.content != field_contents:
form_values.content = field_contents
- form_values.flush()
+ trans.sa_session.add( form_values )
+ trans.sa_session.flush()
else:
# Inherit the next available info_association so we can get the template
info_association, inherited = library_item.get_info_association()
template = info_association.template
# Create a new FormValues object
form_values = trans.app.model.FormValues( template, field_contents )
- form_values.flush()
+ trans.sa_session.add( form_values )
+ trans.sa_session.flush()
# Create a new info_association between the current library item and form_values
if library_item_type == 'folder':
info_association = trans.app.model.LibraryFolderInfoAssociation( library_item, template, form_values )
- info_association.flush()
+ trans.sa_session.add( info_association )
+ trans.sa_session.flush()
elif library_item_type == 'library_dataset_dataset_association':
info_association = trans.app.model.LibraryDatasetDatasetInfoAssociation( library_item, template, form_values )
- info_association.flush()
+ trans.sa_session.add( info_association )
+ trans.sa_session.flush()
msg = 'The information has been updated.'
return trans.response.send_redirect( web.url_for( controller=cntrller,
action=response_action,
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/requests.py Wed Nov 11 15:59:41 2009 -0500
@@ -300,9 +300,11 @@
for field_index in range(len(request.type.sample_form.fields)):
sample_values.append(util.restore_text( params.get( 'sample_%i_field_%i' % (sample_index, field_index), '' ) ))
form_values = trans.app.model.FormValues(request.type.sample_form, sample_values)
- form_values.flush()
+ trans.sa_session.add( form_values )
+ trans.sa_session.flush()
s = trans.app.model.Sample(sample_name, '', request, form_values)
- s.flush()
+ trans.sa_session.add( s )
+ trans.sa_session.flush()
else:
for sample_index in range(len(current_samples)):
sample_name = current_samples[sample_index][0]
@@ -314,9 +316,9 @@
if sample:
form_values = trans.sa_session.query( trans.app.model.FormValues ).get( sample.values.id )
form_values.content = sample_values
- form_values.flush()
sample.name = new_sample_name
- sample.flush()
+ trans.sa_session.add( sample )
+ trans.sa_session.flush()
return trans.response.send_redirect( web.url_for( controller='requests',
action='list',
operation='show_request',
@@ -350,8 +352,7 @@
s = request.has_sample(sample_name)
if s:
trans.sa_session.delete( s )
- s.flush()
- request.flush()
+ trans.sa_session.flush()
del current_samples[sample_index]
return trans.fill_template( '/requests/show_request.mako',
request=request,
@@ -635,7 +636,8 @@
user_address.postal_code = util.restore_text(params.get('field_%i_postal_code' % index, ''))
user_address.country = util.restore_text(params.get('field_%i_country' % index, ''))
user_address.phone = util.restore_text(params.get('field_%i_phone' % index, ''))
- user_address.flush()
+ trans.sa_session.add( user_address )
+ trans.sa_session.flush()
trans.sa_session.refresh( trans.user )
values.append(int(user_address.id))
elif value == unicode('none'):
@@ -647,13 +649,15 @@
else:
values.append(util.restore_text(params.get('field_%i' % index, '')))
form_values = trans.app.model.FormValues(request_type.request_form, values)
- form_values.flush()
+ trans.sa_session.add( form_values )
+ trans.sa_session.flush()
if not request:
request = trans.app.model.Request(name, desc, request_type,
trans.user, form_values,
library=library, folder=folder,
state=trans.app.model.Request.states.UNSUBMITTED)
- request.flush()
+ trans.sa_session.add( request )
+ trans.sa_session.flush()
else:
request.name = name
request.desc = desc
@@ -772,7 +776,8 @@
message='This request cannot be deleted as it is already been submitted',
**kwd) )
request.deleted = True
- request.flush()
+ trans.sa_session.add( request )
+ trans.sa_session.flush()
kwd = {}
kwd['id'] = trans.security.encode_id(request.id)
return trans.response.send_redirect( web.url_for( controller='requests',
@@ -793,7 +798,8 @@
**kwd) )
# change request's submitted field
request.deleted = False
- request.flush()
+ trans.sa_session.add( request )
+ trans.sa_session.flush()
kwd = {}
kwd['id'] = trans.security.encode_id(request.id)
return trans.response.send_redirect( web.url_for( controller='requests',
@@ -824,10 +830,12 @@
new_state = request.type.states[0]
for s in request.samples:
event = trans.app.model.SampleEvent(s, new_state, 'Samples submitted to the system')
- event.flush()
+ trans.sa_session.add( event )
+ trans.sa_session.flush()
# change request's submitted field
request.state = request.states.SUBMITTED
- request.flush()
+ trans.sa_session.add( request )
+ trans.sa_session.flush()
kwd = {}
kwd['id'] = trans.security.encode_id(request.id)
kwd['status'] = 'done'
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/requests_admin.py Wed Nov 11 15:59:41 2009 -0500
@@ -199,7 +199,8 @@
message='This request cannot be deleted as it is already been submitted',
**kwd) )
request.deleted = True
- request.flush()
+ trans.sa_session.add( request )
+ trans.sa_session.flush()
kwd = {}
kwd['id'] = trans.security.encode_id(request.id)
return trans.response.send_redirect( web.url_for( controller='requests_admin',
@@ -221,7 +222,8 @@
**kwd) )
# change request's submitted field
request.deleted = False
- request.flush()
+ trans.sa_session.add( request )
+ trans.sa_session.flush()
kwd = {}
kwd['id'] = trans.security.encode_id(request.id)
return trans.response.send_redirect( web.url_for( controller='requests_admin',
@@ -253,10 +255,11 @@
new_state = request.type.states[0]
for s in request.samples:
event = trans.app.model.SampleEvent(s, new_state, 'Samples submitted to the system')
- event.flush()
+ trans.sa_session.add( event )
# change request's submitted field
request.state = request.states.SUBMITTED
- request.flush()
+ trans.sa_session.add( request )
+ trans.sa_session.flush()
kwd = {}
kwd['id'] = trans.security.encode_id(request.id)
kwd['status'] = 'done'
@@ -579,7 +582,8 @@
user_address.postal_code = util.restore_text(params.get('field_%i_postal_code' % index, ''))
user_address.country = util.restore_text(params.get('field_%i_country' % index, ''))
user_address.phone = util.restore_text(params.get('field_%i_phone' % index, ''))
- user_address.flush()
+ trans.sa_session.add( user_address )
+ trans.sa_session.flush()
trans.sa_session.refresh( trans.user )
values.append(int(user_address.id))
elif value == unicode('none'):
@@ -591,13 +595,14 @@
else:
values.append(util.restore_text(params.get('field_%i' % index, '')))
form_values = trans.app.model.FormValues(request_type.request_form, values)
- form_values.flush()
+ trans.sa_session.add( form_values )
+ trans.sa_session.flush()
if not request:
request = trans.app.model.Request(name, desc, request_type,
user, form_values,
library=library, folder=folder,
state=trans.app.model.Request.states.UNSUBMITTED)
- request.flush()
+ trans.sa_session.add( request )
else:
request.name = name
request.desc = desc
@@ -606,7 +611,8 @@
request.values = form_values
request.library = library
request.folder = folder
- request.flush()
+ trans.sa_session.add( request )
+ trans.sa_session.flush()
return request
@@ -671,7 +677,8 @@
**kwd) )
# change request's submitted field
request.state = request.states.UNSUBMITTED
- request.flush()
+ trans.sa_session.add( request )
+ trans.sa_session.flush()
kwd = {}
kwd['id'] = trans.security.encode_id(request.id)
kwd['status'] = 'done'
@@ -797,9 +804,11 @@
for field_index in range(len(request.type.sample_form.fields)):
sample_values.append(util.restore_text( params.get( 'sample_%i_field_%i' % (sample_index, field_index), '' ) ))
form_values = trans.app.model.FormValues(request.type.sample_form, sample_values)
- form_values.flush()
+ trans.sa_session.add( form_values )
+ trans.sa_session.flush()
s = trans.app.model.Sample(sample_name, '', request, form_values)
- s.flush()
+ trans.sa_session.add( s )
+ trans.sa_session.flush()
else:
for index in range(len(current_samples)):
sample_index = index
@@ -812,9 +821,11 @@
if sample:
form_values = trans.sa_session.query( trans.app.model.FormValues ).get( sample.values.id )
form_values.content = sample_values
- form_values.flush()
+ trans.sa_session.add( form_values )
+ trans.sa_session.flush()
sample.name = new_sample_name
- sample.flush()
+ trans.sa_session.add( sample )
+ trans.sa_session.flush()
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='list',
operation='show_request',
@@ -848,8 +859,7 @@
s = request.has_sample(sample_name)
if s:
trans.sa_session.delete( s )
- s.flush()
- request.flush()
+ trans.sa_session.flush()
del current_samples[sample_index]
return trans.fill_template( '/admin/requests/show_request.mako',
request=request,
@@ -1026,21 +1036,22 @@
for index, sample in enumerate(request.samples):
bar_code = util.restore_text(params.get('sample_%i_bar_code' % index, ''))
sample.bar_code = bar_code
- sample.flush()
+ trans.sa_session.add( sample )
+ trans.sa_session.flush()
# change the state of all the samples to the next state
# get the new state
new_state = request.type.states[1]
for s in request.samples:
event = trans.app.model.SampleEvent(s, new_state, 'Bar code added to this sample')
- event.flush()
+ trans.sa_session.add( event )
+ trans.sa_session.flush()
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='list',
operation='show_request',
id=trans.security.encode_id(request.id),
message='Bar codes have been saved for this request',
status='done'))
-
- def __set_request_state(self, request):
+ def __set_request_state( self, trans, request ):
# check if all the samples of the current request are in the final state
complete = True
for s in request.samples:
@@ -1050,9 +1061,8 @@
request.state = request.states.COMPLETE
else:
request.state = request.states.SUBMITTED
- request.flush()
-
-
+ trans.sa_session.add( request )
+ trans.sa_session.flush()
def change_state(self, trans, sample):
possible_states = sample.request.type.states
curr_state = sample.current_state()
@@ -1089,8 +1099,9 @@
trans.app.model.SampleState.table.c.id == selected_state ) ) \
.first()
event = trans.app.model.SampleEvent(sample, new_state, comments)
- event.flush()
- self.__set_request_state(sample.request)
+ trans.sa_session.add( event )
+ trans.sa_session.flush()
+ self.__set_request_state( trans, sample.request )
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='show_events',
sample_id=sample.id))
@@ -1208,17 +1219,19 @@
rt.desc = util.restore_text( params.description ) or ""
rt.request_form = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( params.request_form_id ) )
rt.sample_form = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( params.sample_form_id ) )
- rt.flush()
+ trans.sa_session.add( rt )
+ trans.sa_session.flush()
# set sample states
ss_list = trans.sa_session.query( trans.app.model.SampleState ).filter( trans.app.model.SampleState.table.c.request_type_id == rt.id )
for ss in ss_list:
trans.sa_session.delete( ss )
- ss.flush()
+ trans.sa_session.flush()
for i in range( num_states ):
name = util.restore_text( params.get( 'state_name_%i' % i, None ))
desc = util.restore_text( params.get( 'state_desc_%i' % i, None ))
ss = trans.app.model.SampleState(name, desc, rt)
- ss.flush()
+ trans.sa_session.add( ss )
+ trans.sa_session.flush()
msg = "The new request type named '%s' with %s state(s) has been created" % (rt.name, num_states)
return rt, msg
@web.expose
@@ -1229,7 +1242,7 @@
messagetype = params.get( 'messagetype', 'done' )
rt = trans.sa_session.query( trans.app.model.RequestType ).get( int( util.restore_text( params.request_type_id ) ) )
rt.deleted = True
- rt.flush()
+ trans.sa_session.flush()
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='manage_request_types',
msg='Request type <b>%s</b> has been deleted' % rt.name,
@@ -1242,7 +1255,7 @@
messagetype = params.get( 'messagetype', 'done' )
rt = trans.sa_session.query( trans.app.model.RequestType ).get( int( util.restore_text( params.request_type_id ) ) )
rt.deleted = False
- rt.flush()
+ trans.sa_session.flush()
return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='manage_request_types',
msg='Request type <b>%s</b> has been undeleted' % rt.name,
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/root.py Wed Nov 11 15:59:41 2009 -0500
@@ -460,7 +460,8 @@
except:
association = None
new_history.add_galaxy_session( galaxy_session, association=association )
- new_history.flush()
+ trans.sa_session.add( new_history )
+ trans.sa_session.flush()
if not user_history.datasets:
trans.set_history( new_history )
trans.log_event( "History imported, id: %s, name: '%s': " % (str(new_history.id) , new_history.name ) )
@@ -479,7 +480,8 @@
except:
association = None
new_history.add_galaxy_session( galaxy_session, association=association )
- new_history.flush()
+ trans.sa_session.add( new_history )
+ trans.sa_session.flush()
trans.set_history( new_history )
trans.log_event( "History imported, id: %s, name: '%s': " % (str(new_history.id) , new_history.name ) )
return trans.show_ok_message( """
@@ -506,7 +508,8 @@
else:
permissions = trans.app.security_agent.history_get_default_permissions( history )
trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
- data.flush()
+ trans.sa_session.add( data )
+ trans.sa_session.flush()
data_file = open( data.file_name, "wb" )
file_data.file.seek( 0 )
data_file.write( file_data.file.read() )
@@ -515,11 +518,11 @@
data.set_size()
data.init_meta()
data.set_meta()
- data.flush()
+ trans.sa_session.flush()
history.add_dataset( data )
- history.flush()
+ trans.sa_session.flush()
data.set_peek()
- data.flush()
+ trans.sa_session.flush()
trans.log_event("Added dataset %d to history %d" %(data.id, trans.history.id))
return trans.show_ok_message("Dataset "+str(data.hid)+" added to history "+str(history_id)+".")
except Exception, e:
@@ -567,7 +570,8 @@
## history = trans.app.model.History.get( old_data.history_id )
history = trans.get_history()
history.add_dataset(new_data)
- new_data.flush()
+ trans.sa_session.add( new_data )
+ trans.sa_session.flush()
return trans.show_message( "<p>Secondary dataset has been made primary.</p>", refresh_frames=['history'] )
except:
return trans.show_error_message( "<p>Failed to make secondary dataset primary.</p>" )
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/tracks.py Wed Nov 11 15:59:41 2009 -0500
@@ -250,7 +250,9 @@
new_dataset = dataset.datatype.convert_dataset( trans, dataset, type, return_output = True, visible = False ).values()[0]
new_dataset.hid = dataset.hid # Hrrmmm....
new_dataset.name = dataset.name
- new_dataset.flush()
+ trans.sa_session.add( new_dataset )
+ trans.sa_session.flush()
assoc.dataset = new_dataset
- assoc.flush()
+ trans.sa_session.add( assoc )
+ trans.sa_session.flush()
return new_dataset
diff -r 0edb42925161 -r b33b8f5e03b8 lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Wed Nov 11 15:13:12 2009 -0500
+++ b/lib/galaxy/web/controllers/user.py Wed Nov 11 15:59:41 2009 -0500
@@ -46,7 +46,8 @@
conf_pass_err = "New passwords do not match."
else:
user.set_password_cleartext( new_pass )
- user.flush()
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
trans.log_event( "User change password" )
return trans.show_ok_message( "Password has been changed for " + user.email)
# Generate input form
@@ -74,7 +75,8 @@
conf_email_err = "Email addresses do not match."
else:
user.email = email
- user.flush()
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
trans.log_event( "User change email" )
return trans.show_ok_message( "Email has been changed to: " + user.email, refresh_frames=['masthead', 'history'] )
return trans.show_form(
@@ -99,7 +101,8 @@
username_err = "This username is not available"
else:
user.username = username
- user.flush()
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
trans.log_event( "User change username" )
return trans.show_ok_message( "Username been set to: " + user.username )
else:
@@ -195,7 +198,8 @@
user = trans.app.model.User( email=email )
user.set_password_cleartext( password )
user.username = username
- user.flush()
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
trans.app.security_agent.create_private_user_role( user )
# We set default user permissions, before we log in and set the default history permissions
trans.app.security_agent.user_set_default_permissions( user, default_access_private = trans.app.config.new_user_dataset_access_role_default_private )
@@ -289,7 +293,8 @@
user_address.postal_code = util.restore_text(params.get('field_%i_postal_code' % index, ''))
user_address.country = util.restore_text(params.get('field_%i_country' % index, ''))
user_address.phone = util.restore_text(params.get('field_%i_phone' % index, ''))
- user_address.flush()
+ trans.sa_session.add( user_address )
+ trans.sa_session.flush()
trans.sa_session.refresh( user )
values.append(int(user_address.id))
elif value == unicode('none'):
@@ -303,13 +308,15 @@
if new_user or not user.values:
# new user or existing
form_values = trans.app.model.FormValues(user_info_form, values)
- form_values.flush()
+ trans.sa_session.add( form_values )
+ trans.sa_session.flush()
user.values = form_values
elif user.values:
# editing the user info of an existing user with existing user info
user.values.content = values
- user.values.flush()
- user.flush()
+ trans.sa_session.add( user.values )
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
def __validate_email(self, trans, params, email, user=None):
error = None
if user:
@@ -489,7 +496,8 @@
# the new email & username
user.email = email
user.username = username
- user.flush()
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
msg = 'The login information has been updated with the changes'
if params.get('admin_view', 'False') == 'True':
return trans.response.send_redirect( web.url_for( controller='user',
@@ -532,7 +540,8 @@
messagetype='error') )
# save new password
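The pattern applied throughout this changeset is the same everywhere: the old per-object flush (obj.flush(), a leftover of assignmapper-style mappings) is replaced by explicit session management, where new or modified objects are registered with trans.sa_session.add() (or add_all()) and the SQL is emitted once with trans.sa_session.flush(). A minimal self-contained sketch of that pattern, using an invented stand-in model rather than Galaxy's real model classes:

from sqlalchemy import Boolean, Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Group( Base ):
    # Stand-in for a Galaxy model class; not the real mapping.
    __tablename__ = 'galaxy_group'
    id = Column( Integer, primary_key=True )
    name = Column( String( 255 ) )
    deleted = Column( Boolean, default=False )

engine = create_engine( 'sqlite:///:memory:' )
Base.metadata.create_all( engine )
sa_session = sessionmaker( bind=engine )()

# New objects must be add()ed before they can be flushed.
group = Group( name='example' )
sa_session.add( group )
sa_session.flush()       # emits the INSERT; group.id is now populated

# Modified persistent objects are already tracked by the session, so add()
# is a harmless no-op -- which is why the hunks above can add() everything
# uniformly (or add_all() several objects) before a single flush().
group.deleted = True
sa_session.add( group )
sa_session.flush()       # emits the UPDATE

# Deletes also go through the session; note the hunks above that drop the
# old obj.flush() after trans.sa_session.delete( obj ).
sa_session.delete( group )
sa_session.flush()       # emits the DELETE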
12 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/9b9c3603fd09
changeset: 3016:9b9c3603fd09
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Nov 11 16:52:18 2009 -0500
description:
Quick fix for external cluster metadata, but it doesn't work with the upload tool.
diffstat:
lib/galaxy/jobs/runners/pbs.py | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (12 lines):
diff -r b33b8f5e03b8 -r 9b9c3603fd09 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py Wed Nov 11 15:59:41 2009 -0500
+++ b/lib/galaxy/jobs/runners/pbs.py Wed Nov 11 16:52:18 2009 -0500
@@ -222,7 +222,7 @@
script = pbs_symlink_template % (job_wrapper.galaxy_lib_dir, " ".join(job_wrapper.get_input_fnames() + output_files), self.app.config.pbs_stage_path, exec_dir, command_line)
else:
if self.app.config.set_metadata_externally:
- external_metadata_script = job_wrapper.setup_external_metadata( exec_dir = exec_dir, tmp_dir = self.app.config.new_file_path, dataset_files_path = self.app.model.Dataset.file_path, output_fnames = output_fnames, kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior
+ external_metadata_script = job_wrapper.setup_external_metadata( exec_dir = os.path.abspath( os.getcwd() ), tmp_dir = self.app.config.new_file_path, dataset_files_path = self.app.model.Dataset.file_path, output_fnames = output_fnames, kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior
else:
external_metadata_script = ""
script = pbs_template % ( job_wrapper.galaxy_lib_dir, exec_dir, command_line, external_metadata_script )
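The one-line change above only touches the non-staging branch: the quick fix pins the exec_dir passed to setup_external_metadata() to Galaxy's own working directory instead of the exec_dir computed earlier, which can point at a staged path on the cluster. A trivial illustration of the value the fixed call now passes (paths invented):

import os

# Hypothetical value exec_dir could hold when file staging is in play:
exec_dir = '/pbs/stage/galaxy_job'
# What the fixed call passes instead -- the directory Galaxy itself runs from:
print( os.path.abspath( os.getcwd() ) )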
details: http://www.bx.psu.edu/hg/galaxy/rev/18586d1194f9
changeset: 3017:18586d1194f9
user: rc
date: Wed Nov 11 17:55:08 2009 -0500
description:
Added sff datatype
Also, made changes to upload.py to sniff sff binary files
diffstat:
lib/galaxy/datatypes/data.py | 34 ++++++++++++++++-
lib/galaxy/datatypes/registry.py | 3 +
lib/galaxy/datatypes/sniff.py | 3 +
lib/galaxy/datatypes/test/1.sff |
test-data/1.sff |
test/functional/test_sniffing_and_metadata_settings.py | 11 +++++
tools/data_source/upload.py | 4 ++
7 files changed, 54 insertions(+), 1 deletions(-)
diffs (124 lines):
diff -r 9b9c3603fd09 -r 18586d1194f9 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Wed Nov 11 16:52:18 2009 -0500
+++ b/lib/galaxy/datatypes/data.py Wed Nov 11 17:55:08 2009 -0500
@@ -1,4 +1,4 @@
-import logging, os, sys, time, tempfile
+import logging, os, sys, time, tempfile, binascii
from galaxy import util
from galaxy.util.odict import odict
from galaxy.util.bunch import Bunch
@@ -455,3 +455,35 @@
class Newick( Text ):
pass
+
+class Sff( Binary ):
+ """ Standard Flowgram Format (SFF) """
+ file_ext = "sff"
+ def __init__( self, **kwd ):
+ Binary.__init__(self, **kwd)
+ def init_meta( self, dataset, copy_from=None ):
+ Binary.init_meta( self, dataset, copy_from=copy_from )
+ def sniff( self, filename ):
+ '''
+ The first 4 bytes of any sff file is '.sff'
+
+ >>> fname = get_test_fname( '1.sff' )
+ >>> Sff().sniff( fname )
+ True
+ '''
+ header = open( filename ).read(4)
+ if binascii.b2a_hex( header ) == binascii.hexlify( '.sff' ):
+ return True
+ return False
+ def set_peek( self, dataset ):
+ if not dataset.dataset.purged:
+ dataset.peek = "Binary sff file"
+ dataset.blurb = nice_size( dataset.get_size() )
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+ def display_peek(self, dataset):
+ try:
+ return dataset.peek
+ except:
+ return "sff file (%s)" % ( nice_size( dataset.get_size() ) )
diff -r 9b9c3603fd09 -r 18586d1194f9 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Wed Nov 11 16:52:18 2009 -0500
+++ b/lib/galaxy/datatypes/registry.py Wed Nov 11 17:55:08 2009 -0500
@@ -133,6 +133,7 @@
'qual454' : qualityscore.QualityScore454(),
'sam' : tabular.Sam(),
'scf' : images.Scf(),
+ 'sff' : data.Sff(),
'tabular' : tabular.Tabular(),
'taxonomy' : tabular.Taxonomy(),
'txt' : data.Text(),
@@ -162,6 +163,7 @@
'qual454' : 'text/plain',
'sam' : 'text/plain',
'scf' : 'application/octet-stream',
+ 'sff' : 'application/octet-stream',
'tabular' : 'text/plain',
'taxonomy' : 'text/plain',
'txt' : 'text/plain',
@@ -172,6 +174,7 @@
# because some formats are much more flexibly defined than others.
if len(self.sniff_order) < 1:
self.sniff_order = [
+ data.Sff(),
xml.BlastXml(),
sequence.Maf(),
sequence.Lav(),
diff -r 9b9c3603fd09 -r 18586d1194f9 lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py Wed Nov 11 16:52:18 2009 -0500
+++ b/lib/galaxy/datatypes/sniff.py Wed Nov 11 17:55:08 2009 -0500
@@ -249,6 +249,9 @@
>>> fname = get_test_fname('alignment.lav')
>>> guess_ext(fname)
'lav'
+ >>> fname = get_test_fname('1.sff')
+ >>> guess_ext(fname)
+ 'sff'
"""
if sniff_order is None:
datatypes_registry = registry.Registry()
diff -r 9b9c3603fd09 -r 18586d1194f9 lib/galaxy/datatypes/test/1.sff
Binary file lib/galaxy/datatypes/test/1.sff has changed
diff -r 9b9c3603fd09 -r 18586d1194f9 test-data/1.sff
Binary file test-data/1.sff has changed
diff -r 9b9c3603fd09 -r 18586d1194f9 test/functional/test_sniffing_and_metadata_settings.py
--- a/test/functional/test_sniffing_and_metadata_settings.py Wed Nov 11 16:52:18 2009 -0500
+++ b/test/functional/test_sniffing_and_metadata_settings.py Wed Nov 11 17:55:08 2009 -0500
@@ -246,6 +246,17 @@
assert latest_hda is not None, "Problem retrieving fastq hda from the database"
if not latest_hda.name == '2gen.fastq' and not latest_hda.extension == 'fastq':
raise AssertionError, "fastq data type was not correctly sniffed."
+ def test_0100_sff_datatype( self ):
+ """Testing correctly sniffing sff format upon upload"""
+ self.upload_file( '1.sff' )
+ self.verify_dataset_correctness( '1.sff' )
+ self.check_history_for_string( 'format: <span class="sff">sff' )
+ latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+ .first()
+ assert latest_hda is not None, "Problem retrieving sff hda from the database"
+ if not latest_hda.name == '1.sff' and not latest_hda.extension == 'sff':
+ raise AssertionError, "sff data type was not correctly sniffed."
def test_9999_clean_up( self ):
self.delete_history( id=self.security.encode_id( history1.id ) )
self.logout()
diff -r 9b9c3603fd09 -r 18586d1194f9 tools/data_source/upload.py
--- a/tools/data_source/upload.py Wed Nov 11 16:52:18 2009 -0500
+++ b/tools/data_source/upload.py Wed Nov 11 17:55:08 2009 -0500
@@ -234,6 +234,10 @@
else:
ext = dataset.file_type
data_type = ext
+ elif data_type == 'binary' and ext == 'auto':
+ # currently we are only sniffing sff binary files
+ ext = sniff.guess_ext( dataset.path )
+ data_type = ext
# Save job info for the framework
if ext == 'auto' and dataset.ext:
ext = dataset.ext
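The new sniffer keys on the SFF magic number: per the docstring above, the first four bytes of any SFF file are '.sff'. A standalone sketch of the same check (the function name is made up; binary mode is used here for safety, though the diff above opens in the default text mode):

import binascii

def looks_like_sff( filename ):
    # Equivalent to Sff.sniff() above: hex-compare the 4-byte header to '.sff'.
    header = open( filename, 'rb' ).read( 4 )
    return binascii.b2a_hex( header ) == binascii.hexlify( '.sff' )

# looks_like_sff( '1.sff' ) would return True for the test file added in this changeset.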
12 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/dc9b5a47754c
changeset: 3012:dc9b5a47754c
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Nov 11 14:41:32 2009 -0500
description:
Fix for issue #210: redirect to the proper path when the tool_executed form is instructed to redirect.
diffstat:
templates/tool_executed.mako | 6 +++---
1 files changed, 3 insertions(+), 3 deletions(-)
diffs (27 lines):
diff -r d36d08cc4abb -r dc9b5a47754c templates/tool_executed.mako
--- a/templates/tool_executed.mako Wed Nov 11 13:53:48 2009 -0500
+++ b/templates/tool_executed.mako Wed Nov 11 14:41:32 2009 -0500
@@ -28,7 +28,7 @@
}
function refresh() {
- top.location.href = '${request.base}';
+ top.location.href = '${h.url_for( "/" )}';
}
</script>
@@ -53,11 +53,11 @@
</p>
%if tool.options.refresh:
-<p id="refresh_message" style="display: none;">You are now being redirected back to <a href="${request.base}">Galaxy</a></div>
+<p id="refresh_message" style="display: none;">You are now being redirected back to <a href="${h.url_for( '/' )}">Galaxy</a></div>
%endif
</div>
</body>
-</html>
\ No newline at end of file
+</html>
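The substitution matters when Galaxy is mounted under a URL prefix behind a proxy: h.url_for( '/' ) respects the configured prefix, while the fix implies request.base carried only scheme and host. A rough self-contained illustration under that assumption (environ values invented):

# Assumed environ for a Galaxy instance proxied at /galaxy
environ = { 'wsgi.url_scheme': 'http', 'HTTP_HOST': 'example.org', 'SCRIPT_NAME': '/galaxy' }

base = '%s://%s' % ( environ[ 'wsgi.url_scheme' ], environ[ 'HTTP_HOST' ] )
redirect = environ[ 'SCRIPT_NAME' ] + '/'

print( base )      # http://example.org  <- a prefix-less base loses /galaxy
print( redirect )  # /galaxy/            <- the prefix-aware target of url_for( '/' )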
12 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/120a34362c82
changeset: 3013:120a34362c82
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Nov 11 14:57:34 2009 -0500
description:
Include the tool name in the job name too
diffstat:
lib/galaxy/jobs/runners/pbs.py | 4 ++--
1 files changed, 2 insertions(+), 2 deletions(-)
diffs (21 lines):
diff -r dc9b5a47754c -r 120a34362c82 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py Wed Nov 11 14:41:32 2009 -0500
+++ b/lib/galaxy/jobs/runners/pbs.py Wed Nov 11 14:57:34 2009 -0500
@@ -204,7 +204,7 @@
job_attrs[3].name = pbs.ATTR_stageout
job_attrs[3].value = stageout
job_attrs[4].name = pbs.ATTR_N
- job_attrs[4].value = "%s" % job_wrapper.job_id
+ job_attrs[4].value = "%s_%s" % ( job_wrapper.job_id, job_wrapper.tool.id )
exec_dir = os.path.abspath( os.getcwd() )
# If not, we're using NFS
else:
@@ -214,7 +214,7 @@
job_attrs[1].name = pbs.ATTR_e
job_attrs[1].value = efile
job_attrs[2].name = pbs.ATTR_N
- job_attrs[2].value = "%s" % job_wrapper.job_id
+ job_attrs[2].value = "%s_%s" % ( job_wrapper.job_id, job_wrapper.tool.id )
exec_dir = os.getcwd()
# write the job script
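For the record, the new ATTR_N value (the job name PBS tools such as qstat display) is just the two ids joined with an underscore; with made-up values:

job_id, tool_id = 42, 'bowtie_wrapper'
print( "%s_%s" % ( job_id, tool_id ) )   # -> 42_bowtie_wrapper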