[hg] galaxy 2915: Make Galaxy attempt to honor the user's umask ...
details: http://www.bx.psu.edu/hg/galaxy/rev/1ce07e82a4c1
changeset: 2915:1ce07e82a4c1
user:      Nate Coraor <nate@bx.psu.edu>
date:      Fri Oct 23 14:33:23 2009 -0400
description:
Make Galaxy attempt to honor the user's umask and primary group (output
datasets can have the wrong primary group if the primary group differs
on the cluster). Also fixed a composite datatype files_path bug in the
upload tool.

9 file(s) affected in this change:

lib/galaxy/config.py
lib/galaxy/jobs/__init__.py
lib/galaxy/tools/__init__.py
lib/galaxy/tools/actions/__init__.py
lib/galaxy/tools/actions/upload_common.py
lib/galaxy/util/__init__.py
lib/galaxy/web/controllers/tool_runner.py
tools/data_source/upload.py
tools/data_source/upload.xml

diffs (204 lines):

diff -r 50631e30ede2 -r 1ce07e82a4c1 lib/galaxy/config.py
--- a/lib/galaxy/config.py	Fri Oct 23 13:58:56 2009 -0400
+++ b/lib/galaxy/config.py	Fri Oct 23 14:33:23 2009 -0400
@@ -23,6 +23,10 @@
     def __init__( self, **kwargs ):
         self.config_dict = kwargs
         self.root = kwargs.get( 'root_dir', '.' )
+        # Collect the umask and primary gid from the environment
+        self.umask = os.umask( 077 ) # get the current umask
+        os.umask( self.umask ) # can't get w/o set, so set it back
+        self.gid = os.getgid() # if running under newgrp(1) we'll need to fix the group of data created on the cluster
         # Database related configuration
         self.database = resolve_path( kwargs.get( "database_file", "database/universe.d" ), self.root )
         self.database_connection = kwargs.get( "database_connection", False )

diff -r 50631e30ede2 -r 1ce07e82a4c1 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py	Fri Oct 23 13:58:56 2009 -0400
+++ b/lib/galaxy/jobs/__init__.py	Fri Oct 23 14:33:23 2009 -0400
@@ -594,9 +594,10 @@
         self.tool.call_hook( 'exec_after_process', self.queue.app, inp_data=inp_data,
                              out_data=out_data, param_dict=param_dict,
                              tool=self.tool, stdout=stdout, stderr=stderr )
-        # TODO
-        # validate output datasets
         job.command_line = self.command_line
+        # fix permissions
+        for path in [ dp.real_path for dp in self.get_output_fnames() ]:
+            util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
         self.sa_session.flush()
         log.debug( 'job %d ended' % self.job_id )
         self.cleanup()

diff -r 50631e30ede2 -r 1ce07e82a4c1 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py	Fri Oct 23 13:58:56 2009 -0400
+++ b/lib/galaxy/tools/__init__.py	Fri Oct 23 14:33:23 2009 -0400
@@ -1463,6 +1463,14 @@
                 if len( os.listdir( temp_file_path ) ) > 0:
                     store_file_path = os.path.join( os.path.join( self.app.config.file_path, *directory_hash_id( hda.dataset.id ) ), "dataset_%d_files" % hda.dataset.id )
                     shutil.move( temp_file_path, store_file_path )
+                    # fix permissions
+                    for basedir, dirs, files in os.walk( store_file_path ):
+                        util.umask_fix_perms( basedir, self.app.config.umask, 0777, self.app.config.gid )
+                        for file in files:
+                            path = os.path.join( basedir, file )
+                            if os.path.islink( path ):
+                                continue # ignore symlinks
+                            util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
             except:
                 continue

diff -r 50631e30ede2 -r 1ce07e82a4c1 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py	Fri Oct 23 13:58:56 2009 -0400
+++ b/lib/galaxy/tools/actions/__init__.py	Fri Oct 23 14:33:23 2009 -0400
@@ -202,6 +202,8 @@
                 trans.app.security_agent.set_all_dataset_permissions( data.dataset, output_permissions )
                 # Create an empty file immediately
                 open( data.file_name, "w" ).close()
+                # Fix permissions
+                util.umask_fix_perms( data.file_name, trans.app.config.umask, 0666 )
                 # This may not be neccesary with the new parent/child associations
                 data.designation = name
                 # Copy metadata from one of the inputs if requested.
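A note on the umask capture in config.py above: Python's os.umask() has no
read-only form; it always installs a new mask and returns the previous one,
so the only way to read the current umask is to set a throwaway value and
immediately restore what came back. A minimal standalone sketch of the same
idiom (Python 3 octal syntax 0o077 in place of the changeset's Python 2
literal 077):

    import os

    def get_current_umask():
        # os.umask() can only set-and-return, so install a temporary
        # restrictive mask, then immediately put the real one back.
        current = os.umask( 0o077 )   # returns the previous (real) umask
        os.umask( current )           # restore it right away
        return current

    if __name__ == '__main__':
        print( 'current umask:', oct( get_current_umask() ) )
        print( 'primary gid:', os.getgid() )

The captured umask and gid are then applied to each job's output files via
util.umask_fix_perms(), as in the jobs/__init__.py hunk above.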
diff -r 50631e30ede2 -r 1ce07e82a4c1 lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py	Fri Oct 23 13:58:56 2009 -0400
+++ b/lib/galaxy/tools/actions/upload_common.py	Fri Oct 23 14:33:23 2009 -0400
@@ -239,7 +239,6 @@
                     type = uploaded_dataset.type,
                     metadata = uploaded_dataset.metadata,
                     primary_file = uploaded_dataset.primary_file,
-                    extra_files_path = data.extra_files_path,
                     composite_file_paths = uploaded_dataset.composite_files,
                     composite_files = dict( [ ( k, v.__dict__ ) for k, v in data.datatype.get_composite_files( data ).items() ] ) )
         else:

diff -r 50631e30ede2 -r 1ce07e82a4c1 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py	Fri Oct 23 13:58:56 2009 -0400
+++ b/lib/galaxy/util/__init__.py	Fri Oct 23 14:33:23 2009 -0400
@@ -3,7 +3,7 @@
 """

 import logging
-import threading, random, string, re, binascii, pickle, time, datetime, math, re, os, sys, tempfile
+import threading, random, string, re, binascii, pickle, time, datetime, math, re, os, sys, tempfile, stat, grp

 # Older py compatibility
 try:
@@ -483,6 +483,38 @@
             raise
     raise IOError, (errno.EEXIST, "No usable temporary file name found")

+def umask_fix_perms( path, umask, unmasked_perms, gid=None ):
+    """
+    umask-friendly permissions fixing
+    """
+    perms = unmasked_perms & ~umask
+    st = os.stat( path )
+    # fix modes
+    if stat.S_IMODE( st.st_mode ) != perms:
+        try:
+            os.chmod( path, perms )
+        except Exception, e:
+            log.warning( 'Unable to honor umask (%s) for %s, tried to set: %s but mode remains %s, error was: %s' % ( oct( umask ), \
+                                                                                                                      path,
+                                                                                                                      oct( perms ),
+                                                                                                                      oct( stat.S_IMODE( st.st_mode ) ),
+                                                                                                                      e ) )
+    # fix group
+    if gid is not None and st.st_gid != gid:
+        try:
+            os.chown( path, -1, gid )
+        except Exception, e:
+            try:
+                desired_group = grp.getgrgid( gid )
+                current_group = grp.getgrgid( st.st_gid )
+            except:
+                desired_group = gid
+                current_group = st.st_gid
+            log.warning( 'Unable to honor primary group (%s) for %s, group remains %s, error was: %s' % ( desired_group, \
+                                                                                                          path,
+                                                                                                          current_group,
+                                                                                                          e ) )
+
 galaxy_root_path = os.path.join(__path__[0], "..","..","..")
 # The dbnames list is used in edit attributes and the upload tool
 dbnames = read_dbnames( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "builds.txt" ) )

diff -r 50631e30ede2 -r 1ce07e82a4c1 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py	Fri Oct 23 13:58:56 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py	Fri Oct 23 14:33:23 2009 -0400
@@ -140,7 +140,6 @@
         permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
         def create_dataset( name ):
             ud = Bunch( name=name, file_type=None, dbkey=None )
-            # Okay, time to make this crap actually use the upload_common functions, which means making them get called from outside the json_paramfile method.
         if nonfile_params.get( 'folder_id', False ):
             replace_id = nonfile_params.get( 'replace_id', None )
             if replace_id not in [ None, 'None' ]:
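The heart of umask_fix_perms() in the util/__init__.py hunk above is the
masking arithmetic: the target mode is the "unmasked" permission set with
every bit of the umask cleared, i.e. unmasked_perms & ~umask. A small sketch
of just the mode-fixing half (the group-fixing half uses
os.chown( path, -1, gid ) to change only the group); the function names here
are illustrative, not Galaxy's:

    import os, stat

    def masked_perms( unmasked_perms, umask ):
        # Clear any permission bit that is set in the umask.
        return unmasked_perms & ~umask

    # With a typical umask of 0o022, a request for world-writable 0o666
    # yields 0o644 for files, and 0o777 yields 0o755 for directories.
    assert masked_perms( 0o666, 0o022 ) == 0o644
    assert masked_perms( 0o777, 0o022 ) == 0o755

    def fix_perms( path, umask, unmasked_perms ):
        # chmod only when the current mode differs from the target.
        perms = masked_perms( unmasked_perms, umask )
        if stat.S_IMODE( os.stat( path ).st_mode ) != perms:
            os.chmod( path, perms )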
diff -r 50631e30ede2 -r 1ce07e82a4c1 tools/data_source/upload.py
--- a/tools/data_source/upload.py	Fri Oct 23 13:58:56 2009 -0400
+++ b/tools/data_source/upload.py	Fri Oct 23 14:33:23 2009 -0400
@@ -119,8 +119,8 @@
 def parse_outputs( args ):
     rval = {}
     for arg in args:
-        id, path = arg.split( ':', 1 )
-        rval[int( id )] = path
+        id, files_path, path = arg.split( ':', 2 )
+        rval[int( id )] = ( path, files_path )
     return rval

 def add_file( dataset, json_file, output_path ):
@@ -255,9 +255,9 @@
                       line_count = line_count )
     json_file.write( to_json_string( info ) + "\n" )

-def add_composite_file( dataset, json_file, output_path ):
+def add_composite_file( dataset, json_file, output_path, files_path ):
     if dataset.composite_files:
-        os.mkdir( dataset.extra_files_path )
+        os.mkdir( files_path )
         for name, value in dataset.composite_files.iteritems():
             value = util.bunch.Bunch( **value )
             if dataset.composite_file_paths[ value.name ] is None and not value.optional:
@@ -269,7 +269,7 @@
                     sniff.convert_newlines_sep2tabs( dataset.composite_file_paths[ value.name ][ 'path' ] )
                 else:
                     sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )
-            shutil.move( dataset.composite_file_paths[ value.name ][ 'path' ], os.path.join( dataset.extra_files_path, name ) )
+            shutil.move( dataset.composite_file_paths[ value.name ][ 'path' ], os.path.join( files_path, name ) )
     # Move the dataset to its "real" path
     shutil.move( dataset.primary_file, output_path )
     # Write the job info
@@ -290,12 +290,13 @@
         dataset = from_json_string( line )
         dataset = util.bunch.Bunch( **safe_dict( dataset ) )
         try:
-            output_path = output_paths[int( dataset.dataset_id )]
+            output_path = output_paths[int( dataset.dataset_id )][0]
         except:
             print >>sys.stderr, 'Output path for dataset %s not found on command line' % dataset.dataset_id
             sys.exit( 1 )
         if dataset.type == 'composite':
-            add_composite_file( dataset, json_file, output_path )
+            files_path = output_paths[int( dataset.dataset_id )][1]
+            add_composite_file( dataset, json_file, output_path, files_path )
         else:
             add_file( dataset, json_file, output_path )
     # clean up paramfile

diff -r 50631e30ede2 -r 1ce07e82a4c1 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml	Fri Oct 23 13:58:56 2009 -0400
+++ b/tools/data_source/upload.xml	Fri Oct 23 14:33:23 2009 -0400
@@ -1,6 +1,6 @@
 <?xml version="1.0"?>
-<tool name="Upload File" id="upload1" version="1.0.3">
+<tool name="Upload File" id="upload1" version="1.0.4">
   <description>
     from your computer
   </description>
@@ -11,7 +11,7 @@
     #while $varExists('output%i' % $outnum):
         #set $output = $getVar('output%i' % $outnum)
         #set $outnum += 1
-        ${output.dataset.dataset.id}:${output}
+        ${output.dataset.dataset.id}:${output.files_path}:${output}
     #end while
 </command>
 <inputs nginx_upload="true">
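The upload tool changes above widen each per-output command-line argument
from id:path to id:files_path:path, so composite datatypes learn where their
extra files belong without relying on data.extra_files_path. A sketch of the
parsing side, mirroring the new parse_outputs() in Python 3 (the example
paths are hypothetical); splitting with a limit of 2 keeps any further
colons inside the final path component:

    def parse_outputs( args ):
        # Map dataset id -> ( output path, extra-files path ).
        rval = {}
        for arg in args:
            dataset_id, files_path, path = arg.split( ':', 2 )
            rval[int( dataset_id )] = ( path, files_path )
        return rval

    outputs = parse_outputs( [ '1:/data/dataset_1_files:/data/dataset_1.dat' ] )
    assert outputs[1] == ( '/data/dataset_1.dat', '/data/dataset_1_files' )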