commit/galaxy-central: greg: Per request from Peter Cock, turn the "Copy data into Galaxy?" check box into a select list that clarifies the behavior. The default is to copy files into Galaxy.
1 new changeset in galaxy-central:

http://bitbucket.org/galaxy/galaxy-central/changeset/70d7d0bfaad6/
changeset:   r5217:70d7d0bfaad6
user:        greg
date:        2011-03-11 20:56:11
summary:     Per request from Peter Cock, turn the "Copy data into Galaxy?" check box into a select list that clarifies the behavior. The default is to copy files into Galaxy.
affected #:  5 files (617 bytes)

--- a/lib/galaxy/tools/actions/upload_common.py	Fri Mar 11 13:36:29 2011 -0500
+++ b/lib/galaxy/tools/actions/upload_common.py	Fri Mar 11 14:56:11 2011 -0500
@@ -280,7 +280,7 @@
         try:
             link_data_only = uploaded_dataset.link_data_only
         except:
-            link_data_only = False
+            link_data_only = 'copy_files'
         json = dict( file_type = uploaded_dataset.file_type,
                      ext = uploaded_dataset.ext,
                      name = uploaded_dataset.name,

--- a/lib/galaxy/web/controllers/library_common.py	Fri Mar 11 13:36:29 2011 -0500
+++ b/lib/galaxy/web/controllers/library_common.py	Fri Mar 11 14:56:11 2011 -0500
@@ -717,7 +717,7 @@
             space_to_tab = params.get( 'files_0|space_to_tab', '' )
         else:
             space_to_tab = params.get( 'space_to_tab', '' )
-        link_data_only = params.get( 'link_data_only', '' )
+        link_data_only = params.get( 'link_data_only', 'copy_files' )
         dbkey = params.get( 'dbkey', '?' )
         if isinstance( dbkey, list ):
             last_used_build = dbkey[0]
@@ -1044,8 +1044,9 @@
             if in_folder:
                 uploaded_dataset.in_folder = in_folder
             uploaded_dataset.data = upload_common.new_upload( trans, cntrller, uploaded_dataset, library_bunch )
-            if params.get( 'link_data_only', False ):
-                uploaded_dataset.link_data_only = True
+            link_data_only = params.get( 'link_data_only', 'copy_files' )
+            uploaded_dataset.link_data_only = link_data_only
+            if link_data_only == 'link_to_files':
                 uploaded_dataset.data.file_name = os.path.abspath( path )
                 # Since we are not copying the file into Galaxy's managed
                 # default file location, the dataset should never be purgable.
@@ -1059,7 +1060,8 @@
             for entry in os.listdir( full_dir ):
                 # Only import regular files
                 path = os.path.join( full_dir, entry )
-                if os.path.islink( full_dir ) and params.get( 'link_data_only', False ):
+                link_data_only = params.get( 'link_data_only', 'copy_files' )
+                if os.path.islink( full_dir ) and link_data_only == 'link_to_files':
                     # If we're linking instead of copying and the
                     # sub-"directory" in the import dir is actually a symlink,
                     # dereference the symlink, but not any of its contents.
@@ -1068,7 +1070,7 @@
                     path = os.path.join( link_path, entry )
                 else:
                     path = os.path.abspath( os.path.join( link_path, entry ) )
-                elif os.path.islink( path ) and os.path.isfile( path ) and params.get( 'link_data_only', False ):
+                elif os.path.islink( path ) and os.path.isfile( path ) and link_data_only == 'link_to_files':
                     # If we're linking instead of copying and the "file" in the
                     # sub-directory of the import dir is actually a symlink,
                     # dereference the symlink (one dereference only, Vasili).
@@ -1146,7 +1148,7 @@
             space_to_tab = params.get( 'files_0|space_to_tab', '' )
         else:
             space_to_tab = params.get( 'space_to_tab', '' )
-        link_data_only = params.get( 'link_data_only', '' )
+        link_data_only = params.get( 'link_data_only', 'copy_files' )
         dbkey = params.get( 'dbkey', '?' )
         if isinstance( dbkey, list ):
             last_used_build = dbkey[0]
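For readers skimming the controller changes above, the following standalone sketch (not part of the changeset; the function name and arguments are illustrative) restates the "dereference the symlink once" behavior used when a directory upload links rather than copies:

    import os

    def resolve_symlink_once( path, link_data_only='copy_files' ):
        # When linking instead of copying, dereference a symlink exactly once so
        # Galaxy points at the linked file itself; deeper symlinks are left alone.
        if link_data_only == 'link_to_files' and os.path.islink( path ):
            target = os.readlink( path )
            if not os.path.isabs( target ):
                target = os.path.join( os.path.dirname( path ), target )
            return os.path.abspath( target )
        return os.path.abspath( path )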
--- a/templates/library/common/common.mako	Fri Mar 11 13:36:29 2011 -0500
+++ b/templates/library/common/common.mako	Fri Mar 11 14:56:11 2011 -0500
@@ -218,33 +218,33 @@
                 <input type="checkbox" name="preserve_dirs" value="Yes" checked="true" />Yes
             </div>
             <div class="toolParamHelp" style="clear: both;">
-                If checked (default), library subfolders will be used to preserve any subdirectories on the filesystem.<br />
+                If checked (default), library sub-folders will be used to preserve any subdirectories on the filesystem.
                 If unchecked, any files in subdirectories on the filesystem will be placed directly in the library folder.
             </div>
         </div>
     %endif
     %if upload_option in ( 'upload_directory', 'upload_paths' ):
         <div class="form-row">
-            <%
-                if link_data_only == 'No':
-                    checked = ' checked'
-                else:
-                    checked = ''
-                link_data_only_field = '<input type="checkbox" name="link_data_only" value="No"%s/>No' % checked
-            %>
             <label>Copy data into Galaxy?</label>
             <div class="form-row-input">
-                ${link_data_only_field}
+                <select name="link_data_only">
+                    %if not link_data_only or link_data_only == 'copy_files':
+                        <option value="copy_files" selected>Copy files into Galaxy
+                        <option value="link_to_files">Link to files without copying into Galaxy
+                    %else:
+                        <option value="copy_files">Copy files into Galaxy
+                        <option value="link_to_files" selected>Link to files without copying into Galaxy
+                    %endif
+                </select>
             </div>
             <div class="toolParamHelp" style="clear: both;">
-                Normally data uploaded with this tool is copied into Galaxy's "files" directory
-                so any later changes to the data will not affect Galaxy.  However, this may not
-                be desired (especially for large NGS datasets), so use of this option will
-                force Galaxy to always read the data from its original path.
+                Normally data uploaded with this tool is copied into Galaxy's configured "file_path" location where Galaxy
+                has a form of control over the data files.  However, this may not be desired (especially for large NGS
+                datasets), so using the option labeled "Link to files without copying into Galaxy" will force Galaxy to
+                always read the data from its original path.
                 %if upload_option == 'upload_directory':
-                    Any symlinks encountered in the upload directory will be dereferenced once -
-                    that is, Galaxy will point directly to the file that is linked, but no other
-                    symlinks further down the line will be dereferenced.
+                    Any symlinks encountered in the uploaded directory will be dereferenced once.  That is, Galaxy will
+                    point directly to the file that is linked, but no other symlinks further down the line will be dereferenced.
                 %endif
             </div>
         </div>
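As a rough illustration only (this helper does not exist in Galaxy; it merely restates the convention), the new select list posts one of two string values, and anything missing or unrecognized falls back to copying, matching the params.get( 'link_data_only', 'copy_files' ) calls in the controller:

    def normalize_link_data_only( value ):
        # 'link_to_files' is the only value that switches off copying; empty or
        # legacy values default to 'copy_files', the behavior Galaxy ships with.
        return 'link_to_files' if value == 'link_to_files' else 'copy_files'

    assert normalize_link_data_only( 'link_to_files' ) == 'link_to_files'
    assert normalize_link_data_only( 'copy_files' ) == 'copy_files'
    assert normalize_link_data_only( '' ) == 'copy_files'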
--- a/test/base/twilltestcase.py	Fri Mar 11 13:36:29 2011 -0500
+++ b/test/base/twilltestcase.py	Fri Mar 11 14:56:11 2011 -0500
@@ -2004,7 +2004,7 @@
     # Library dataset stuff
     def upload_library_dataset( self, cntrller, library_id, folder_id, filename='', server_dir='', replace_id='',
                                 upload_option='upload_file', file_type='auto', dbkey='hg18', space_to_tab='',
-                                link_data_only='', preserve_dirs='Yes', roles=[], ldda_message='', hda_ids='',
+                                link_data_only='copy_files', preserve_dirs='Yes', roles=[], ldda_message='', hda_ids='',
                                 template_refresh_field_name='1_field_name', template_refresh_field_contents='',
                                 template_fields=[], show_deleted='False', strings_displayed=[] ):
         """Add datasets to library using any upload_option"""
@@ -2031,10 +2031,6 @@
             tc.fv( "1", "dbkey", dbkey )
         if space_to_tab:
             tc.fv( "1", "space_to_tab", space_to_tab )
-        if link_data_only:
-            tc.fv( "1", "link_data_only", link_data_only )
-        if upload_option == 'filesystem_paths' and preserve_dirs == 'Yes':
-            tc.fv( "1", "preserve_dirs", preserve_dirs )
         for role_id in roles:
             tc.fv( "1", "roles", role_id )
         # Refresh the form by selecting the upload_option - we do this here to ensure
@@ -2051,18 +2047,21 @@
             tc.fv( "add_history_datasets_to_library", "hda_ids", '1' )
             tc.submit( 'add_history_datasets_to_library_button' )
         else:
-            if filename:
-                filename = self.get_filename( filename )
-                tc.formfile( "1", "files_0|file_data", filename )
-            elif server_dir:
+            if upload_option == 'filesystem_paths' or upload_option == 'upload_directory':
+                tc.fv( "1", "link_data_only", link_data_only )
+                if upload_option == 'filesystem_paths' and preserve_dirs == 'Yes':
+                    tc.fv( "1", "preserve_dirs", preserve_dirs )
+            if upload_option == 'upload_directory' and server_dir:
                 tc.fv( "1", "server_dir", server_dir )
+            if upload_option == 'upload_file':
+                if filename:
+                    filename = self.get_filename( filename )
+                    tc.formfile( "1", "files_0|file_data", filename )
             for check_str in strings_displayed:
                 self.check_page_for_string( check_str )
             tc.submit( "runtool_btn" )
         # Give the files some time to finish uploading
         self.library_wait( library_id )
-        data = self.last_page()
-        file( 'greg1.html', 'wb' ).write( data )
         self.home()

     def ldda_permissions( self, cntrller, library_id, folder_id, id, role_ids_str, permissions_in=[], permissions_out=[], strings_displayed=[], ldda_name='' ):

--- a/tools/data_source/upload.py	Fri Mar 11 13:36:29 2011 -0500
+++ b/tools/data_source/upload.py	Fri Mar 11 14:56:11 2011 -0500
@@ -157,6 +157,7 @@
     line_count = None
     converted_path = None
     stdout = None
+    link_data_only = dataset.get( 'link_data_only', 'copy_files' )

     try:
         ext = dataset.file_type
@@ -334,7 +335,7 @@
         return
     if data_type != 'binary':
         # don't convert newlines on data we're only going to symlink
-        if not dataset.get( 'link_data_only', False ):
+        if link_data_only == 'link_to_files':
            in_place = True
            if dataset.type in ( 'server_dir', 'path_paste' ):
                in_place = False
@@ -353,9 +354,7 @@
    if ext == 'auto':
        ext = 'data'
    # Move the dataset to its "real" path
-    if dataset.get( 'link_data_only', False ):
-        pass # data will remain in place
-    elif dataset.type in ( 'server_dir', 'path_paste' ):
+    if link_data_only == 'copy_files' and dataset.type in ( 'server_dir', 'path_paste' ):
        if converted_path is not None:
            shutil.copy( converted_path, output_path )
            try:
@@ -365,7 +364,7 @@
        else:
            # this should not happen, but it's here just in case
            shutil.copy( dataset.path, output_path )
-    else:
+    elif link_data_only == 'copy_files':
        shutil.move( dataset.path, output_path )
    # Write the job info
    stdout = stdout or 'uploaded %s file' % data_type
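The net effect of the upload.py change can be summarized with a simplified, standalone sketch (the function and its parameters are illustrative assumptions, not Galaxy's API): only 'copy_files' uploads are copied or moved into Galaxy's file_path, while 'link_to_files' leaves the data where it is:

    import shutil

    def place_dataset( dataset_path, output_path, dataset_type,
                       link_data_only='copy_files', converted_path=None ):
        if link_data_only == 'copy_files' and dataset_type in ( 'server_dir', 'path_paste' ):
            # Copy so the original file in the import directory is left untouched.
            shutil.copy( converted_path or dataset_path, output_path )
        elif link_data_only == 'copy_files':
            # A normal upload is moved into Galaxy's managed file_path.
            shutil.move( dataset_path, output_path )
        # 'link_to_files': do nothing -- the dataset stays at its original path.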
Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--
This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.